diff --git a/.github/actions/e2e-bake-compose/action.yml b/.github/actions/e2e-bake-compose/action.yml deleted file mode 100644 index 341757ef2fe1..000000000000 --- a/.github/actions/e2e-bake-compose/action.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Bake Compose for E2E -description: Bake Compose for E2E - -inputs: - github_actor: - description: Github actor - required: true - github_token: - description: Github token - required: true - -runs: - using: composite - - steps: - - name: Set up Depot CLI - uses: depot/setup-action@v1 - with: - oidc: true - - - name: Switch Docker Runtime to Depot - run: depot configure-docker - shell: bash - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ inputs.github_actor }} - password: ${{ inputs.github_token }} - - - name: Bake Compose - uses: nick-fields/retry@v3 - with: - shell: bash - command: cd frontend && depot bake -f docker-compose-e2e-tests.yml --load - max_attempts: 2 - retry_on: error - timeout_minutes: 10 diff --git a/.github/actions/local-e2e-tests/action.yml b/.github/actions/local-e2e-tests/action.yml deleted file mode 100644 index a92a636c19d4..000000000000 --- a/.github/actions/local-e2e-tests/action.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Run E2E tests in local environment -description: Run the E2E tests against local environment - -inputs: - slack_token: - description: The slack authentication token. - required: true - tests: - description: The space separated list of E2E tests to be executed. - required: false - default: '' - concurrency: - description: The concurrent number of browsers to be used on testing. - required: false - default: 3 - -runs: - using: composite - - steps: - - name: Run Local API - id: run-local-api - uses: ./.github/actions/run-local-api - with: - e2e_test_token: some-token - # As per https://stackoverflow.com/q/65497331/421808 172.17.0.1 seems like the only way to resolve host DB - database_url: postgres://postgres:postgres@172.17.0.1:5432/flagsmith - disable_analytics_features: true - - - name: Run E2E tests against local - uses: ./.github/actions/e2e-tests - env: - E2E_CONCURRENCY: ${{ inputs.concurrency }} - with: - e2e_test_token: some-token - slack_token: ${{ inputs.slack_token }} - environment: local - tests: ${{ inputs.tests }} - - - name: Output API container status and logs - if: failure() - env: - API_CONTAINER_ID: ${{ steps.run-local-api.outputs.containerId }} - run: | - docker inspect $API_CONTAINER_ID | jq '.[0].State' - docker logs $API_CONTAINER_ID - shell: bash diff --git a/.github/actions/run-local-api/action.yml b/.github/actions/run-local-api/action.yml deleted file mode 100644 index da36f318ef93..000000000000 --- a/.github/actions/run-local-api/action.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Run E2E tests -description: Run the E2E tests against an environment - -inputs: - database_url: - description: The database URL to connect the API to - required: true - e2e_test_token: - description: The token to use for authenticating the E2E test process - required: false - default: some-token - disable_analytics_features: - description: Disables analytics features. 
- required: false - default: 'false' - -outputs: - containerId: - description: Docker container ID for the running Flagsmith API instance - value: ${{ steps.run-api-container.outputs.containerId }} - -runs: - using: composite - - steps: - - name: Set up Depot CLI - uses: depot/setup-action@v1 - with: - oidc: true - - - name: Build temporary Docker image for running the API - run: depot build --load -t flagsmith/flagsmith-api:e2e-${{ github.sha }} -f api/Dockerfile . - shell: bash - - - name: Run the API - id: run-api-container - working-directory: api - env: - E2E_TEST_AUTH_TOKEN: ${{ inputs.e2e_test_token }} - DATABASE_URL: ${{ inputs.database_url }} - DISABLE_ANALYTICS_FEATURES: ${{ inputs.disable_analytics_features }} - run: | - CONTAINER_ID=$( docker run \ - -p 8000:8000 \ - -e DATABASE_URL=$DATABASE_URL \ - -e E2E_TEST_AUTH_TOKEN=$E2E_TEST_AUTH_TOKEN \ - -e DISABLE_ANALYTICS_FEATURES=$DISABLE_ANALYTICS_FEATURES \ - -e DJANGO_ALLOWED_HOSTS="*" \ - -e DJANGO_SETTINGS_MODULE=app.settings.test \ - -e ENABLE_FE_E2E=True \ - -e ACCESS_LOG_LOCATION=- \ - -d flagsmith/flagsmith-api:e2e-${{ github.sha }} ) - echo "containerId=$CONTAINER_ID" >> "$GITHUB_OUTPUT" - shell: bash diff --git a/.github/labeler.yml b/.github/labeler.yml index a3cf00942cec..108972be4f1e 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -12,6 +12,6 @@ api: - changed-files: - any-glob-to-any-file: api/** -github: +infrastructure: - changed-files: - - any-glob-to-any-file: .github/** + - any-glob-to-any-file: infrastructure/** diff --git a/.github/workflows/.reusable-docker-build.yml b/.github/workflows/.reusable-docker-build.yml new file mode 100644 index 000000000000..856180401aef --- /dev/null +++ b/.github/workflows/.reusable-docker-build.yml @@ -0,0 +1,84 @@ +# reusable workflow +name: Build Docker Image + +on: + workflow_call: + inputs: + registry-url: + type: string + description: Github container registry base URL + required: false + default: ghcr.io + file: + type: string + description: Path to the Dockerfile + required: true + image-name: + type: string + description: Image slug + required: true + build-args: + type: string + description: List of build-time variables + required: false + scan: + type: boolean + description: Whether to scan image for vulnerabilities + required: false + default: true + outputs: + image: + description: Resulting image specifier + value: ${{ inputs.registry-url }}/flagsmith/${{ inputs.image-name }}:${{ jobs.build.outputs.version }} + +jobs: + build: + name: Build ${{ inputs.scan && 'and verify ' || '' }}${{ inputs.image-name }} image + runs-on: ubuntu-latest + outputs: + version: ${{ steps.meta.outputs.version }} + + permissions: + packages: write + id-token: write + contents: read + + steps: + - name: Cloning repo + uses: actions/checkout@v4 + + - name: Set up Depot CLI + uses: depot/setup-action@v1 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ inputs.registry-url }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ inputs.registry-url }}/flagsmith/${{ inputs.image-name }} + tags: | + type=ref,event=branch + type=ref,event=tag + type=ref,event=pr + + - name: Build and push image + uses: depot/build-push-action@v1 + with: + push: true + build-args: ${{ inputs.build-args }} + file: ${{ inputs.file }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + - name: 
Run Trivy vulnerability scanner + if: ${{ inputs.scan }} + uses: aquasecurity/trivy-action@master + with: + image-ref: ${{ inputs.registry-url }}/flagsmith/${{ inputs.image-name }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/.reusable-docker-e2e-tests.yml b/.github/workflows/.reusable-docker-e2e-tests.yml new file mode 100644 index 000000000000..d85dbc5cf1da --- /dev/null +++ b/.github/workflows/.reusable-docker-e2e-tests.yml @@ -0,0 +1,58 @@ +# reusable workflow +name: Run Docker E2E tests + +on: + workflow_call: + inputs: + api-image: + type: string + description: Core API Docker image to use, e.g., `ghcr.io/flagsmith/flagsmith-api:main` + required: true + e2e-image: + type: string + description: Frontend Docker with E2E capabilities image to use, e.g., `ghcr.io/flagsmith/flagsmith-e2e:main` + required: true + tests: + type: string + description: Space-delimited list of E2E tests to be executed + required: false + default: '' + concurrency: + type: number + description: The concurrent number of browsers to be used on testing + required: false + default: 3 + +jobs: + run-e2e: + name: "E2E${{ inputs.tests && format(': {0}', inputs.tests) || '' }}" + runs-on: ubuntu-latest + + steps: + - name: Cloning repo + uses: actions/checkout@v4 + + - name: Run tests on dockerised frontend + uses: nick-fields/retry@v3 + with: + shell: bash + command: | + cd frontend + make test + max_attempts: 2 + retry_on: error + timeout_minutes: 10 + env: + opts: ${{ inputs.tests }} + API_IMAGE: ${{ inputs.api-image }} + E2E_IMAGE: ${{ inputs.e2e-image }} + E2E_CONCURRENCY: ${{ inputs.concurrency }} + SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }} + GITHUB_ACTION_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + + - name: Output Core API container status and logs + if: failure() + working-directory: frontend + run: | + docker compose -f docker-compose-e2e-tests.yml logs flagsmith-api + shell: bash diff --git a/.github/workflows/api-deploy-production-ecs.yml b/.github/workflows/api-deploy-production-ecs.yml index 8f170854ab64..31332a50a7a6 100644 --- a/.github/workflows/api-deploy-production-ecs.yml +++ b/.github/workflows/api-deploy-production-ecs.yml @@ -36,7 +36,7 @@ jobs: aws_identity_migration_event_bus_rule_id: identity_migration-b03c433 aws_identity_migration_task_role_arn: arn:aws:iam::084060095745:role/task-exec-role-741a7e3 aws_task_definitions_directory_path: infrastructure/aws/production - flagsmith_saml_revision: v1.4.0 + flagsmith_saml_revision: v1.6.0 flagsmith_auth_controller_revision: v0.0.1 flagsmith_rbac_revision: v0.7.0 sse_pgp_private_key: ${{ secrets.SSE_PGP_PRIVATE_KEY }} diff --git a/.github/workflows/api-deploy-staging-ecs.yml b/.github/workflows/api-deploy-staging-ecs.yml index ab666d2a59f9..e43fe68b538f 100644 --- a/.github/workflows/api-deploy-staging-ecs.yml +++ b/.github/workflows/api-deploy-staging-ecs.yml @@ -37,7 +37,7 @@ jobs: aws_identity_migration_event_bus_rule_id: identity_migration-08330ed aws_identity_migration_task_role_arn: arn:aws:iam::302456015006:role/task-exec-role-6fb76f6 aws_task_definitions_directory_path: infrastructure/aws/staging - flagsmith_saml_revision: v1.4.0 + flagsmith_saml_revision: v1.6.0 flagsmith_auth_controller_revision: v0.0.1 flagsmith_rbac_revision: v0.7.0 sse_pgp_private_key: ${{ secrets.SSE_PGP_PRIVATE_KEY }} diff --git a/.github/workflows/api-merge.yml b/.github/workflows/api-merge.yml deleted file mode 100644 index a886d43c9ae6..000000000000 --- a/.github/workflows/api-merge.yml +++ /dev/null 
@@ -1,67 +0,0 @@ -name: API Merge - -on: - merge_group: - types: [checks_requested] - -defaults: - run: - working-directory: api - -jobs: - test: - runs-on: General-Purpose-8c-Runner - name: API Unit Tests - - services: - postgres: - image: postgres:11.12-alpine - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: ['5432:5432'] - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Install poetry - run: pipx install poetry - - - uses: actions/setup-python@v5 - with: - python-version: 3.12 - cache: 'poetry' - - - name: Install Dependencies - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: make install-packages - - - name: Run Linters - run: make lint - - - name: Create analytics database - env: - PGPASSWORD: postgres - run: createdb -h localhost -U postgres -p 5432 analytics - - - name: Check for missing migrations - env: - DOTENV_OVERRIDE_FILE: .env-ci - opts: --no-input --dry-run --check - run: make django-make-migrations - - - name: Run Tests - env: - DOTENV_OVERRIDE_FILE: .env-ci - run: make test - - - name: Upload Coverage - uses: codecov/codecov-action@v4 - env: - PYTHON: '3.12' - with: - token: ${{ secrets.CODECOV_TOKEN }} - env_vars: PYTHON diff --git a/.github/workflows/api-pull-request.yml b/.github/workflows/api-pull-request.yml index 4f42c73ebd35..de0efc679cc9 100644 --- a/.github/workflows/api-pull-request.yml +++ b/.github/workflows/api-pull-request.yml @@ -32,6 +32,11 @@ jobs: ports: ['5432:5432'] options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + strategy: + max-parallel: 2 + matrix: + python-version: ['3.11', '3.12'] + steps: - name: Cloning repo uses: actions/checkout@v4 @@ -41,7 +46,7 @@ jobs: - uses: actions/setup-python@v5 with: - python-version: 3.12 + python-version: ${{ matrix.python-version }} cache: 'poetry' - name: Install Dependencies @@ -70,7 +75,7 @@ jobs: - name: Upload Coverage uses: codecov/codecov-action@v4 env: - PYTHON: 3.12 + PYTHON: ${{ matrix.python-version }} with: token: ${{ secrets.CODECOV_TOKEN }} env_vars: PYTHON diff --git a/.github/workflows/platform-docker-publish-all-features-image.yml b/.github/workflows/platform-docker-publish-all-features-image.yml index b98dd762ee90..2ef5f8b7e693 100644 --- a/.github/workflows/platform-docker-publish-all-features-image.yml +++ b/.github/workflows/platform-docker-publish-all-features-image.yml @@ -6,7 +6,7 @@ on: - '*' env: - FLAGSMITH_SAML_REVISION: v1.4.0 + FLAGSMITH_SAML_REVISION: v1.6.0 FLAGSMITH_RBAC_REVISION: v0.7.0 FLAGSMITH_AUTH_CONTROLLER_REVISION: v0.0.1 diff --git a/.github/workflows/platform-pull-request.yml b/.github/workflows/platform-pull-request.yml index 00409d6e90f5..e55ab17e57a1 100644 --- a/.github/workflows/platform-pull-request.yml +++ b/.github/workflows/platform-pull-request.yml @@ -6,316 +6,99 @@ on: paths-ignore: - docs/** - infrastructure/** + branches-ignore: + - release-please-* jobs: validate-pr-title: name: Validate Conventional Commit title runs-on: ubuntu-latest steps: - - uses: amannn/action-semantic-pull-request@v5 + - name: Check PR Conventional Commit title + uses: amannn/action-semantic-pull-request@v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: types: | # mirrors changelog-sections in the /release-please-config.json - fix feat + fix infra + ci docs - chore - build deps - ci + perf refactor - style test + chore - run-e2e-tests-1: - 
runs-on: ubuntu-latest - name: E2E Local - Segments-1, Environment - - permissions: - id-token: write - contents: read - - services: - postgres: - image: postgres:11.12-alpine - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: flagsmith - ports: ['5432:5432'] - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Run E2E tests against local - uses: ./.github/actions/local-e2e-tests - with: - slack_token: ${{ secrets.SLACK_TOKEN }} - tests: segment-part-1 environment - concurrency: 1 - - run-e2e-tests-2: - runs-on: ubuntu-latest - name: E2E Local - Segments-2 - - permissions: - id-token: write - contents: read - - services: - postgres: - image: postgres:11.12-alpine - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: flagsmith - ports: ['5432:5432'] - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Run E2E tests against local - uses: ./.github/actions/local-e2e-tests - with: - slack_token: ${{ secrets.SLACK_TOKEN }} - tests: segment-part-2 - concurrency: 1 - - run-e2e-tests-3: - runs-on: ubuntu-latest - name: E2E Local - Segments-3, Signup, Flag, Invite, Project - - permissions: - id-token: write - contents: read - - services: - postgres: - image: postgres:11.12-alpine - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: flagsmith - ports: ['5432:5432'] - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Run E2E tests against local - uses: ./.github/actions/local-e2e-tests - with: - slack_token: ${{ secrets.SLACK_TOKEN }} - tests: segment-part-3 signup flag invite project - concurrency: 2 - - run-e2e-tests-4: - runs-on: ubuntu-latest - name: E2E Local - Versioning - - permissions: - id-token: write - contents: read - - services: - postgres: - image: postgres:11.12-alpine - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: flagsmith - ports: ['5432:5432'] - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Run E2E tests against local - uses: ./.github/actions/local-e2e-tests - with: - slack_token: ${{ secrets.SLACK_TOKEN }} - tests: versioning - concurrency: 1 - - run-e2e-versioning-tests-docker-unified: - runs-on: ubuntu-latest - name: E2E Unified - Versioning - - permissions: - id-token: write - contents: read - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Bake Compose - uses: ./.github/actions/e2e-bake-compose - with: - github_actor: ${{github.actor}} - github_token: ${{secrets.GITHUB_TOKEN}} - - - name: Run tests on unified docker image - working-directory: frontend - env: - SLACK_TOKEN: ${{ inputs.slack_token }} - GITHUB_ACTION_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - run: | - docker compose -f docker-compose-e2e-tests.yml run frontend npx cross-env E2E_CONCURRENCY=1 npm run test -- versioning - - run-e2e-segments-1-tests-docker-unified: - runs-on: ubuntu-latest - name: E2E Unified - Segments-1, Environment - - permissions: - id-token: write - contents: read - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Bake Compose - 
uses: ./.github/actions/e2e-bake-compose - with: - github_actor: ${{github.actor}} - github_token: ${{secrets.GITHUB_TOKEN}} - - - name: Run tests on unified docker image - working-directory: frontend - env: - SLACK_TOKEN: ${{ inputs.slack_token }} - GITHUB_ACTION_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - run: | - docker compose -f docker-compose-e2e-tests.yml run frontend npx cross-env E2E_CONCURRENCY=1 npm run test -- segment-part-1 environment - - run-e2e-segments-2-tests-docker-unified: - runs-on: ubuntu-latest - name: E2E Unified - Segments-2 - - permissions: - id-token: write - contents: read - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Bake Compose - uses: ./.github/actions/e2e-bake-compose - with: - github_actor: ${{github.actor}} - github_token: ${{secrets.GITHUB_TOKEN}} - - - name: Run tests on unified docker image - working-directory: frontend - env: - SLACK_TOKEN: ${{ inputs.slack_token }} - GITHUB_ACTION_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - run: | - docker compose -f docker-compose-e2e-tests.yml run frontend npx cross-env E2E_CONCURRENCY=1 npm run test -- segment-part-2 - - run-e2e-other-tests-docker-unified: - runs-on: ubuntu-latest - name: E2E Unified - Segments-3, Signup, Flag, Invite, Project - - permissions: - id-token: write - contents: read - - steps: - - name: Cloning repo - uses: actions/checkout@v4 - - - name: Bake Compose - uses: ./.github/actions/e2e-bake-compose + - name: Auto-label PR with Conventional Commit title + uses: bcoe/conventional-release-labels@v1 with: - github_actor: ${{github.actor}} - github_token: ${{secrets.GITHUB_TOKEN}} - - - name: Run tests on unified docker image - working-directory: frontend - env: - SLACK_TOKEN: ${{ inputs.slack_token }} - GITHUB_ACTION_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - run: | - docker compose -f docker-compose-e2e-tests.yml run frontend npx cross-env E2E_CONCURRENCY=2 npm run test -- segment-part-3 signup flag invite project + type_labels: | + { + "feat": "feature", + "fix": "fix", + "infra": "infrastructure", + "ci": "ci-cd", + "docs": "docs", + "deps": "dependencies", + "perf": "performance", + "refactor": "refactor", + "test": "testing", + "chore": "chore" + } + ignored_types: '[]' docker-build-unified: if: github.event.pull_request.draft == false - name: Test unified docker image build - runs-on: ubuntu-latest - - permissions: - id-token: write - contents: read - - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - - - name: Set up Depot CLI - uses: depot/setup-action@v1 - - - name: Build - id: docker_build - uses: depot/build-push-action@v1 - with: - push: false - tags: flagsmith/flagsmith:testing + name: Build Unified Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + file: Dockerfile + image-name: flagsmith docker-build-api: if: github.event.pull_request.draft == false - name: Test api docker image build - runs-on: ubuntu-latest - - permissions: - id-token: write - contents: read - - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - - - name: Set up Depot CLI - uses: depot/setup-action@v1 - - - name: Build - id: docker_build - uses: depot/build-push-action@v1 - with: - file: api/Dockerfile - push: false - tags: flagsmith/flagsmith-api:testing + name: Build API Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + file: api/Dockerfile + image-name: flagsmith-api 
docker-build-frontend: if: github.event.pull_request.draft == false - name: Test frontend docker image build - runs-on: ubuntu-latest + name: Build Frontend Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + file: frontend/Dockerfile + image-name: flagsmith-frontend - permissions: - id-token: write - contents: read - - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - - - name: Set up Depot CLI - uses: depot/setup-action@v1 - - - name: Build - id: docker_build - uses: depot/build-push-action@v1 - with: - file: frontend/Dockerfile - push: false - tags: flagsmith/flagsmith-frontend:testing + docker-build-e2e: + if: github.event.pull_request.draft == false + name: Build E2E Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + file: frontend/Dockerfile.e2e + image-name: flagsmith-e2e + scan: false + + run-e2e-tests: + needs: [docker-build-api, docker-build-e2e] + uses: ./.github/workflows/.reusable-docker-e2e-tests.yml + with: + api-image: ${{ needs.docker-build-api.outputs.image }} + e2e-image: ${{ needs.docker-build-e2e.outputs.image }} + concurrency: ${{ matrix.args.concurrency }} + tests: ${{ matrix.args.tests }} + + strategy: + matrix: + args: + - tests: segment-part-1 environment + concurrency: 1 + - tests: segment-part-2 + concurrency: 1 + - tests: segment-part-3 signup flag invite project + concurrency: 2 + - tests: versioning + concurrency: 1 diff --git a/.github/workflows/platform-test-merge-to-main.yml b/.github/workflows/platform-test-merge-to-main.yml index 7dea29cf7f61..aefc752e4087 100644 --- a/.github/workflows/platform-test-merge-to-main.yml +++ b/.github/workflows/platform-test-merge-to-main.yml @@ -6,47 +6,24 @@ on: - main jobs: - run-e2e-tests: - runs-on: ubuntu-latest - name: Full E2E tests - - services: - postgres: - image: postgres:11.12-alpine - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: flagsmith - ports: ['5432:5432'] - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - permissions: - id-token: write - contents: read - - steps: - - name: Cloning repo - uses: actions/checkout@v4 + docker-build-api: + name: Build API Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + file: api/Dockerfile + image-name: flagsmith-api + + docker-build-e2e: + name: Build E2E Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + file: frontend/Dockerfile.e2e + image-name: flagsmith-e2e + scan: false - - name: Run Local API - id: run-local-api - uses: ./.github/actions/run-local-api - with: - e2e_test_token: some-token - # As per https://stackoverflow.com/q/65497331/421808 172.17.0.1 seems like the only way to resolve host DB - database_url: postgres://postgres:postgres@172.17.0.1:5432/flagsmith - disable_analytics_features: true - - - name: Run E2E tests against local - uses: ./.github/actions/e2e-tests - with: - e2e_test_token: some-token - slack_token: ${{ secrets.SLACK_TOKEN }} - environment: local - github_action_url: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - - - name: Output API logs - if: failure() - env: - API_CONTAINER_ID: ${{ steps.run-local-api.outputs.containerId }} - run: docker logs $API_CONTAINER_ID + run-e2e-tests: + needs: [docker-build-api, docker-build-e2e] + uses: ./.github/workflows/.reusable-docker-e2e-tests.yml + with: + api-image: ${{ needs.docker-build-api.outputs.image }} + e2e-image: ${{ needs.docker-build-e2e.outputs.image }} diff --git 
a/.github/workflows/uffizzi-build.yml b/.github/workflows/uffizzi-build.yml index 98f1b56d6679..9d45afb69e11 100644 --- a/.github/workflows/uffizzi-build.yml +++ b/.github/workflows/uffizzi-build.yml @@ -4,6 +4,8 @@ on: types: [opened, synchronize, reopened, closed] paths-ignore: - docs/** + branches-ignore: + - release-please-* jobs: build-flagsmith: diff --git a/.release-please-manifest.json b/.release-please-manifest.json index c30b26bdaaba..29ced278020a 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.116.0" + ".": "2.119.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ee5f418726e1..790ee3015e00 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,133 @@ # Changelog +## [2.119.1](https://github.com/Flagsmith/flagsmith/compare/v2.119.0...v2.119.1) (2024-06-06) + + +### Bug Fixes + +* use python3.11 as base image ([#4121](https://github.com/Flagsmith/flagsmith/issues/4121)) ([418e026](https://github.com/Flagsmith/flagsmith/commit/418e026796863ba361c8750c8da6f2abd5e1b283)) + + +### Infrastructure (Flagsmith SaaS Only) + +* run influxdb feature evaluation in thread ([#4125](https://github.com/Flagsmith/flagsmith/issues/4125)) ([b135b38](https://github.com/Flagsmith/flagsmith/commit/b135b38703be391ec68a841a8d9d11635742cf89)) +* task processor settings tweaks ([#4126](https://github.com/Flagsmith/flagsmith/issues/4126)) ([ea96db9](https://github.com/Flagsmith/flagsmith/commit/ea96db9f70e90476fb2e829715571d063ca615b8)) + +## [2.119.0](https://github.com/Flagsmith/flagsmith/compare/v2.118.1...v2.119.0) (2024-06-06) + + +### Features + +* Improve API key UX ([#4102](https://github.com/Flagsmith/flagsmith/issues/4102)) ([1600ed7](https://github.com/Flagsmith/flagsmith/commit/1600ed7633827051b92bf94e83b01bc493473da9)) + + +### Bug Fixes + +* Add autocomplete for login ([#4103](https://github.com/Flagsmith/flagsmith/issues/4103)) ([5ffdd51](https://github.com/Flagsmith/flagsmith/commit/5ffdd51d61b2840b927eca0c6ca26dd8c06e7382)) +* announcement width ([#4122](https://github.com/Flagsmith/flagsmith/issues/4122)) ([f6ac4e5](https://github.com/Flagsmith/flagsmith/commit/f6ac4e52b9c3c0457aef3e096d82dbef5c64a53c)) +* delete environment refreshing list ([#4107](https://github.com/Flagsmith/flagsmith/issues/4107)) ([902b3cd](https://github.com/Flagsmith/flagsmith/commit/902b3cdbd26fc6c4c7dd53e9d0094773365a5b8f)) +* environment click sizes ([#4104](https://github.com/Flagsmith/flagsmith/issues/4104)) ([9d1622f](https://github.com/Flagsmith/flagsmith/commit/9d1622f7e6445b1b88ad5a369ea6174746458910)) +* Environment creating state ([#4060](https://github.com/Flagsmith/flagsmith/issues/4060)) ([652af8f](https://github.com/Flagsmith/flagsmith/commit/652af8f9f2f87e65eb06187c17a7979075f0a57a)) +* Flag update scheduling ([#4115](https://github.com/Flagsmith/flagsmith/issues/4115)) ([e90d248](https://github.com/Flagsmith/flagsmith/commit/e90d248de1026200c6134f32b7b104e62ddd69ae)) +* Limit feature paging to 50 ([#4120](https://github.com/Flagsmith/flagsmith/issues/4120)) ([c14c3a8](https://github.com/Flagsmith/flagsmith/commit/c14c3a86995e0b44ee080322dda41633d101e56e)) +* Protect inputs from autofill ([#3980](https://github.com/Flagsmith/flagsmith/issues/3980)) ([dad3041](https://github.com/Flagsmith/flagsmith/commit/dad304101e073fd7767b8623751f52e4d806a330)) +* Reload integrations on create ([#4106](https://github.com/Flagsmith/flagsmith/issues/4106)) 
([5155018](https://github.com/Flagsmith/flagsmith/commit/5155018a38d717f167c9e9794067a4013da66c3a)) +* save empty segment overrides request ([#4112](https://github.com/Flagsmith/flagsmith/issues/4112)) ([6dbcb4a](https://github.com/Flagsmith/flagsmith/commit/6dbcb4aadace2dccb5c32adfbe3f145ac894982d)) +* show audit logs url ([#4123](https://github.com/Flagsmith/flagsmith/issues/4123)) ([bc256ee](https://github.com/Flagsmith/flagsmith/commit/bc256ee6f1788d00a31ad9ee566a895e4484c9f9)) +* **versioning:** scheduled changes incorrectly considered live ([#4119](https://github.com/Flagsmith/flagsmith/issues/4119)) ([6856e64](https://github.com/Flagsmith/flagsmith/commit/6856e64ae1028336801fc1c6acd3fd5da8bff8db)) +* **versioning:** send live from when creating versions for change requests ([#4116](https://github.com/Flagsmith/flagsmith/issues/4116)) ([765b12a](https://github.com/Flagsmith/flagsmith/commit/765b12a8c049ec65956707c0ab5792376d5dee47)) +* **versioning:** use version live from ([#4118](https://github.com/Flagsmith/flagsmith/issues/4118)) ([0345aff](https://github.com/Flagsmith/flagsmith/commit/0345aff4146596f29ff294941fe0ee27ad6db735)) + + +### Infrastructure (Flagsmith SaaS Only) + +* reduce Sentry sampling rate ([#4098](https://github.com/Flagsmith/flagsmith/issues/4098)) ([da3f186](https://github.com/Flagsmith/flagsmith/commit/da3f1863a5c0c90b99bf30a8991b9617a037e8ea)) + +## [2.118.1](https://github.com/Flagsmith/flagsmith/compare/v2.118.0...v2.118.1) (2024-06-03) + + +### Bug Fixes + +* **audit:** audit and history UI tweaks ([#4092](https://github.com/Flagsmith/flagsmith/issues/4092)) ([e65dc34](https://github.com/Flagsmith/flagsmith/commit/e65dc345850c4c656b55bea9337571994a706ba7)) +* facilitate FE display of environment version from audit log ([#4077](https://github.com/Flagsmith/flagsmith/issues/4077)) ([be9b7ce](https://github.com/Flagsmith/flagsmith/commit/be9b7ce1f11a343b1c1d2566d851f11e909cebbc)) +* select propagation ([#4085](https://github.com/Flagsmith/flagsmith/issues/4085)) ([0e16068](https://github.com/Flagsmith/flagsmith/commit/0e160684e37d5640d4d2be94036765d2d7d5af5d)) +* **sentry-FLAGSMITH-API-4FY:** resolve metadata segment n+1 ([#4030](https://github.com/Flagsmith/flagsmith/issues/4030)) ([a22f86c](https://github.com/Flagsmith/flagsmith/commit/a22f86c6a5555960be8227fecf6afb7f8b2d2011)) +* **versioning:** ensure get_previous_version returns previous version, not latest version ([#4083](https://github.com/Flagsmith/flagsmith/issues/4083)) ([22d371b](https://github.com/Flagsmith/flagsmith/commit/22d371bd60650abce4c692e1b3032bbd5c1b8e7f)) +* **versioning:** ensure that audit log record is created when committing versions via CR ([#4091](https://github.com/Flagsmith/flagsmith/issues/4091)) ([8246dca](https://github.com/Flagsmith/flagsmith/commit/8246dca4f468e851ee5bf0544ca0e4ba0409712e)) +* **versioning:** prevent FeatureSegment from writing audit log on delete when v2 versioning enabled ([#4088](https://github.com/Flagsmith/flagsmith/issues/4088)) ([60c0748](https://github.com/Flagsmith/flagsmith/commit/60c07480b298e8a32321e631fe0ec178cdc5f017)) + +## [2.118.0](https://github.com/Flagsmith/flagsmith/compare/v2.117.1...v2.118.0) (2024-05-31) + + +### Features + +* add audit log when environment feature version is published ([#4064](https://github.com/Flagsmith/flagsmith/issues/4064)) ([88cfc76](https://github.com/Flagsmith/flagsmith/commit/88cfc762f201967f5a4a2b362eecae444b1a2b19)) + + +### Bug Fixes + +* don't create audit log for FeatureStateValue when not published 
([#4065](https://github.com/Flagsmith/flagsmith/issues/4065)) ([8b73b5c](https://github.com/Flagsmith/flagsmith/commit/8b73b5c5bb4336d5fe5947e3fe3004736c942ae2)) +* versioned remove segment override ([#4063](https://github.com/Flagsmith/flagsmith/issues/4063)) ([e4cd25a](https://github.com/Flagsmith/flagsmith/commit/e4cd25ae9d79c66447377bb087061193a6264eff)) + +## [2.117.1](https://github.com/Flagsmith/flagsmith/compare/v2.117.0...v2.117.1) (2024-05-30) + + +### Bug Fixes + +* Validate feature values before saving ([#4043](https://github.com/Flagsmith/flagsmith/issues/4043)) ([fef9f8f](https://github.com/Flagsmith/flagsmith/commit/fef9f8fdcc3c7153bdc6752cfbfa5df2cffe3b62)) + +## [2.117.0](https://github.com/Flagsmith/flagsmith/compare/v2.116.3...v2.117.0) (2024-05-30) + + +### Features + +* Add api usage metrics for different periods ([#3870](https://github.com/Flagsmith/flagsmith/issues/3870)) ([50cc369](https://github.com/Flagsmith/flagsmith/commit/50cc369d26d7ec5c418faadcd1079c1e027a6f0e)) +* Add endpoint to fetch GitHub repository contributors ([#4013](https://github.com/Flagsmith/flagsmith/issues/4013)) ([6f321d4](https://github.com/Flagsmith/flagsmith/commit/6f321d45898d2c9b7159388df6b850dd873ee68d)) +* Add grace period to api usage billing ([#4038](https://github.com/Flagsmith/flagsmith/issues/4038)) ([3b61f83](https://github.com/Flagsmith/flagsmith/commit/3b61f831d44ed6a7d31374fc8fd42017339fed84)) +* **analytics:** Add command to migrate analytics data to pg ([#3981](https://github.com/Flagsmith/flagsmith/issues/3981)) ([848db5a](https://github.com/Flagsmith/flagsmith/commit/848db5adc540023f5911826ae10f9d43032cad02)) +* Implement be search and lazy loading for GitHub resources ([#3987](https://github.com/Flagsmith/flagsmith/issues/3987)) ([c896c50](https://github.com/Flagsmith/flagsmith/commit/c896c507a6ead8075b3c77d3aa834c057c0cc909)) +* Improvements in the GitHub integration BE ([#3962](https://github.com/Flagsmith/flagsmith/issues/3962)) ([59ddfba](https://github.com/Flagsmith/flagsmith/commit/59ddfba70103d021dab1e6ad5726d21f7c3802eb)) + + +### Bug Fixes + +* Add support for versioning v2 on GitHub resource linking ([#4015](https://github.com/Flagsmith/flagsmith/issues/4015)) ([edb4a75](https://github.com/Flagsmith/flagsmith/commit/edb4a7591bfb11cedb01a63a3fe23d2d4f2c63c8)) +* GitHub repos unique constraint and delete ([#4037](https://github.com/Flagsmith/flagsmith/issues/4037)) ([7454e4a](https://github.com/Flagsmith/flagsmith/commit/7454e4ae7f4df36ff0fb605b6149c8542c5428d6)) +* **sentry-FLAGSMITH-API-4FZ:** fix PATCH for segments ([#4029](https://github.com/Flagsmith/flagsmith/issues/4029)) ([3c43bb8](https://github.com/Flagsmith/flagsmith/commit/3c43bb8113c21558583271e13a7a11264a5c4955)) +* Set api usage billing to 100k ([#3996](https://github.com/Flagsmith/flagsmith/issues/3996)) ([d86f8e7](https://github.com/Flagsmith/flagsmith/commit/d86f8e7857f882aaa4dfae22760d6e9b3e594246)) +* Set billing starts at to reasonable default for API usage notifications ([#4054](https://github.com/Flagsmith/flagsmith/issues/4054)) ([515b34c](https://github.com/Flagsmith/flagsmith/commit/515b34c404b2070f28b128a29eba0bdda8f7a71b)) +* Set billing term starts at 30 days for null values ([#4053](https://github.com/Flagsmith/flagsmith/issues/4053)) ([84c0835](https://github.com/Flagsmith/flagsmith/commit/84c0835d710a57cc940b17e4ad22def8307419be)) +* Setting `LOG_FORMAT: json` does not write stack traces to logs ([#4040](https://github.com/Flagsmith/flagsmith/issues/4040)) 
([9e2ffd2](https://github.com/Flagsmith/flagsmith/commit/9e2ffd2e3b1cf52f3f5773fc312a429413481224)) +* Switch function argument to date start ([#4052](https://github.com/Flagsmith/flagsmith/issues/4052)) ([d8f48a7](https://github.com/Flagsmith/flagsmith/commit/d8f48a7ea74e7b2668880513ca63045209ca4d80)) + + +### Infrastructure (Flagsmith SaaS Only) + +* add influx token secret ([#4048](https://github.com/Flagsmith/flagsmith/issues/4048)) ([1963e03](https://github.com/Flagsmith/flagsmith/commit/1963e03c23545ff91bcd5bbc185baa6b309fdd5e)) +* remove duplicate secret definition ([#4049](https://github.com/Flagsmith/flagsmith/issues/4049)) ([adc6429](https://github.com/Flagsmith/flagsmith/commit/adc6429b0c6426ac9b1ef83339f58fee08fb70a4)) +* Setup InfluxDB on staging for analytics ([#4042](https://github.com/Flagsmith/flagsmith/issues/4042)) ([d9d503a](https://github.com/Flagsmith/flagsmith/commit/d9d503a8aa5c31cb7359946c07b192d70a4f930c)) + +## [2.116.3](https://github.com/Flagsmith/flagsmith/compare/v2.116.2...v2.116.3) (2024-05-22) + + +### Bug Fixes + +* **versioning:** webhooks not triggered when new version published ([#3953](https://github.com/Flagsmith/flagsmith/issues/3953)) ([fb2191b](https://github.com/Flagsmith/flagsmith/commit/fb2191b34fef9b6d45d7eda17e22b77d826d4a19)) + +## [2.116.2](https://github.com/Flagsmith/flagsmith/compare/v2.116.1...v2.116.2) (2024-05-22) + + +### Bug Fixes + +* **versioning:** segment overrides limit ([#4007](https://github.com/Flagsmith/flagsmith/issues/4007)) ([918b731](https://github.com/Flagsmith/flagsmith/commit/918b73148e180d91e794ea8b840310b61ffe6300)) + +## [2.116.1](https://github.com/Flagsmith/flagsmith/compare/v2.116.0...v2.116.1) (2024-05-21) + + +### Bug Fixes + +* **versioning:** fix cloning environments using v2 versioning ([#3999](https://github.com/Flagsmith/flagsmith/issues/3999)) ([eef02fb](https://github.com/Flagsmith/flagsmith/commit/eef02fb75de85a75b169561b9055b533f3c71bfb)) + ## [2.116.0](https://github.com/Flagsmith/flagsmith/compare/v2.115.0...v2.116.0) (2024-05-20) diff --git a/Dockerfile b/Dockerfile index 5bbb78c1709d..d983ae6e66ac 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,7 +20,7 @@ ENV STATIC_ASSET_CDN_URL=/static/ RUN cd frontend && npm run bundledjango # Step 2 - Build Python virtualenv -FROM python:3.12 as build-python +FROM python:3.11 as build-python WORKDIR /app COPY api/pyproject.toml api/poetry.lock api/Makefile ./ @@ -37,7 +37,7 @@ ARG POETRY_OPTS RUN make install-packages opts="${POETRY_OPTS}" # Step 3 - Build Django Application -FROM python:3.12-slim as application +FROM python:3.11-slim as application WORKDIR /app # Install SAML dependency if required @@ -49,7 +49,7 @@ ARG TARGETARCH RUN if [ "${TARGETARCH}" != "amd64" ]; then apt-get update && apt-get install -y libpq-dev && rm -rf /var/lib/apt/lists/*; fi; # Copy the python venv from step 2 -COPY --from=build-python /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages +COPY --from=build-python /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages # Copy the bin folder as well to copy the executables created in package installation COPY --from=build-python /usr/local/bin /usr/local/bin diff --git a/api/Dockerfile b/api/Dockerfile index 85e222b0beac..0e7382b4bc40 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,6 +1,6 @@ # Step 1 - Build Python virtualenv -FROM python:3.12 as build-python +FROM python:3.11 as build-python WORKDIR /app COPY api/pyproject.toml api/poetry.lock api/Makefile ./ @@ -18,7 
+18,7 @@ ARG POETRY_OPTS RUN make install-packages opts="${POETRY_OPTS}" # Step 2 - Build Django Application -FROM python:3.12-slim as application +FROM python:3.11-slim as application WORKDIR /app # Install SAML dependency if required @@ -37,7 +37,7 @@ RUN --mount=type=secret,id=sse_pgp_pkey if [ -f /run/secrets/sse_pgp_pkey ]; th # Copy the python venv from step 2 -COPY --from=build-python /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages +COPY --from=build-python /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages # Copy the bin folder as well to copy the executables created in package installation COPY --from=build-python /usr/local/bin /usr/local/bin diff --git a/api/app/settings/common.py b/api/app/settings/common.py index 996e16c97ca6..38bc38ebb1f8 100644 --- a/api/app/settings/common.py +++ b/api/app/settings/common.py @@ -127,7 +127,7 @@ "api_keys", "features.feature_external_resources", # 2FA - "trench", + "custom_auth.mfa.trench", # health check plugins "health_check", "health_check.db", @@ -750,7 +750,6 @@ } TRENCH_AUTH = { - "FROM_EMAIL": DEFAULT_FROM_EMAIL, "BACKUP_CODES_QUANTITY": 5, "BACKUP_CODES_LENGTH": 10, # keep (quantity * length) under 200 "BACKUP_CODES_CHARACTERS": ( @@ -759,6 +758,7 @@ "DEFAULT_VALIDITY_PERIOD": 30, "CONFIRM_BACKUP_CODES_REGENERATION_WITH_CODE": True, "APPLICATION_ISSUER_NAME": "app.bullet-train.io", + "ENCRYPT_BACKUP_CODES": True, "MFA_METHODS": { "app": { "VERBOSE_NAME": "TOTP App", diff --git a/api/app_analytics/analytics_db_service.py b/api/app_analytics/analytics_db_service.py index d8e6effd0b41..8c2d191eadc7 100644 --- a/api/app_analytics/analytics_db_service.py +++ b/api/app_analytics/analytics_db_service.py @@ -1,4 +1,4 @@ -from datetime import date, timedelta +from datetime import date, datetime, timedelta from typing import List from app_analytics.dataclasses import FeatureEvaluationData, UsageData @@ -14,34 +14,107 @@ FeatureEvaluationBucket, Resource, ) +from dateutil.relativedelta import relativedelta from django.conf import settings from django.db.models import Sum from django.utils import timezone from environments.models import Environment from features.models import Feature +from organisations.models import Organisation -ANALYTICS_READ_BUCKET_SIZE = 15 +from . 
import constants +from .types import PERIOD_TYPE def get_usage_data( - organisation, environment_id=None, project_id=None -) -> List[UsageData]: + organisation: Organisation, + environment_id: int | None = None, + project_id: int | None = None, + period: PERIOD_TYPE | None = None, +) -> list[UsageData]: + now = timezone.now() + + date_stop = date_start = None + period_starts_at = period_ends_at = None + + match period: + case constants.CURRENT_BILLING_PERIOD: + if not getattr(organisation, "subscription_information_cache", None): + return [] + sub_cache = organisation.subscription_information_cache + starts_at = sub_cache.current_billing_term_starts_at or now - timedelta( + days=30 + ) + month_delta = relativedelta(now, starts_at).months + period_starts_at = relativedelta(months=month_delta) + starts_at + period_ends_at = now + date_start = f"-{(now - period_starts_at).days}d" + date_stop = "now()" + + case constants.PREVIOUS_BILLING_PERIOD: + if not getattr(organisation, "subscription_information_cache", None): + return [] + sub_cache = organisation.subscription_information_cache + starts_at = sub_cache.current_billing_term_starts_at or now - timedelta( + days=30 + ) + month_delta = relativedelta(now, starts_at).months - 1 + month_delta += relativedelta(now, starts_at).years * 12 + period_starts_at = relativedelta(months=month_delta) + starts_at + period_ends_at = relativedelta(months=month_delta + 1) + starts_at + date_start = f"-{(now - period_starts_at).days}d" + date_stop = f"-{(now - period_ends_at).days}d" + + case constants.NINETY_DAY_PERIOD: + period_starts_at = now - relativedelta(days=90) + period_ends_at = now + date_start = "-90d" + date_stop = "now()" + if settings.USE_POSTGRES_FOR_ANALYTICS: - return get_usage_data_from_local_db( - organisation, environment_id=environment_id, project_id=project_id - ) - return get_usage_data_from_influxdb( - organisation.id, environment_id=environment_id, project_id=project_id - ) + kwargs = { + "organisation": organisation, + "environment_id": environment_id, + "project_id": project_id, + } + + if period_starts_at: + assert period_ends_at + kwargs["date_start"] = period_starts_at + kwargs["date_stop"] = period_ends_at + + return get_usage_data_from_local_db(**kwargs) + + kwargs = { + "organisation_id": organisation.id, + "environment_id": environment_id, + "project_id": project_id, + } + + if date_start: + assert date_stop + kwargs["date_start"] = date_start + kwargs["date_stop"] = date_stop + + return get_usage_data_from_influxdb(**kwargs) def get_usage_data_from_local_db( - organisation, environment_id=None, project_id=None, period: int = 30 + organisation: Organisation, + environment_id: int | None = None, + project_id: int | None = None, + date_start: datetime | None = None, + date_stop: datetime | None = None, ) -> List[UsageData]: + if date_start is None: + date_start = timezone.now() - timedelta(days=30) + if date_stop is None: + date_stop = timezone.now() + qs = APIUsageBucket.objects.filter( environment_id__in=_get_environment_ids_for_org(organisation), - bucket_size=ANALYTICS_READ_BUCKET_SIZE, + bucket_size=constants.ANALYTICS_READ_BUCKET_SIZE, ) if project_id: qs = qs.filter(project_id=project_id) @@ -50,8 +123,8 @@ def get_usage_data_from_local_db( qs = ( qs.filter( - created_at__date__lte=timezone.now(), - created_at__date__gt=timezone.now() - timedelta(days=30), + created_at__date__lte=date_stop, + created_at__date__gt=date_start, ) .order_by("created_at__date") .values("created_at__date", "resource") @@ -80,7 +153,7 @@ def 
get_total_events_count(organisation) -> int: environment_id__in=_get_environment_ids_for_org(organisation), created_at__date__lte=date.today(), created_at__date__gt=date.today() - timedelta(days=30), - bucket_size=ANALYTICS_READ_BUCKET_SIZE, + bucket_size=constants.ANALYTICS_READ_BUCKET_SIZE, ).aggregate(total_count=Sum("total_count"))["total_count"] else: count = get_events_for_organisation(organisation.id) @@ -105,7 +178,7 @@ def get_feature_evaluation_data_from_local_db( feature_evaluation_data = ( FeatureEvaluationBucket.objects.filter( environment_id=environment_id, - bucket_size=ANALYTICS_READ_BUCKET_SIZE, + bucket_size=constants.ANALYTICS_READ_BUCKET_SIZE, feature_name=feature.name, created_at__date__lte=timezone.now(), created_at__date__gt=timezone.now() - timedelta(days=period), diff --git a/api/app_analytics/constants.py b/api/app_analytics/constants.py new file mode 100644 index 000000000000..27632a2b98c3 --- /dev/null +++ b/api/app_analytics/constants.py @@ -0,0 +1,6 @@ +ANALYTICS_READ_BUCKET_SIZE = 15 + +# get_usage_data() related period constants +CURRENT_BILLING_PERIOD = "current_billing_period" +PREVIOUS_BILLING_PERIOD = "previous_billing_period" +NINETY_DAY_PERIOD = "90_day_period" diff --git a/api/app_analytics/influxdb_wrapper.py b/api/app_analytics/influxdb_wrapper.py index 05ea0218b6e1..0e9f65bc1d6c 100644 --- a/api/app_analytics/influxdb_wrapper.py +++ b/api/app_analytics/influxdb_wrapper.py @@ -76,7 +76,7 @@ def write(self): @staticmethod def influx_query_manager( - date_range: str = "30d", + date_start: str = "-30d", date_stop: str = "now()", drop_columns: typing.Tuple[str, ...] = DEFAULT_DROP_COLUMNS, filters: str = "|> filter(fn:(r) => r._measurement == 'api_call')", @@ -88,7 +88,7 @@ def influx_query_manager( query = ( f'from(bucket:"{bucket}")' - f" |> range(start: -{date_range}, stop: {date_stop})" + f" |> range(start: {date_start}, stop: {date_stop})" f" {filters}" f" |> drop(columns: {drop_columns_input})" f"{extra}" @@ -103,7 +103,9 @@ def influx_query_manager( return [] -def get_events_for_organisation(organisation_id: id, date_range: str = "30d") -> int: +def get_events_for_organisation( + organisation_id: id, date_start: str = "-30d", date_stop: str = "now()" +) -> int: """ Query influx db for usage for given organisation id @@ -126,7 +128,8 @@ def get_events_for_organisation(organisation_id: id, date_range: str = "30d") -> "environment_id", ), extra="|> sum()", - date_range=date_range, + date_start=date_start, + date_stop=date_stop, ) total = 0 @@ -137,7 +140,9 @@ def get_events_for_organisation(organisation_id: id, date_range: str = "30d") -> return total -def get_event_list_for_organisation(organisation_id: int, date_range: str = "30d"): +def get_event_list_for_organisation( + organisation_id: int, date_start: str = "-30d", date_stop: str = "now()" +) -> tuple[dict[str, list[int]], list[str]]: """ Query influx db for usage for given organisation id @@ -149,14 +154,20 @@ def get_event_list_for_organisation(organisation_id: int, date_range: str = "30d filters=f'|> filter(fn:(r) => r._measurement == "api_call") \ |> filter(fn: (r) => r["organisation_id"] == "{organisation_id}")', extra="|> aggregateWindow(every: 24h, fn: sum)", - date_range=date_range, + date_start=date_start, + date_stop=date_stop, ) dataset = defaultdict(list) labels = [] for result in results: for record in result.records: dataset[record["resource"]].append(record["_value"]) - required_records = int(date_range[:-1]) + 1 + if date_stop == "now()": + required_records = 
abs(int(date_start[:-1])) + 1 + else: + required_records = ( + abs(int(date_start[:-1])) - abs(int(date_stop[:-1])) + 1 + ) if len(labels) != required_records: labels.append(record.values["_time"].strftime("%Y-%m-%d")) return dataset, labels @@ -166,7 +177,9 @@ def get_multiple_event_list_for_organisation( organisation_id: int, project_id: int = None, environment_id: int = None, -): + date_start: str = "-30d", + date_stop: str = "now()", +) -> list[UsageData]: """ Query influx db for usage for given organisation id @@ -176,7 +189,6 @@ def get_multiple_event_list_for_organisation( :return: a number of requests for flags, traits, identities, environment-document """ - filters = [ 'r._measurement == "api_call"', f'r["organisation_id"] == "{organisation_id}"', @@ -189,6 +201,8 @@ def get_multiple_event_list_for_organisation( filters.append(f'r["environment_id"] == "{environment_id}"') results = InfluxDBWrapper.influx_query_manager( + date_start=date_start, + date_stop=date_stop, filters=build_filter_string(filters), extra="|> aggregateWindow(every: 24h, fn: sum)", ) @@ -201,14 +215,23 @@ def get_multiple_event_list_for_organisation( for i, record in enumerate(result.records): dataset[i][record.values["resource"].capitalize()] = record.values["_value"] dataset[i]["name"] = record.values["_time"].strftime("%Y-%m-%d") + return dataset def get_usage_data( - organisation_id: int, project_id: int = None, environment_id=None -) -> typing.List[UsageData]: + organisation_id: int, + project_id: int | None = None, + environment_id: int | None = None, + date_start: str = "-30d", + date_stop: str = "now()", +) -> list[UsageData]: events_list = get_multiple_event_list_for_organisation( - organisation_id, project_id, environment_id + organisation_id=organisation_id, + project_id=project_id, + environment_id=environment_id, + date_start=date_start, + date_stop=date_stop, ) return UsageDataSchema(many=True).load(events_list) @@ -216,9 +239,9 @@ def get_usage_data( def get_multiple_event_list_for_feature( environment_id: int, feature_name: str, - period: str = "30d", + date_start: str = "-30d", aggregate_every: str = "24h", -) -> typing.List[dict]: +) -> list[dict]: """ Get aggregated request data for the given feature in a given environment across all time, aggregated into time windows of length defined by the period argument. @@ -237,14 +260,14 @@ def get_multiple_event_list_for_feature( :param environment_id: an id of the environment to get usage for :param feature_name: the name of the feature to get usage for - :param period: the influx time period to filter on, e.g. 30d, 7d, etc. + :param date_start: the influx time period to filter on, e.g. -30d, -7d, etc. :param aggregate_every: the influx time period to aggregate the data by, e.g. 
24h :return: a list of dicts with feature and request count in a specific environment """ results = InfluxDBWrapper.influx_query_manager( - date_range=period, + date_start=date_start, filters=f'|> filter(fn:(r) => r._measurement == "feature_evaluation") \ |> filter(fn: (r) => r["_field"] == "request_count") \ |> filter(fn: (r) => r["environment_id"] == "{environment_id}") \ @@ -271,16 +294,18 @@ def get_feature_evaluation_data( feature_name: str, environment_id: int, period: str = "30d" ) -> typing.List[FeatureEvaluationData]: data = get_multiple_event_list_for_feature( - feature_name=feature_name, environment_id=environment_id, period=period + feature_name=feature_name, + environment_id=environment_id, + date_start=f"-{period}", ) return FeatureEvaluationDataSchema(many=True).load(data) -def get_top_organisations(date_range: str, limit: str = ""): +def get_top_organisations(date_start: str, limit: str = ""): """ Query influx db top used organisations - :param date_range: data range for top organisations + :param date_start: Start of the date range for top organisations :param limit: limit for query @@ -289,9 +314,9 @@ def get_top_organisations(date_range: str, limit: str = ""): if limit: limit = f"|> limit(n:{limit})" - bucket = range_bucket_mappings[date_range] + bucket = range_bucket_mappings[date_start] results = InfluxDBWrapper.influx_query_manager( - date_range=date_range, + date_start=date_start, bucket=bucket, filters='|> filter(fn:(r) => r._measurement == "api_call") \ |> filter(fn: (r) => r["_field"] == "request_count")', @@ -318,7 +343,7 @@ def get_top_organisations(date_range: str, limit: str = ""): return dataset -def get_current_api_usage(organisation_id: int, date_range: str) -> int: +def get_current_api_usage(organisation_id: int, date_start: str) -> int: """ Query influx db for api usage @@ -330,7 +355,7 @@ def get_current_api_usage(organisation_id: int, date_range: str) -> int: bucket = read_bucket results = InfluxDBWrapper.influx_query_manager( - date_range=date_range, + date_start=date_start, bucket=bucket, filters=build_filter_string( [ diff --git a/api/app_analytics/management/commands/migrate_analytics.py b/api/app_analytics/management/commands/migrate_analytics.py new file mode 100644 index 000000000000..cb01f1f41c9a --- /dev/null +++ b/api/app_analytics/management/commands/migrate_analytics.py @@ -0,0 +1,19 @@ +import argparse +from typing import Any + +from app_analytics.migrate_to_pg import migrate_feature_evaluations +from django.core.management import BaseCommand + + +class Command(BaseCommand): + def add_arguments(self, parser: argparse.ArgumentParser) -> None: + parser.add_argument( + "--migrate-till", + type=int, + dest="migrate_till", + help="Migrate data till n days ago", + default=30, + ) + + def handle(self, *args: Any, migrate_till: int, **options: Any) -> None: + migrate_feature_evaluations(migrate_till) diff --git a/api/app_analytics/migrate_to_pg.py b/api/app_analytics/migrate_to_pg.py new file mode 100644 index 000000000000..c90ddbc17fdd --- /dev/null +++ b/api/app_analytics/migrate_to_pg.py @@ -0,0 +1,28 @@ +from app_analytics.constants import ANALYTICS_READ_BUCKET_SIZE +from app_analytics.influxdb_wrapper import influxdb_client, read_bucket +from app_analytics.models import FeatureEvaluationBucket + + +def migrate_feature_evaluations(migrate_till: int = 30) -> None: + query_api = influxdb_client.query_api() + + for i in range(migrate_till): + range_start = f"-{i+1}d" + range_stop = f"-{i}d" + query = f"from (bucket: {read_bucket}) |> range(start: 
{range_start}, stop: {range_stop})" + + result = query_api.query(query) + + feature_evaluations = [] + for table in result: + for record in table.records: + feature_evaluations.append( + FeatureEvaluationBucket( + feature_name=record.values["feature_id"], + bucket_size=ANALYTICS_READ_BUCKET_SIZE, + created_at=record.get_time(), + total_count=record.get_value(), + environment_id=record.values["environment_id"], + ) + ) + FeatureEvaluationBucket.objects.bulk_create(feature_evaluations) diff --git a/api/app_analytics/serializers.py b/api/app_analytics/serializers.py index cca9534fb460..c9512b5a7222 100644 --- a/api/app_analytics/serializers.py +++ b/api/app_analytics/serializers.py @@ -1,5 +1,9 @@ +import typing + from rest_framework import serializers +from .types import PERIOD_TYPE + class UsageDataSerializer(serializers.Serializer): flags = serializers.IntegerField() @@ -12,6 +16,12 @@ class UsageDataSerializer(serializers.Serializer): class UsageDataQuerySerializer(serializers.Serializer): project_id = serializers.IntegerField(required=False) environment_id = serializers.IntegerField(required=False) + period = serializers.ChoiceField( + choices=typing.get_args(PERIOD_TYPE), + allow_null=True, + default=None, + required=False, + ) class UsageTotalCountSerializer(serializers.Serializer): diff --git a/api/app_analytics/tasks.py b/api/app_analytics/tasks.py index 4fbc203dd1bd..0df21b9d5c4e 100644 --- a/api/app_analytics/tasks.py +++ b/api/app_analytics/tasks.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta from typing import List, Tuple -from app_analytics.analytics_db_service import ANALYTICS_READ_BUCKET_SIZE +from app_analytics.constants import ANALYTICS_READ_BUCKET_SIZE from django.conf import settings from django.db.models import Count, Q, Sum from django.utils import timezone diff --git a/api/app_analytics/types.py b/api/app_analytics/types.py new file mode 100644 index 000000000000..5472d52b0f8a --- /dev/null +++ b/api/app_analytics/types.py @@ -0,0 +1,9 @@ +from typing import Literal + +from . import constants + +PERIOD_TYPE = Literal[ + constants.CURRENT_BILLING_PERIOD, + constants.PREVIOUS_BILLING_PERIOD, + constants.NINETY_DAY_PERIOD, +] diff --git a/api/app_analytics/views.py b/api/app_analytics/views.py index b619c89729ff..d89b76e3fb97 100644 --- a/api/app_analytics/views.py +++ b/api/app_analytics/views.py @@ -150,7 +150,11 @@ def post(self, request, *args, **kwargs): ) ) elif settings.INFLUXDB_TOKEN: - track_feature_evaluation_influxdb.delay( + # Due to load issues on the task processor, we + # explicitly run this task in a separate thread. + # TODO: batch influx data to prevent large amounts + # of tasks. 
+ track_feature_evaluation_influxdb.run_in_thread( args=( request.environment.id, request.data, diff --git a/api/audit/constants.py b/api/audit/constants.py index 39672cf2aae2..c2bb05197143 100644 --- a/api/audit/constants.py +++ b/api/audit/constants.py @@ -58,4 +58,6 @@ CHANGE_REQUEST_COMMITTED_MESSAGE = "Change Request: %s committed" CHANGE_REQUEST_DELETED_MESSAGE = "Change Request: %s deleted" +ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE = "New version published for feature: %s" + DATETIME_FORMAT = "%d/%m/%Y %H:%M:%S" diff --git a/api/audit/related_object_type.py b/api/audit/related_object_type.py index 3c15d5858901..6fab2460890b 100644 --- a/api/audit/related_object_type.py +++ b/api/audit/related_object_type.py @@ -9,3 +9,4 @@ class RelatedObjectType(enum.Enum): CHANGE_REQUEST = "Change request" EDGE_IDENTITY = "Edge Identity" IMPORT_REQUEST = "Import request" + EF_VERSION = "Environment feature version" diff --git a/api/audit/serializers.py b/api/audit/serializers.py index 202ef42f800f..70e975cfaa2e 100644 --- a/api/audit/serializers.py +++ b/api/audit/serializers.py @@ -24,6 +24,7 @@ class Meta: "environment", "project", "related_object_id", + "related_object_uuid", "related_object_type", "is_system_event", ) @@ -52,6 +53,7 @@ class Meta: "environment", "project", "related_object_id", + "related_object_uuid", "related_object_type", "is_system_event", "change_details", diff --git a/api/audit/tasks.py b/api/audit/tasks.py index 3d60c10927a7..0863580a50e3 100644 --- a/api/audit/tasks.py +++ b/api/audit/tasks.py @@ -159,9 +159,16 @@ def create_segment_priorities_changed_audit_log( if not feature_segments: return - # all feature segments should have the same value for feature and environment + # all feature segments should have the same value for feature, environment and + # environment feature version environment = feature_segments[0].environment feature = feature_segments[0].feature + environment_feature_version_id = feature_segments[0].environment_feature_version_id + + if environment_feature_version_id is not None: + # Don't create audit logs for FeatureSegments wrapped in a version + # as this is handled by the feature history instead. 
+ return AuditLog.objects.create( log=f"Segment overrides re-ordered for feature '{feature.name}'.", diff --git a/api/conftest.py b/api/conftest.py index 583e60607713..a799dfbf43d0 100644 --- a/api/conftest.py +++ b/api/conftest.py @@ -1,5 +1,6 @@ import os import typing +from unittest.mock import MagicMock import boto3 import pytest @@ -11,6 +12,7 @@ from moto import mock_dynamodb from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource, Table from pytest_django.plugin import blocking_manager_key +from pytest_mock import MockerFixture from rest_framework.authtoken.models import Token from rest_framework.test import APIClient from urllib3.connectionpool import HTTPConnectionPool @@ -85,6 +87,25 @@ def pytest_sessionstart(session: pytest.Session) -> None: fix_issue_3869() +@pytest.fixture() +def post_request_mock(mocker: MockerFixture) -> MagicMock: + def mocked_request(*args, **kwargs) -> None: + class MockResponse: + def __init__(self, json_data: str, status_code: int) -> None: + self.json_data = json_data + self.status_code = status_code + + def raise_for_status(self) -> None: + pass + + def json(self) -> str: + return self.json_data + + return MockResponse(json_data={"data": "data"}, status_code=200) + + return mocker.patch("requests.post", side_effect=mocked_request) + + @pytest.hookimpl(trylast=True) def pytest_configure(config: pytest.Config) -> None: if ( @@ -563,7 +584,7 @@ def feature_with_value_segment( @pytest.fixture() -def segment_featurestate_and_feature_with_value( +def segment_override_for_feature_with_value( feature_with_value_segment: FeatureSegment, feature_with_value: Feature, environment: Environment, @@ -966,20 +987,35 @@ def flagsmith_environments_v2_table(dynamodb: DynamoDBServiceResource) -> Table: @pytest.fixture() -def feature_external_resource(feature: Feature) -> FeatureExternalResource: +def feature_external_resource( + feature: Feature, post_request_mock: MagicMock, mocker: MockerFixture +) -> FeatureExternalResource: + mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", + ) + return FeatureExternalResource.objects.create( - url="https://github.com/userexample/example-project-repo/issues/11", + url="https://github.com/repositoryownertest/repositorynametest/issues/11", type="GITHUB_ISSUE", feature=feature, + metadata='{"status": "open"}', ) @pytest.fixture() def feature_with_value_external_resource( feature_with_value: Feature, + post_request_mock: MagicMock, + mocker: MockerFixture, ) -> FeatureExternalResource: + mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", + ) + return FeatureExternalResource.objects.create( - url="https://github.com/userexample/example-project-repo/issues/11", + url="https://github.com/repositoryownertest/repositorynametest/issues/11", type="GITHUB_ISSUE", feature=feature_with_value, ) @@ -1011,10 +1047,28 @@ def github_repository( "admin_master_api_key_client", ] ) -def admin_client_new(request, admin_client_original, admin_master_api_key_client): +def admin_client_new( + request: pytest.FixtureRequest, + admin_client_original: APIClient, + admin_master_api_key_client: APIClient, +) -> APIClient: if request.param == "admin_client_original": yield admin_client_original elif request.param == "admin_master_api_key_client": yield admin_master_api_key_client else: assert False, "Request param mismatch" + + +@pytest.fixture() +def superuser(): + return FFAdminUser.objects.create_superuser( + email="superuser@example.com", + 
password=FFAdminUser.objects.make_random_password(), + ) + + +@pytest.fixture() +def superuser_client(superuser: FFAdminUser, client: APIClient): + client.force_login(superuser, backend="django.contrib.auth.backends.ModelBackend") + return client diff --git a/api/custom_auth/mfa/backends/application.py b/api/custom_auth/mfa/backends/application.py index 3d81db4d4000..7a6db0df57a2 100644 --- a/api/custom_auth/mfa/backends/application.py +++ b/api/custom_auth/mfa/backends/application.py @@ -1,8 +1,28 @@ -from trench.backends.application import ApplicationBackend +from typing import Any, Dict +from django.conf import settings +from pyotp import TOTP +from rest_framework.response import Response + +from custom_auth.mfa.trench.models import MFAMethod + + +class CustomApplicationBackend: + def __init__(self, mfa_method: MFAMethod, config: Dict[str, Any]) -> None: + self._mfa_method = mfa_method + self._config = config + self._totp = TOTP(self._mfa_method.secret) -class CustomApplicationBackend(ApplicationBackend): def dispatch_message(self): - original_message = super(CustomApplicationBackend, self).dispatch_message() - data = {**original_message, "secret": self.obj.secret} - return data + qr_link = self._totp.provisioning_uri( + self._mfa_method.user.email, settings.TRENCH_AUTH["APPLICATION_ISSUER_NAME"] + ) + data = { + "qr_link": qr_link, + "secret": self._mfa_method.secret, + } + return Response(data) + + def validate_code(self, code: str) -> bool: + validity_period = settings.TRENCH_AUTH["MFA_METHODS"]["app"]["VALIDITY_PERIOD"] + return self._totp.verify(otp=code, valid_window=int(validity_period / 20)) diff --git a/api/custom_auth/mfa/trench/__init__.py b/api/custom_auth/mfa/trench/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/api/custom_auth/mfa/trench/admin.py b/api/custom_auth/mfa/trench/admin.py new file mode 100644 index 000000000000..aaf617bb7714 --- /dev/null +++ b/api/custom_auth/mfa/trench/admin.py @@ -0,0 +1,8 @@ +from django.contrib import admin + +from custom_auth.mfa.trench.models import MFAMethod + + +@admin.register(MFAMethod) +class MFAMethodAdmin(admin.ModelAdmin): + pass diff --git a/api/custom_auth/mfa/trench/apps.py b/api/custom_auth/mfa/trench/apps.py new file mode 100644 index 000000000000..9d65b184cdfd --- /dev/null +++ b/api/custom_auth/mfa/trench/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class TrenchConfig(AppConfig): + name = "custom_auth.mfa.trench" + verbose_name = "django-trench" diff --git a/api/custom_auth/mfa/trench/backends/__init__.py b/api/custom_auth/mfa/trench/backends/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/api/custom_auth/mfa/trench/command/__init__.py b/api/custom_auth/mfa/trench/command/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/api/custom_auth/mfa/trench/command/activate_mfa_method.py b/api/custom_auth/mfa/trench/command/activate_mfa_method.py new file mode 100644 index 000000000000..e357f4525a55 --- /dev/null +++ b/api/custom_auth/mfa/trench/command/activate_mfa_method.py @@ -0,0 +1,37 @@ +from typing import Callable, Set, Type + +from custom_auth.mfa.trench.command.generate_backup_codes import ( + generate_backup_codes_command, +) +from custom_auth.mfa.trench.command.replace_mfa_method_backup_codes import ( + regenerate_backup_codes_for_mfa_method_command, +) +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.utils import get_mfa_model + + +class ActivateMFAMethodCommand: + def __init__( 
+ self, mfa_model: Type[MFAMethod], backup_codes_generator: Callable + ) -> None: + self._mfa_model = mfa_model + self._backup_codes_generator = backup_codes_generator + + def execute(self, user_id: int, name: str, code: str) -> Set[str]: + self._mfa_model.objects.filter(user_id=user_id, name=name).update( + is_active=True, + is_primary=not self._mfa_model.objects.primary_exists(user_id=user_id), + ) + + backup_codes = regenerate_backup_codes_for_mfa_method_command( + user_id=user_id, + name=name, + ) + + return backup_codes + + +activate_mfa_method_command = ActivateMFAMethodCommand( + mfa_model=get_mfa_model(), + backup_codes_generator=generate_backup_codes_command, +).execute diff --git a/api/custom_auth/mfa/trench/command/authenticate_second_factor.py b/api/custom_auth/mfa/trench/command/authenticate_second_factor.py new file mode 100644 index 000000000000..35912eb5cd4f --- /dev/null +++ b/api/custom_auth/mfa/trench/command/authenticate_second_factor.py @@ -0,0 +1,36 @@ +from custom_auth.mfa.trench.command.remove_backup_code import ( + remove_backup_code_command, +) +from custom_auth.mfa.trench.command.validate_backup_code import ( + validate_backup_code_command, +) +from custom_auth.mfa.trench.exceptions import ( + InvalidCodeError, + InvalidTokenError, +) +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.utils import get_mfa_handler, user_token_generator +from users.models import FFAdminUser + + +def is_authenticated(user_id: int, code: str) -> None: + for auth_method in MFAMethod.objects.list_active(user_id=user_id): + validated_backup_code = validate_backup_code_command( + value=code, backup_codes=auth_method.backup_codes + ) + if get_mfa_handler(mfa_method=auth_method).validate_code(code=code): + return + if validated_backup_code: + remove_backup_code_command( + user_id=auth_method.user_id, method_name=auth_method.name, code=code + ) + return + raise InvalidCodeError() + + +def authenticate_second_step_command(code: str, ephemeral_token: str) -> FFAdminUser: + user = user_token_generator.check_token(user=None, token=ephemeral_token) + if user is None: + raise InvalidTokenError() + is_authenticated(user_id=user.id, code=code) + return user diff --git a/api/custom_auth/mfa/trench/command/create_mfa_method.py b/api/custom_auth/mfa/trench/command/create_mfa_method.py new file mode 100644 index 000000000000..1d966e5a1d11 --- /dev/null +++ b/api/custom_auth/mfa/trench/command/create_mfa_method.py @@ -0,0 +1,30 @@ +from typing import Callable, Type + +from custom_auth.mfa.trench.command.create_secret import create_secret_command +from custom_auth.mfa.trench.exceptions import MFAMethodAlreadyActiveError +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.utils import get_mfa_model + + +class CreateMFAMethodCommand: + def __init__(self, secret_generator: Callable, mfa_model: Type[MFAMethod]) -> None: + self._mfa_model = mfa_model + self._create_secret = secret_generator + + def execute(self, user_id: int, name: str) -> MFAMethod: + mfa, created = self._mfa_model.objects.get_or_create( + user_id=user_id, + name=name, + defaults={ + "secret": self._create_secret, + "is_active": False, + }, + ) + if not created and mfa.is_active: + raise MFAMethodAlreadyActiveError() + return mfa + + +create_mfa_method_command = CreateMFAMethodCommand( + secret_generator=create_secret_command, mfa_model=get_mfa_model() +).execute diff --git a/api/custom_auth/mfa/trench/command/create_secret.py b/api/custom_auth/mfa/trench/command/create_secret.py 
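
For context on the rewritten application backend earlier in this diff: dispatch_message() returns a TOTP provisioning URI (rendered as a QR code by the frontend) and validate_code() verifies a one-time code within a small validity window. A minimal pyotp sketch of that flow; the e-mail address and issuer name are placeholders, not Flagsmith settings:

import pyotp

# A base32 secret of the kind stored on MFAMethod.secret.
secret = pyotp.random_base32()
totp = pyotp.TOTP(secret)

# What dispatch_message() exposes: a provisioning URI that authenticator
# apps import, typically via a QR code.
print(totp.provisioning_uri("user@example.com", issuer_name="ExampleIssuer"))

# What validate_code() does: verify the submitted code, allowing a small
# window of clock drift (one 30-second step on either side here).
code = totp.now()
assert totp.verify(code, valid_window=1)
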
new file mode 100644 index 000000000000..ea2669432a8d --- /dev/null +++ b/api/custom_auth/mfa/trench/command/create_secret.py @@ -0,0 +1,7 @@ +from django.conf import settings +from pyotp import random_base32 + + +def create_secret_command() -> str: + generator = random_base32 + return generator(length=settings.TRENCH_AUTH["SECRET_KEY_LENGTH"]) diff --git a/api/custom_auth/mfa/trench/command/deactivate_mfa_method.py b/api/custom_auth/mfa/trench/command/deactivate_mfa_method.py new file mode 100644 index 000000000000..af7422d57d99 --- /dev/null +++ b/api/custom_auth/mfa/trench/command/deactivate_mfa_method.py @@ -0,0 +1,27 @@ +from typing import Type + +from django.db.transaction import atomic + +from custom_auth.mfa.trench.exceptions import MFANotEnabledError +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.utils import get_mfa_model + + +class DeactivateMFAMethodCommand: + def __init__(self, mfa_model: Type[MFAMethod]) -> None: + self._mfa_model = mfa_model + + @atomic + def execute(self, mfa_method_name: str, user_id: int) -> None: + mfa = self._mfa_model.objects.get_by_name(user_id=user_id, name=mfa_method_name) + if not mfa.is_active: + raise MFANotEnabledError() + + self._mfa_model.objects.filter(user_id=user_id, name=mfa_method_name).update( + is_active=False, is_primary=False + ) + + +deactivate_mfa_method_command = DeactivateMFAMethodCommand( + mfa_model=get_mfa_model() +).execute diff --git a/api/custom_auth/mfa/trench/command/generate_backup_codes.py b/api/custom_auth/mfa/trench/command/generate_backup_codes.py new file mode 100644 index 000000000000..fa71c97e89c3 --- /dev/null +++ b/api/custom_auth/mfa/trench/command/generate_backup_codes.py @@ -0,0 +1,38 @@ +from typing import Callable, Set + +from django.conf import settings +from django.utils.crypto import get_random_string + + +class GenerateBackupCodesCommand: + def __init__(self, random_string_generator: Callable) -> None: + self._random_string_generator = random_string_generator + + def execute( + self, + quantity: int = settings.TRENCH_AUTH["BACKUP_CODES_QUANTITY"], + length: int = settings.TRENCH_AUTH["BACKUP_CODES_LENGTH"], + allowed_chars: str = settings.TRENCH_AUTH["BACKUP_CODES_CHARACTERS"], + ) -> Set[str]: + """ + Generates random encrypted backup codes. 
+ + :param quantity: How many codes should be generated + :type quantity: int + :param length: How long codes should be + :type length: int + :param allowed_chars: Characters to create backup codes from + :type allowed_chars: str + + :returns: Encrypted backup codes + :rtype: set[str] + """ + return { + self._random_string_generator(length, allowed_chars) + for _ in range(quantity) + } + + +generate_backup_codes_command = GenerateBackupCodesCommand( + random_string_generator=get_random_string, +).execute diff --git a/api/custom_auth/mfa/trench/command/remove_backup_code.py b/api/custom_auth/mfa/trench/command/remove_backup_code.py new file mode 100644 index 000000000000..d8d58f11bbe5 --- /dev/null +++ b/api/custom_auth/mfa/trench/command/remove_backup_code.py @@ -0,0 +1,29 @@ +from typing import Any, Optional, Set + +from django.contrib.auth.hashers import check_password + +from custom_auth.mfa.trench.models import MFAMethod + + +def remove_backup_code_command(user_id: Any, method_name: str, code: str) -> None: + serialized_codes = ( + MFAMethod.objects.filter(user_id=user_id, name=method_name) + .values_list("_backup_codes", flat=True) + .first() + ) + codes = MFAMethod._BACKUP_CODES_DELIMITER.join( + _remove_code_from_set( + backup_codes=set(serialized_codes.split(MFAMethod._BACKUP_CODES_DELIMITER)), + code=code, + ) + ) + MFAMethod.objects.filter(user_id=user_id, name=method_name).update( + _backup_codes=codes + ) + + +def _remove_code_from_set(backup_codes: Set[str], code: str) -> Optional[Set[str]]: + for backup_code in backup_codes: + if check_password(code, backup_code): + backup_codes.remove(backup_code) + return backup_codes diff --git a/api/custom_auth/mfa/trench/command/replace_mfa_method_backup_codes.py b/api/custom_auth/mfa/trench/command/replace_mfa_method_backup_codes.py new file mode 100644 index 000000000000..27564bc3c46d --- /dev/null +++ b/api/custom_auth/mfa/trench/command/replace_mfa_method_backup_codes.py @@ -0,0 +1,39 @@ +from typing import Callable, Set, Type + +from django.contrib.auth.hashers import make_password + +from custom_auth.mfa.trench.command.generate_backup_codes import ( + generate_backup_codes_command, +) +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.utils import get_mfa_model + + +class RegenerateBackupCodesForMFAMethodCommand: + def __init__( + self, + mfa_model: Type[MFAMethod], + code_hasher: Callable, + codes_generator: Callable, + ) -> None: + self._mfa_model = mfa_model + self._code_hasher = code_hasher + self._codes_generator = codes_generator + + def execute(self, user_id: int, name: str) -> Set[str]: + backup_codes = self._codes_generator() + self._mfa_model.objects.filter(user_id=user_id, name=name).update( + _backup_codes=MFAMethod._BACKUP_CODES_DELIMITER.join( + [self._code_hasher(backup_code) for backup_code in backup_codes] + ), + ) + return backup_codes + + +regenerate_backup_codes_for_mfa_method_command = ( + RegenerateBackupCodesForMFAMethodCommand( + mfa_model=get_mfa_model(), + code_hasher=make_password, + codes_generator=generate_backup_codes_command, + ).execute +) diff --git a/api/custom_auth/mfa/trench/command/validate_backup_code.py b/api/custom_auth/mfa/trench/command/validate_backup_code.py new file mode 100644 index 000000000000..bee5bfc56c0a --- /dev/null +++ b/api/custom_auth/mfa/trench/command/validate_backup_code.py @@ -0,0 +1,10 @@ +from typing import Iterable, Optional + +from django.contrib.auth.hashers import check_password + + +def validate_backup_code_command(value: str, 
backup_codes: Iterable) -> Optional[str]: + for backup_code in backup_codes: + if check_password(value, backup_code): + return backup_code + return None diff --git a/api/custom_auth/mfa/trench/exceptions.py b/api/custom_auth/mfa/trench/exceptions.py new file mode 100644 index 000000000000..aa6b3916e7e9 --- /dev/null +++ b/api/custom_auth/mfa/trench/exceptions.py @@ -0,0 +1,46 @@ +from django.utils.translation import gettext_lazy as _ +from rest_framework.serializers import ValidationError + + +class MFAValidationError(ValidationError): + def __str__(self) -> str: + return ", ".join(detail for detail in self.detail) + + +class CodeInvalidOrExpiredError(MFAValidationError): + def __init__(self) -> None: + super().__init__( + detail=_("Code invalid or expired."), + code="code_invalid_or_expired", + ) + + +class MFAMethodDoesNotExistError(MFAValidationError): + def __init__(self) -> None: + super().__init__( + detail=_("Requested MFA method does not exist."), + code="mfa_method_does_not_exist", + ) + + +class MFAMethodAlreadyActiveError(MFAValidationError): + def __init__(self) -> None: + super().__init__( + detail=_("MFA method already active."), + code="method_already_active", + ) + + +class MFANotEnabledError(MFAValidationError): + def __init__(self) -> None: + super().__init__(detail=_("2FA is not enabled."), code="not_enabled") + + +class InvalidTokenError(MFAValidationError): + def __init__(self) -> None: + super().__init__(detail=_("Invalid or expired token."), code="invalid_token") + + +class InvalidCodeError(MFAValidationError): + def __init__(self) -> None: + super().__init__(detail=_("Invalid or expired code."), code="invalid_code") diff --git a/api/custom_auth/mfa/trench/migrations/0001_initial.py b/api/custom_auth/mfa/trench/migrations/0001_initial.py new file mode 100644 index 000000000000..cb6e1239ef69 --- /dev/null +++ b/api/custom_auth/mfa/trench/migrations/0001_initial.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.15 on 2018-10-12 11:34 +from __future__ import unicode_literals + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name="MFAMethod", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "name", + models.CharField(max_length=255, verbose_name="name"), + ), + ( + "secret", + models.CharField(max_length=20, verbose_name="secret"), + ), + ( + "is_primary", + models.BooleanField(default=False, verbose_name="is primary"), + ), + ( + "is_active", + models.BooleanField(default=False, verbose_name="is active"), + ), + ( + "backup_codes", + models.CharField( + blank=True, max_length=255, verbose_name="backup codes" + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="mfa", + to=settings.AUTH_USER_MODEL, + verbose_name="user", + ), + ), + ], + options={ + "verbose_name": "MFA Method", + "verbose_name_plural": "MFA Methods", + }, + ), + ] diff --git a/api/custom_auth/mfa/trench/migrations/0002_auto_20190111_1403.py b/api/custom_auth/mfa/trench/migrations/0002_auto_20190111_1403.py new file mode 100644 index 000000000000..a95dc501fa5b --- /dev/null +++ b/api/custom_auth/mfa/trench/migrations/0002_auto_20190111_1403.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- 
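
The backup-code commands above never persist plain codes: regenerate_backup_codes_for_mfa_method_command hashes each code with make_password before saving, and validate_backup_code_command / remove_backup_code_command compare submitted values using check_password. A standalone sketch of that pattern; the inline settings.configure() call exists only so the snippet runs outside a Django project:

from django.conf import settings

# Minimal configuration so Django's password hashers are usable standalone.
settings.configure(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.PBKDF2PasswordHasher"]
)

from django.contrib.auth.hashers import check_password, make_password
from django.utils.crypto import get_random_string

# Plain codes are shown to the user once; only their hashes are stored.
plain_codes = {get_random_string(12) for _ in range(5)}
stored_hashes = [make_password(code) for code in plain_codes]

# At login time, a submitted code is checked against every stored hash.
submitted = next(iter(plain_codes))
assert any(check_password(submitted, h) for h in stored_hashes)
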
+# Generated by Django 1.11.15 on 2019-01-11 14:03 +from __future__ import unicode_literals + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("trench", "0001_initial"), + ] + + operations = [ + migrations.RenameField( + model_name="mfamethod", + old_name="backup_codes", + new_name="_backup_codes", + ), + migrations.AlterField( + model_name="mfamethod", + name="user", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="mfa_methods", + to=settings.AUTH_USER_MODEL, + verbose_name="user", + ), + ), + ] diff --git a/api/custom_auth/mfa/trench/migrations/0003_auto_20190213_2330.py b/api/custom_auth/mfa/trench/migrations/0003_auto_20190213_2330.py new file mode 100644 index 000000000000..39699eb4374f --- /dev/null +++ b/api/custom_auth/mfa/trench/migrations/0003_auto_20190213_2330.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.18 on 2019-02-13 23:30 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("trench", "0002_auto_20190111_1403"), + ] + + operations = [ + migrations.AlterField( + model_name="mfamethod", + name="secret", + field=models.CharField(max_length=255, verbose_name="secret"), + ), + migrations.AlterField( + model_name="mfamethod", + name="_backup_codes", + field=models.TextField(blank=True, verbose_name="backup codes"), + ), + ] diff --git a/api/custom_auth/mfa/trench/migrations/__init__.py b/api/custom_auth/mfa/trench/migrations/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/api/custom_auth/mfa/trench/models.py b/api/custom_auth/mfa/trench/models.py new file mode 100644 index 000000000000..ee6d7580f882 --- /dev/null +++ b/api/custom_auth/mfa/trench/models.py @@ -0,0 +1,61 @@ +from typing import Any, Iterable + +from django.conf import settings +from django.db.models import ( + CASCADE, + BooleanField, + CharField, + ForeignKey, + Manager, + Model, + QuerySet, + TextField, +) + +from custom_auth.mfa.trench.exceptions import MFAMethodDoesNotExistError + + +class MFAUserMethodManager(Manager): + def get_by_name(self, user_id: Any, name: str) -> "MFAMethod": + try: + return self.get(user_id=user_id, name=name) + except self.model.DoesNotExist: + raise MFAMethodDoesNotExistError() + + def get_primary_active(self, user_id: Any) -> "MFAMethod": + try: + return self.get(user_id=user_id, is_primary=True, is_active=True) + except self.model.DoesNotExist: + raise MFAMethodDoesNotExistError() + + def list_active(self, user_id: Any) -> QuerySet: + return self.filter(user_id=user_id, is_active=True) + + def primary_exists(self, user_id: Any) -> bool: + return self.filter(user_id=user_id, is_primary=True).exists() + + +class MFAMethod(Model): + _BACKUP_CODES_DELIMITER = "," + + user = ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=CASCADE, + verbose_name="user", + related_name="mfa_methods", + ) + name = CharField("name", max_length=255) + secret = CharField("secret", max_length=255) + is_primary = BooleanField("is primary", default=False) + is_active = BooleanField("is active", default=False) + _backup_codes = TextField("backup codes", blank=True) + + class Meta: + verbose_name = "MFA Method" + verbose_name_plural = "MFA Methods" + + objects = MFAUserMethodManager() + + @property + def backup_codes(self) -> Iterable[str]: + return 
self._backup_codes.split(self._BACKUP_CODES_DELIMITER) diff --git a/api/custom_auth/mfa/trench/responses.py b/api/custom_auth/mfa/trench/responses.py new file mode 100644 index 000000000000..ba3a7da0bc31 --- /dev/null +++ b/api/custom_auth/mfa/trench/responses.py @@ -0,0 +1,23 @@ +from rest_framework.response import Response +from rest_framework.status import HTTP_400_BAD_REQUEST + +from custom_auth.mfa.trench.exceptions import MFAValidationError + + +class DispatchResponse(Response): + _FIELD_DETAILS = "details" + + +class ErrorResponse(Response): + _FIELD_ERROR = "error" + + def __init__( + self, + error: MFAValidationError, + status: str = HTTP_400_BAD_REQUEST, + *args, + **kwargs, + ) -> None: + super().__init__( + data={self._FIELD_ERROR: str(error)}, status=status, *args, **kwargs + ) diff --git a/api/custom_auth/mfa/trench/serializers.py b/api/custom_auth/mfa/trench/serializers.py new file mode 100644 index 000000000000..60561b85dbac --- /dev/null +++ b/api/custom_auth/mfa/trench/serializers.py @@ -0,0 +1,52 @@ +from django.contrib.auth import get_user_model +from django.contrib.auth.models import AbstractUser +from rest_framework.fields import CharField +from rest_framework.serializers import ModelSerializer, Serializer + +from custom_auth.mfa.trench.exceptions import ( + CodeInvalidOrExpiredError, + MFAMethodAlreadyActiveError, +) +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.utils import get_mfa_handler, get_mfa_model + +User: AbstractUser = get_user_model() + + +class MFAMethodActivationConfirmationValidator(Serializer): + code = CharField() + + def __init__(self, mfa_method_name: str, user: User, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._user = user + self._mfa_method_name = mfa_method_name + + def validate_code(self, value: str) -> str: + mfa_model = get_mfa_model() + mfa = mfa_model.objects.get_by_name( + user_id=self._user.id, name=self._mfa_method_name + ) + self._validate_mfa_method(mfa) + + handler = get_mfa_handler(mfa) + + if handler.validate_code(value): + return value + + raise CodeInvalidOrExpiredError() + + @staticmethod + def _validate_mfa_method(mfa: MFAMethod) -> None: + if mfa.is_active: + raise MFAMethodAlreadyActiveError() + + +class CodeLoginSerializer(Serializer): + ephemeral_token = CharField() + code = CharField() + + +class UserMFAMethodSerializer(ModelSerializer): + class Meta: + model = get_mfa_model() + fields = ("name", "is_primary") diff --git a/api/custom_auth/mfa/trench/urls/__init__.py b/api/custom_auth/mfa/trench/urls/__init__.py new file mode 100644 index 000000000000..23983a23f331 --- /dev/null +++ b/api/custom_auth/mfa/trench/urls/__init__.py @@ -0,0 +1 @@ +from custom_auth.mfa.trench.urls.base import urlpatterns # noqa diff --git a/api/custom_auth/mfa/trench/urls/base.py b/api/custom_auth/mfa/trench/urls/base.py new file mode 100644 index 000000000000..8cc2cfc08760 --- /dev/null +++ b/api/custom_auth/mfa/trench/urls/base.py @@ -0,0 +1,35 @@ +from django.urls import path + +__all__ = [ + "urlpatterns", +] + +from custom_auth.mfa.trench.views import ( + ListUserActiveMFAMethods, + MFAMethodActivationView, + MFAMethodConfirmActivationView, + MFAMethodDeactivationView, +) + +urlpatterns = ( + path( + "/activate/", + MFAMethodActivationView.as_view(), + name="mfa-activate", + ), + path( + "/activate/confirm/", + MFAMethodConfirmActivationView.as_view(), + name="mfa-activate-confirm", + ), + path( + "/deactivate/", + MFAMethodDeactivationView.as_view(), + name="mfa-deactivate", + ), + 
path( + "mfa/user-active-methods/", + ListUserActiveMFAMethods.as_view(), + name="mfa-list-user-active-methods", + ), +) diff --git a/api/custom_auth/mfa/trench/utils.py b/api/custom_auth/mfa/trench/utils.py new file mode 100644 index 000000000000..b192b0b63909 --- /dev/null +++ b/api/custom_auth/mfa/trench/utils.py @@ -0,0 +1,70 @@ +from datetime import datetime +from typing import Optional, Type + +from django.conf import settings +from django.contrib.auth import get_user_model +from django.contrib.auth.models import AbstractUser +from django.contrib.auth.tokens import PasswordResetTokenGenerator +from django.utils.crypto import constant_time_compare, salted_hmac +from django.utils.http import base36_to_int, int_to_base36 + +from custom_auth.mfa.backends.application import CustomApplicationBackend +from custom_auth.mfa.trench.models import MFAMethod + +User: AbstractUser = get_user_model() + + +class UserTokenGenerator(PasswordResetTokenGenerator): + """ + Custom token generator: + - user pk in token + - expires after 15 minutes + - longer hash (40 instead of 20) + """ + + KEY_SALT = "django.contrib.auth.tokens.PasswordResetTokenGenerator" + SECRET = settings.SECRET_KEY + EXPIRY_TIME = 60 * 15 + + def make_token(self, user: User) -> str: + return self._make_token_with_timestamp(user, int(datetime.now().timestamp())) + + def check_token(self, user: User, token: str) -> Optional[User]: + user_model = get_user_model() + try: + token = str(token) + user_pk, ts_b36, token_hash = token.rsplit("-", 2) + ts = base36_to_int(ts_b36) + user = user_model._default_manager.get(pk=user_pk) + except (ValueError, TypeError, user_model.DoesNotExist): + return None + + if (datetime.now().timestamp() - ts) > self.EXPIRY_TIME: + return None # pragma: no cover + + if not constant_time_compare(self._make_token_with_timestamp(user, ts), token): + return None # pragma: no cover + + return user + + def _make_token_with_timestamp(self, user: User, timestamp: int, **kwargs) -> str: + ts_b36 = int_to_base36(timestamp) + token_hash = salted_hmac( + self.KEY_SALT, + self._make_hash_value(user, timestamp), + secret=self.SECRET, + ).hexdigest() + return f"{user.pk}-{ts_b36}-{token_hash}" + + +user_token_generator = UserTokenGenerator() + + +def get_mfa_model() -> Type[MFAMethod]: + return MFAMethod + + +def get_mfa_handler(mfa_method: MFAMethod) -> CustomApplicationBackend: + conf = settings.TRENCH_AUTH["MFA_METHODS"]["app"] + mfa_handler = CustomApplicationBackend(mfa_method=mfa_method, config=conf) + return mfa_handler diff --git a/api/custom_auth/mfa/trench/views/__init__.py b/api/custom_auth/mfa/trench/views/__init__.py new file mode 100644 index 000000000000..90081727ecac --- /dev/null +++ b/api/custom_auth/mfa/trench/views/__init__.py @@ -0,0 +1 @@ +from custom_auth.mfa.trench.views.base import * # noqa diff --git a/api/custom_auth/mfa/trench/views/base.py b/api/custom_auth/mfa/trench/views/base.py new file mode 100644 index 000000000000..97fde0d359ad --- /dev/null +++ b/api/custom_auth/mfa/trench/views/base.py @@ -0,0 +1,87 @@ +from django.contrib.auth import get_user_model +from django.contrib.auth.models import AbstractUser +from rest_framework import status +from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST +from rest_framework.views import APIView + +from custom_auth.mfa.trench.command.activate_mfa_method import ( + 
activate_mfa_method_command, +) +from custom_auth.mfa.trench.command.create_mfa_method import ( + create_mfa_method_command, +) +from custom_auth.mfa.trench.command.deactivate_mfa_method import ( + deactivate_mfa_method_command, +) +from custom_auth.mfa.trench.exceptions import MFAValidationError +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.responses import ErrorResponse +from custom_auth.mfa.trench.serializers import ( + MFAMethodActivationConfirmationValidator, + UserMFAMethodSerializer, +) +from custom_auth.mfa.trench.utils import get_mfa_handler + +User: AbstractUser = get_user_model() + + +class MFAMethodActivationView(APIView): + permission_classes = (IsAuthenticated,) + + @staticmethod + def post(request: Request, method: str) -> Response: + if method != "app": + return Response(status=status.HTTP_404_NOT_FOUND) + try: + user = request.user + mfa = create_mfa_method_command( + user_id=user.id, + name=method, + ) + except MFAValidationError as cause: + return ErrorResponse(error=cause) + return get_mfa_handler(mfa_method=mfa).dispatch_message() + + +class MFAMethodConfirmActivationView(APIView): + permission_classes = (IsAuthenticated,) + + @staticmethod + def post(request: Request, method: str) -> Response: + serializer = MFAMethodActivationConfirmationValidator( + mfa_method_name=method, user=request.user, data=request.data + ) + if not serializer.is_valid(): + return Response(status=HTTP_400_BAD_REQUEST, data=serializer.errors) + backup_codes = activate_mfa_method_command( + user_id=request.user.id, + name=method, + code=serializer.validated_data["code"], + ) + return Response({"backup_codes": backup_codes}) + + +class MFAMethodDeactivationView(APIView): + permission_classes = (IsAuthenticated,) + + @staticmethod + def post(request: Request, method: str) -> Response: + try: + deactivate_mfa_method_command( + mfa_method_name=method, user_id=request.user.id + ) + return Response(status=HTTP_204_NO_CONTENT) + except MFAValidationError as cause: + return ErrorResponse(error=cause) + + +class ListUserActiveMFAMethods(APIView): + permission_classes = (IsAuthenticated,) + + def get(self, request, *args, **kwargs): + active_mfa_methods = MFAMethod.objects.filter(user=request.user, is_active=True) + serializer = UserMFAMethodSerializer(active_mfa_methods, many=True) + return Response(serializer.data) diff --git a/api/custom_auth/urls.py b/api/custom_auth/urls.py index 054b0a449423..54995ad5def7 100644 --- a/api/custom_auth/urls.py +++ b/api/custom_auth/urls.py @@ -1,4 +1,5 @@ from django.urls import include, path +from djoser.views import TokenDestroyView from rest_framework.routers import DefaultRouter from custom_auth.views import ( @@ -25,12 +26,12 @@ CustomAuthTokenLoginWithMFACode.as_view(), name="mfa-authtoken-login-code", ), + path("logout/", TokenDestroyView.as_view(), name="authtoken-logout"), path("", include(ffadmin_user_router.urls)), path("token/", delete_token, name="delete-token"), # NOTE: endpoints provided by `djoser.urls` # are deprecated and will be removed in the next Major release path("", include("djoser.urls")), - path("", include("trench.urls")), # MFA - path("", include("trench.urls.djoser")), # override necessary urls for MFA auth + path("", include("custom_auth.mfa.trench.urls")), # MFA path("oauth/", include("custom_auth.oauth.urls")), ] diff --git a/api/custom_auth/views.py b/api/custom_auth/views.py index 211a224fdc44..97381120fd93 100644 --- a/api/custom_auth/views.py +++ b/api/custom_auth/views.py @@ -1,25 +1,35 @@ +from 
django.conf import settings from django.contrib.auth import user_logged_out from django.utils.decorators import method_decorator -from djoser.views import UserViewSet +from djoser.views import TokenCreateView, UserViewSet from drf_yasg.utils import swagger_auto_schema from rest_framework import status from rest_framework.authtoken.models import Token from rest_framework.decorators import action, api_view, permission_classes from rest_framework.permissions import IsAuthenticated +from rest_framework.request import Request from rest_framework.response import Response from rest_framework.throttling import ScopedRateThrottle -from trench.views.authtoken import ( - AuthTokenLoginOrRequestMFACode, - AuthTokenLoginWithMFACode, -) +from custom_auth.mfa.backends.application import CustomApplicationBackend +from custom_auth.mfa.trench.command.authenticate_second_factor import ( + authenticate_second_step_command, +) +from custom_auth.mfa.trench.exceptions import ( + MFAMethodDoesNotExistError, + MFAValidationError, +) +from custom_auth.mfa.trench.models import MFAMethod +from custom_auth.mfa.trench.responses import ErrorResponse +from custom_auth.mfa.trench.serializers import CodeLoginSerializer +from custom_auth.mfa.trench.utils import user_token_generator from custom_auth.serializers import CustomUserDelete from users.constants import DEFAULT_DELETE_ORPHAN_ORGANISATIONS_VALUE from .models import UserPasswordResetRequest -class CustomAuthTokenLoginOrRequestMFACode(AuthTokenLoginOrRequestMFACode): +class CustomAuthTokenLoginOrRequestMFACode(TokenCreateView): """ Class to handle throttling for login requests """ @@ -27,8 +37,27 @@ class CustomAuthTokenLoginOrRequestMFACode(AuthTokenLoginOrRequestMFACode): throttle_classes = [ScopedRateThrottle] throttle_scope = "login" + def post(self, request: Request) -> Response: + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + user = serializer.user + try: + mfa_model = MFAMethod + mfa_method = mfa_model.objects.get_primary_active(user_id=user.id) + conf = settings.TRENCH_AUTH["MFA_METHODS"]["app"] + mfa_handler = CustomApplicationBackend(mfa_method=mfa_method, config=conf) + mfa_handler.dispatch_message() + return Response( + data={ + "ephemeral_token": user_token_generator.make_token(user), + "method": mfa_method.name, + } + ) + except MFAMethodDoesNotExistError: + return self._action(serializer) + -class CustomAuthTokenLoginWithMFACode(AuthTokenLoginWithMFACode): +class CustomAuthTokenLoginWithMFACode(TokenCreateView): """ Override class to add throttling """ @@ -36,6 +65,19 @@ class CustomAuthTokenLoginWithMFACode(AuthTokenLoginWithMFACode): throttle_classes = [ScopedRateThrottle] throttle_scope = "mfa_code" + def post(self, request: Request) -> Response: + serializer = CodeLoginSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + try: + user = authenticate_second_step_command( + code=serializer.validated_data["code"], + ephemeral_token=serializer.validated_data["ephemeral_token"], + ) + serializer.user = user + return self._action(serializer) + except MFAValidationError as cause: + return ErrorResponse(error=cause, status=status.HTTP_401_UNAUTHORIZED) + @api_view(["DELETE"]) @permission_classes([IsAuthenticated]) diff --git a/api/environments/models.py b/api/environments/models.py index 543e8a547276..dd58ce9fa605 100644 --- a/api/environments/models.py +++ b/api/environments/models.py @@ -41,6 +41,7 @@ from environments.managers import EnvironmentManager from features.models import 
Feature, FeatureSegment, FeatureState from features.multivariate.models import MultivariateFeatureStateValue +from features.versioning.models import EnvironmentFeatureVersion from metadata.models import Metadata from projects.models import Project from segments.models import Segment @@ -174,8 +175,31 @@ def clone(self, name: str, api_key: str = None) -> "Environment": # Since identities are closely tied to the environment # it does not make much sense to clone them, hence # only clone feature states without identities - for feature_state in self.feature_states.filter(identity=None): - feature_state.clone(clone, live_from=feature_state.live_from) + queryset = self.feature_states.filter(identity=None) + + if self.use_v2_feature_versioning: + # Grab the latest feature versions from the source environment. + latest_environment_feature_versions = ( + EnvironmentFeatureVersion.objects.get_latest_versions_as_queryset( + environment_id=self.id + ) + ) + + # Create a dictionary holding the environment feature versions (unique per feature) + # to use in the cloned environment. + clone_environment_feature_versions = { + efv.feature_id: efv.clone_to_environment(environment=clone) + for efv in latest_environment_feature_versions + } + + for feature_state in queryset.filter( + environment_feature_version__in=latest_environment_feature_versions + ): + clone_efv = clone_environment_feature_versions[feature_state.feature_id] + feature_state.clone(clone, environment_feature_version=clone_efv) + else: + for feature_state in queryset: + feature_state.clone(clone, live_from=feature_state.live_from) return clone diff --git a/api/environments/views.py b/api/environments/views.py index 8dcfd71b7e6e..4afc0ab52567 100644 --- a/api/environments/views.py +++ b/api/environments/views.py @@ -1,6 +1,6 @@ import logging -from django.db.models import Count +from django.db.models import Count, Q from django.utils.decorators import method_decorator from drf_yasg import openapi from drf_yasg.utils import no_body, swagger_auto_schema @@ -17,6 +17,7 @@ NestedEnvironmentPermissions, ) from environments.sdk.schemas import SDKEnvironmentDocumentModel +from features.versioning.models import EnvironmentFeatureVersion from features.versioning.tasks import ( disable_v2_versioning, enable_v2_versioning, @@ -108,9 +109,28 @@ def get_queryset(self): queryset = Environment.objects.all() if self.action == "retrieve": - queryset = queryset.annotate( - total_segment_overrides=Count("feature_segments") + # Since we don't have the environment at this stage, we would need to query the database + # regardless, so it seems worthwhile to just query the database for the latest versions + # and use their existence as a proxy to whether v2 feature versioning is enabled. + latest_versions = EnvironmentFeatureVersion.objects.get_latest_versions_by_environment_api_key( + environment_api_key=self.kwargs["api_key"] ) + if latest_versions: + # if there are latest versions (and hence v2 feature versioning is enabled), then + # we need to ensure that we're only counting the feature segments for those + # latest versions against the limits. 
+ queryset = queryset.annotate( + total_segment_overrides=Count( + "feature_segments", + filter=Q( + feature_segments__environment_feature_version__in=latest_versions + ), + ) + ) + else: + queryset = queryset.annotate( + total_segment_overrides=Count("feature_segments") + ) return queryset diff --git a/api/features/feature_external_resources/models.py b/api/features/feature_external_resources/models.py index 3421f18985dc..6dcfc4d99a7f 100644 --- a/api/features/feature_external_resources/models.py +++ b/api/features/feature_external_resources/models.py @@ -1,7 +1,7 @@ import logging -from dataclasses import asdict from django.db import models +from django.db.models import Q from django_lifecycle import ( AFTER_SAVE, BEFORE_DELETE, @@ -9,21 +9,22 @@ hook, ) +from environments.models import Environment from features.models import Feature, FeatureState -from integrations.github.github import GithubData, generate_data -from integrations.github.tasks import call_github_app_webhook_for_feature_state +from integrations.github.github import call_github_task from organisations.models import Organisation from webhooks.webhooks import WebhookEventType logger = logging.getLogger(__name__) -class FeatureExternalResource(LifecycleModelMixin, models.Model): - class ResourceType(models.TextChoices): - # GitHub external resource types - GITHUB_ISSUE = "GITHUB_ISSUE", "GitHub Issue" - GITHUB_PR = "GITHUB_PR", "GitHub PR" +class ResourceType(models.TextChoices): + # GitHub external resource types + GITHUB_ISSUE = "GITHUB_ISSUE", "GitHub Issue" + GITHUB_PR = "GITHUB_PR", "GitHub PR" + +class FeatureExternalResource(LifecycleModelMixin, models.Model): url = models.URLField() type = models.CharField(max_length=20, choices=ResourceType.choices) @@ -50,43 +51,50 @@ class Meta: def execute_after_save_actions(self): # Add a comment to GitHub Issue/PR when feature is linked to the GH external resource if ( - github_configuration := Organisation.objects.prefetch_related( - "github_config" - ) + Organisation.objects.prefetch_related("github_config") .get(id=self.feature.project.organisation_id) .github_config.first() ): - feature_states = FeatureState.objects.filter( - feature_id=self.feature_id, identity_id__isnull=True + feature_states: list[FeatureState] = [] + + environments = Environment.objects.filter( + project_id=self.feature.project_id ) - feature_data: GithubData = generate_data( - github_configuration=github_configuration, - feature=self.feature, + + for environment in environments: + q = Q( + feature_id=self.feature_id, + identity__isnull=True, + ) + feature_states.extend( + FeatureState.objects.get_live_feature_states( + environment=environment, additional_filters=q + ) + ) + + call_github_task( + organisation_id=self.feature.project.organisation_id, type=WebhookEventType.FEATURE_EXTERNAL_RESOURCE_ADDED.value, + feature=self.feature, + segment_name=None, + url=None, feature_states=feature_states, ) - call_github_app_webhook_for_feature_state.delay( - args=(asdict(feature_data),), - ) - @hook(BEFORE_DELETE) def execute_before_save_actions(self) -> None: # Add a comment to GitHub Issue/PR when feature is unlinked to the GH external resource if ( - github_configuration := Organisation.objects.prefetch_related( - "github_config" - ) + Organisation.objects.prefetch_related("github_config") .get(id=self.feature.project.organisation_id) .github_config.first() ): - feature_data: GithubData = generate_data( - github_configuration=github_configuration, - feature=self.feature, + + call_github_task( + 
organisation_id=self.feature.project.organisation_id, type=WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value, + feature=self.feature, + segment_name=None, url=self.url, - ) - - call_github_app_webhook_for_feature_state.delay( - args=(asdict(feature_data),), + feature_states=None, ) diff --git a/api/features/feature_external_resources/views.py b/api/features/feature_external_resources/views.py index 822c3a07400e..c9636bba1132 100644 --- a/api/features/feature_external_resources/views.py +++ b/api/features/feature_external_resources/views.py @@ -1,13 +1,10 @@ -import re - -from django.db.utils import IntegrityError from django.shortcuts import get_object_or_404 from rest_framework import status, viewsets -from rest_framework.exceptions import ValidationError from rest_framework.response import Response from features.models import Feature from features.permissions import FeatureExternalResourcePermissions +from integrations.github.client import get_github_issue_pr_title_and_state from organisations.models import Organisation from .models import FeatureExternalResource @@ -25,6 +22,25 @@ def get_queryset(self): features_pk = self.kwargs["feature_pk"] return FeatureExternalResource.objects.filter(feature=features_pk) + # Override get list view to add github issue/pr name to each linked external resource + def list(self, request, *args, **kwargs) -> Response: + queryset = self.get_queryset() + serializer = self.get_serializer(queryset, many=True) + data = serializer.data + + # get organisation id from feature and get feature from validated data + organisation_id = get_object_or_404( + Feature.objects.filter(id=self.kwargs["feature_pk"]), + ).project.organisation_id + + for resource in data if isinstance(data, list) else []: + if resource_url := resource.get("url"): + resource["metadata"] = get_github_issue_pr_title_and_state( + organisation_id=organisation_id, resource_url=resource_url + ) + + return Response(data={"results": data}) + def create(self, request, *args, **kwargs): feature = get_object_or_404( Feature.objects.filter( @@ -40,7 +56,6 @@ def create(self, request, *args, **kwargs): ) or not hasattr(feature.project, "github_project") ): - return Response( data={ "detail": "This Project doesn't have a valid GitHub integration configuration" @@ -48,18 +63,8 @@ def create(self, request, *args, **kwargs): content_type="application/json", status=status.HTTP_400_BAD_REQUEST, ) - - try: - return super().create(request, *args, **kwargs) - - except IntegrityError as e: - if re.search(r"Key \(feature_id, url\)", str(e)) and re.search( - r"already exists.$", str(e) - ): - raise ValidationError( - detail="Duplication error. 
The feature already has this resource URI" - ) + return super().create(request, *args, **kwargs) def perform_update(self, serializer): - external_resource_id = int(self.kwargs["id"]) + external_resource_id = int(self.kwargs["pk"]) serializer.save(id=external_resource_id) diff --git a/api/features/managers.py b/api/features/managers.py index 9e8f0336fb00..b8f1acde0aa6 100644 --- a/api/features/managers.py +++ b/api/features/managers.py @@ -33,8 +33,10 @@ def get_live_feature_states( qs_filter = Q(environment=environment, deleted_at__isnull=True) if environment.use_v2_feature_versioning: - latest_versions = EnvironmentFeatureVersion.objects.get_latest_versions( - environment + latest_versions = ( + EnvironmentFeatureVersion.objects.get_latest_versions_by_environment_id( + environment.id + ) ) latest_version_uuids = [efv.uuid for efv in latest_versions] diff --git a/api/features/models.py b/api/features/models.py index 326d6fc521f3..6a80c5c90251 100644 --- a/api/features/models.py +++ b/api/features/models.py @@ -5,7 +5,6 @@ import typing import uuid from copy import deepcopy -from dataclasses import asdict from core.models import ( AbstractBaseExportableModel, @@ -23,6 +22,7 @@ from django.utils import timezone from django_lifecycle import ( AFTER_CREATE, + AFTER_DELETE, AFTER_SAVE, BEFORE_CREATE, BEFORE_SAVE, @@ -74,7 +74,6 @@ STRING, ) from features.versioning.models import EnvironmentFeatureVersion -from integrations.github.models import GithubConfiguration from metadata.models import Metadata from projects.models import Project from projects.tags.models import Tag @@ -139,10 +138,7 @@ class Meta: @hook(AFTER_SAVE) def create_github_comment(self) -> None: - from integrations.github.github import GithubData, generate_data - from integrations.github.tasks import ( - call_github_app_webhook_for_feature_state, - ) + from integrations.github.github import call_github_task from webhooks.webhooks import WebhookEventType if ( @@ -151,19 +147,14 @@ def create_github_comment(self) -> None: and self.project.organisation.github_config.exists() and self.deleted_at ): - github_configuration = GithubConfiguration.objects.get( - organisation_id=self.project.organisation_id - ) - feature_data: GithubData = generate_data( - github_configuration=github_configuration, - feature=self, + call_github_task( + organisation_id=self.project.organisation_id, type=WebhookEventType.FLAG_DELETED.value, - feature_states=[], - ) - - call_github_app_webhook_for_feature_state.delay( - args=(asdict(feature_data),), + feature=self, + segment_name=None, + url=None, + feature_states=None, ) @hook(AFTER_CREATE) @@ -219,6 +210,7 @@ def get_next_segment_priority(feature): class FeatureSegment( + LifecycleModelMixin, AbstractBaseExportableModel, OrderedModelBase, abstract_base_auditable_model_factory(["uuid"]), @@ -397,6 +389,11 @@ def to_id_priority_tuple_pairs( def get_audit_log_related_object_id(self, history_instance) -> int: return self.feature_id + def get_skip_create_audit_log(self) -> bool: + # Don't create audit logs when deleting feature segments using versioning + # v2 as we rely on the version history instead. 
+ return self.environment_feature_version_id is not None + def get_delete_log_message(self, history_instance) -> typing.Optional[str]: return SEGMENT_FEATURE_STATE_DELETED_MESSAGE % ( self.feature.name, @@ -406,6 +403,26 @@ def get_delete_log_message(self, history_instance) -> typing.Optional[str]: def _get_environment(self) -> "Environment": return self.environment + @hook(AFTER_DELETE) + def create_github_comment(self) -> None: + from integrations.github.github import call_github_task + from webhooks.webhooks import WebhookEventType + + if ( + self.feature.external_resources.exists() + and self.feature.project.github_project.exists() + and self.feature.project.organisation.github_config.exists() + ): + + call_github_task( + self.feature.project.organisation_id, + WebhookEventType.SEGMENT_OVERRIDE_DELETED.value, + self.feature, + self.segment.name, + None, + None, + ) + class FeatureState( SoftDeleteExportableModel, @@ -1068,6 +1085,9 @@ def copy_from(self, source_feature_state_value: "FeatureStateValue"): self.string_value = source_feature_state_value.string_value self.save() + def get_skip_create_audit_log(self) -> bool: + return self.feature_state.get_skip_create_audit_log() + def get_update_log_message(self, history_instance) -> typing.Optional[str]: fs = self.feature_state diff --git a/api/features/serializers.py b/api/features/serializers.py index f026e0562e80..d98c4b70a14e 100644 --- a/api/features/serializers.py +++ b/api/features/serializers.py @@ -1,5 +1,4 @@ import typing -from dataclasses import asdict from datetime import datetime import django.core.exceptions @@ -13,9 +12,7 @@ from environments.sdk.serializers_mixins import ( HideSensitiveFieldsSerializerMixin, ) -from integrations.github.github import GithubData, generate_data -from integrations.github.models import GithubConfiguration -from integrations.github.tasks import call_github_app_webhook_for_feature_state +from integrations.github.github import call_github_task from metadata.serializers import MetadataSerializer, SerializerWithMetadata from projects.models import Project from users.serializers import ( @@ -474,23 +471,18 @@ def save(self, **kwargs): and feature_state.environment.project.github_project.exists() and feature_state.environment.project.organisation.github_config.exists() ): - github_configuration = GithubConfiguration.objects.get( - organisation_id=feature_state.environment.project.organisation_id - ) - feature_states = [] - feature_states.append(feature_state) - feature_data: GithubData = generate_data( - github_configuration=github_configuration, - feature=feature_state.feature, - type=WebhookEventType.FLAG_UPDATED.value, - feature_states=feature_states, - ) - call_github_app_webhook_for_feature_state.delay( - args=(asdict(feature_data),), + call_github_task( + organisation_id=feature_state.feature.project.organisation_id, + type=WebhookEventType.FLAG_UPDATED.value, + feature=feature_state.feature, + segment_name=None, + url=None, + feature_states=[feature_state], ) return response + except django.core.exceptions.ValidationError as e: raise serializers.ValidationError(str(e)) diff --git a/api/features/versioning/managers.py b/api/features/versioning/managers.py index 32c41ad0a831..d78131907460 100644 --- a/api/features/versioning/managers.py +++ b/api/features/versioning/managers.py @@ -1,11 +1,12 @@ import typing from pathlib import Path -from django.db.models.query import RawQuerySet +from django.db.models.query import QuerySet, RawQuerySet +from django.utils import timezone from softdelete.models 
import SoftDeleteManager if typing.TYPE_CHECKING: - from environments.models import Environment + from features.versioning.models import EnvironmentFeatureVersion with open(Path(__file__).parent.resolve() / "sql/get_latest_versions.sql") as f: @@ -13,11 +14,54 @@ class EnvironmentFeatureVersionManager(SoftDeleteManager): - def get_latest_versions(self, environment: "Environment") -> RawQuerySet: + def get_latest_versions_by_environment_id(self, environment_id: int) -> RawQuerySet: """ - Get the latest EnvironmentFeatureVersion objects - for a given environment. + Get the latest EnvironmentFeatureVersion objects for a given environment. """ + return self._get_latest_versions(environment_id=environment_id) + + def get_latest_versions_by_environment_api_key( + self, environment_api_key: str + ) -> RawQuerySet: + """ + Get the latest EnvironmentFeatureVersion objects for a given environment. + """ + return self._get_latest_versions(environment_api_key=environment_api_key) + + def get_latest_versions_as_queryset( + self, environment_id: int + ) -> QuerySet["EnvironmentFeatureVersion"]: + """ + Get the latest EnvironmentFeatureVersion objects for a given environment + as a concrete QuerySet. + + Note that it is often required to return the proper QuerySet to carry out + operations on the ORM object. + """ + return self.filter( + uuid__in=[ + efv.uuid + for efv in self._get_latest_versions(environment_id=environment_id) + ] + ) + + def _get_latest_versions( + self, environment_id: int = None, environment_api_key: str = None + ) -> RawQuerySet: + assert (environment_id or environment_api_key) and not ( + environment_id and environment_api_key + ), "Must provide exactly one of environment_id or environment_api_key" + return self.raw( - get_latest_versions_sql, params={"environment_id": environment.id} + get_latest_versions_sql, + params={ + "environment_id": environment_id, + "api_key": environment_api_key, + # TODO: + # It seems as though there is a timezone issue when using postgres's + # built in now() function, so we pass in the current time from python. + # Using <= now() in the SQL query returns incorrect results. + # More investigation is needed here to understand the cause. 
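
The comment above sidesteps SQL now() by binding a timestamp computed in Python instead. As a general illustration only (not a diagnosis of the underlying issue), naive and timezone-aware timestamps serialise differently, and only the aware form carries an explicit offset for the database to interpret:

from datetime import datetime, timezone

naive = datetime.now()              # no tzinfo; meaning depends on the server/session timezone
aware = datetime.now(timezone.utc)  # explicit UTC offset, as django.utils.timezone.now() returns with USE_TZ

print(naive.isoformat())            # e.g. 2024-06-01T12:00:00.123456
print(aware.isoformat())            # e.g. 2024-06-01T12:00:00.123456+00:00
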
+ "live_from_before": timezone.now().isoformat(), + }, ) diff --git a/api/features/versioning/migrations/0002_add_api_key_for_creation_and_publish.py b/api/features/versioning/migrations/0002_add_api_key_for_creation_and_publish.py new file mode 100644 index 000000000000..ea74f96c6d85 --- /dev/null +++ b/api/features/versioning/migrations/0002_add_api_key_for_creation_and_publish.py @@ -0,0 +1,35 @@ +# Generated by Django 3.2.25 on 2024-05-31 12:11 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('api_keys', '0003_masterapikey_is_admin'), + ('feature_versioning', '0001_add_environment_feature_state_version_logic'), + ] + + operations = [ + migrations.AddField( + model_name='environmentfeatureversion', + name='created_by_api_key', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='created_environment_feature_versions', to='api_keys.masterapikey'), + ), + migrations.AddField( + model_name='environmentfeatureversion', + name='published_by_api_key', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='published_environment_feature_versions', to='api_keys.masterapikey'), + ), + migrations.AddField( + model_name='historicalenvironmentfeatureversion', + name='created_by_api_key', + field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='api_keys.masterapikey'), + ), + migrations.AddField( + model_name='historicalenvironmentfeatureversion', + name='published_by_api_key', + field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='api_keys.masterapikey'), + ), + ] diff --git a/api/features/versioning/models.py b/api/features/versioning/models.py index cd885ba4d917..0aeb842c5372 100644 --- a/api/features/versioning/models.py +++ b/api/features/versioning/models.py @@ -1,6 +1,7 @@ import datetime import typing import uuid +from copy import deepcopy from core.models import ( SoftDeleteExportableModel, @@ -11,6 +12,7 @@ from django.db.models import Index from django.utils import timezone +from api_keys.models import MasterAPIKey from features.versioning.exceptions import FeatureVersioningError from features.versioning.managers import EnvironmentFeatureVersionManager from features.versioning.signals import environment_feature_version_published @@ -46,6 +48,14 @@ class EnvironmentFeatureVersion( null=True, blank=True, ) + created_by_api_key = models.ForeignKey( + "api_keys.MasterAPIKey", + related_name="created_environment_feature_versions", + on_delete=models.SET_NULL, + null=True, + blank=True, + ) + published_by = models.ForeignKey( settings.AUTH_USER_MODEL, related_name="published_environment_feature_versions", @@ -53,6 +63,13 @@ class EnvironmentFeatureVersion( null=True, blank=True, ) + published_by_api_key = models.ForeignKey( + "api_keys.MasterAPIKey", + related_name="published_environment_feature_versions", + on_delete=models.SET_NULL, + null=True, + blank=True, + ) change_request = models.ForeignKey( "workflows_core.ChangeRequest", @@ -106,7 +123,7 @@ def get_previous_version(self) -> typing.Optional["EnvironmentFeatureVersion"]: self.__class__.objects.filter( environment=self.environment, feature=self.feature, - live_from__lt=timezone.now(), + live_from__lt=self.live_from or timezone.now(), published_at__isnull=False, ) 
.order_by("-live_from") @@ -117,14 +134,32 @@ def get_previous_version(self) -> typing.Optional["EnvironmentFeatureVersion"]: def publish( self, published_by: typing.Union["FFAdminUser", None] = None, + published_by_api_key: MasterAPIKey | None = None, live_from: datetime.datetime | None = None, persist: bool = True, ) -> None: + assert not ( + published_by and published_by_api_key + ), "Version must be published by either a user or a MasterAPIKey" + now = timezone.now() self.live_from = live_from or (self.live_from or now) self.published_at = now self.published_by = published_by + self.published_by_api_key = published_by_api_key + if persist: self.save() environment_feature_version_published.send(self.__class__, instance=self) + + def clone_to_environment( + self, environment: "Environment" + ) -> "EnvironmentFeatureVersion": + _clone = deepcopy(self) + + _clone.uuid = None + _clone.environment = environment + + _clone.save() + return _clone diff --git a/api/features/versioning/permissions.py b/api/features/versioning/permissions.py index 85d34a8647d3..538029704259 100644 --- a/api/features/versioning/permissions.py +++ b/api/features/versioning/permissions.py @@ -13,8 +13,8 @@ class EnvironmentFeatureVersionPermissions(BasePermission): def has_permission(self, request: Request, view: GenericViewSet) -> bool: - if view.action == "list": - # permissions for listing handled in view.get_queryset + if view.action in ("list", "retrieve"): + # permissions for listing and retrieving handled in view.get_queryset return True environment_pk = view.kwargs["environment_pk"] @@ -35,6 +35,13 @@ def has_object_permission( ) +class EnvironmentFeatureVersionRetrievePermissions(BasePermission): + def has_object_permission(self, request, view, obj): + return request.user.has_environment_permission( + VIEW_ENVIRONMENT, obj.environment + ) + + class EnvironmentFeatureVersionFeatureStatePermissions(BasePermission): def has_permission(self, request: Request, view: GenericViewSet) -> bool: environment_pk = view.kwargs["environment_pk"] diff --git a/api/features/versioning/receivers.py b/api/features/versioning/receivers.py index 5fa6525c99e3..94670aa35dd2 100644 --- a/api/features/versioning/receivers.py +++ b/api/features/versioning/receivers.py @@ -5,7 +5,10 @@ from environments.tasks import rebuild_environment_document from features.versioning.models import EnvironmentFeatureVersion from features.versioning.signals import environment_feature_version_published -from features.versioning.tasks import trigger_update_version_webhooks +from features.versioning.tasks import ( + create_environment_feature_version_published_audit_log_task, + trigger_update_version_webhooks, +) @receiver(post_save, sender=EnvironmentFeatureVersion) @@ -50,3 +53,12 @@ def trigger_webhooks(instance: EnvironmentFeatureVersion, **kwargs) -> None: kwargs={"environment_feature_version_uuid": str(instance.uuid)}, delay_until=instance.live_from, ) + + +@receiver(environment_feature_version_published, sender=EnvironmentFeatureVersion) +def create_environment_feature_version_published_audit_log( + instance: EnvironmentFeatureVersion, **kwargs +) -> None: + create_environment_feature_version_published_audit_log_task.delay( + kwargs={"environment_feature_version_uuid": str(instance.uuid)} + ) diff --git a/api/features/versioning/serializers.py b/api/features/versioning/serializers.py index 1b91b6047a69..1e5666005f32 100644 --- a/api/features/versioning/serializers.py +++ b/api/features/versioning/serializers.py @@ -1,8 +1,11 @@ from rest_framework 
import serializers +from api_keys.user import APIKeyUser from features.serializers import CreateSegmentOverrideFeatureStateSerializer from features.versioning.models import EnvironmentFeatureVersion +from integrations.github.github import call_github_task from users.models import FFAdminUser +from webhooks.webhooks import WebhookEventType class EnvironmentFeatureVersionFeatureStateSerializer( @@ -14,6 +17,28 @@ class Meta(CreateSegmentOverrideFeatureStateSerializer.Meta): + ("feature",) ) + def save(self, **kwargs): + response = super().save(**kwargs) + + feature_state = self.instance + if ( + not feature_state.identity_id + and feature_state.feature.external_resources.exists() + and feature_state.environment.project.github_project.exists() + and feature_state.environment.project.organisation.github_config.exists() + ): + + call_github_task( + organisation_id=feature_state.environment.project.organisation_id, + type=WebhookEventType.FLAG_UPDATED.value, + feature=feature_state.feature, + segment_name=None, + url=None, + feature_states=[feature_state], + ) + + return response + class EnvironmentFeatureVersionSerializer(serializers.ModelSerializer): class Meta: @@ -37,8 +62,31 @@ class Meta: "is_live", "published_by", "created_by", + "feature", + "environment", + ) + + +class EnvironmentFeatureVersionRetrieveSerializer(EnvironmentFeatureVersionSerializer): + previous_version_uuid = serializers.SerializerMethodField() + + class Meta(EnvironmentFeatureVersionSerializer.Meta): + _fields = ( + "previous_version_uuid", + "feature", + "environment", ) + fields = EnvironmentFeatureVersionSerializer.Meta.fields + _fields + + def get_previous_version_uuid( + self, environment_feature_version: EnvironmentFeatureVersion + ) -> str | None: + previous_version = environment_feature_version.get_previous_version() + if not previous_version: + return None + return str(previous_version.uuid) + class EnvironmentFeatureVersionPublishSerializer(serializers.Serializer): live_from = serializers.DateTimeField(required=False) @@ -47,9 +95,20 @@ def save(self, **kwargs): live_from = self.validated_data.get("live_from") request = self.context["request"] - published_by = request.user if isinstance(request.user, FFAdminUser) else None - self.instance.publish(live_from=live_from, published_by=published_by) + published_by = None + published_by_api_key = None + + if isinstance(request.user, FFAdminUser): + published_by = request.user + elif isinstance(request.user, APIKeyUser): + published_by_api_key = request.user.key + + self.instance.publish( + live_from=live_from, + published_by=published_by, + published_by_api_key=published_by_api_key, + ) return self.instance diff --git a/api/features/versioning/sql/get_latest_versions.sql b/api/features/versioning/sql/get_latest_versions.sql index 3c1257e8605b..08ca0e8919da 100644 --- a/api/features/versioning/sql/get_latest_versions.sql +++ b/api/features/versioning/sql/get_latest_versions.sql @@ -14,6 +14,7 @@ join ( where efv2."deleted_at" is null and efv2."published_at" is not null + and efv2."live_from" <= %(live_from_before)s group by efv2."feature_id", efv2."environment_id" @@ -21,5 +22,8 @@ join ( efv1."feature_id" = latest_release_dates."feature_id" and efv1."environment_id" = latest_release_dates."environment_id" and efv1."live_from" = latest_release_dates."latest_release" +inner join + environments_environment e on e.id = efv1.environment_id where - efv1.environment_id = %(environment_id)s; \ No newline at end of file + (%(environment_id)s is not null and 
efv1.environment_id = %(environment_id)s) + or (%(api_key)s is not null and e.api_key = %(api_key)s); \ No newline at end of file diff --git a/api/features/versioning/tasks.py b/api/features/versioning/tasks.py index 5f6e9a7228ce..a16224b49955 100644 --- a/api/features/versioning/tasks.py +++ b/api/features/versioning/tasks.py @@ -3,6 +3,9 @@ from django.utils import timezone +from audit.constants import ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE +from audit.models import AuditLog +from audit.related_object_type import RelatedObjectType from features.models import FeatureState from features.versioning.models import EnvironmentFeatureVersion from features.versioning.schemas import ( @@ -127,7 +130,26 @@ def trigger_update_version_webhooks(environment_feature_version_uuid: str) -> No data = environment_feature_version_webhook_schema.dump(environment_feature_version) call_environment_webhooks( - environment=environment_feature_version.environment, + environment=environment_feature_version.environment_id, data=data, event_type=WebhookEventType.NEW_VERSION_PUBLISHED, ) + + +@register_task_handler() +def create_environment_feature_version_published_audit_log_task( + environment_feature_version_uuid: str, +) -> None: + environment_feature_version = EnvironmentFeatureVersion.objects.select_related( + "environment", "feature" + ).get(uuid=environment_feature_version_uuid) + + AuditLog.objects.create( + environment=environment_feature_version.environment, + related_object_type=RelatedObjectType.EF_VERSION.name, + related_object_uuid=environment_feature_version.uuid, + log=ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE + % environment_feature_version.feature.name, + author_id=environment_feature_version.published_by_id, + master_api_key_id=environment_feature_version.published_by_api_key_id, + ) diff --git a/api/features/versioning/urls.py b/api/features/versioning/urls.py index a9b97b358fe0..fc7bc9322966 100644 --- a/api/features/versioning/urls.py +++ b/api/features/versioning/urls.py @@ -4,6 +4,7 @@ from features.versioning.views import ( EnvironmentFeatureVersionFeatureStatesViewSet, + EnvironmentFeatureVersionRetrieveAPIView, EnvironmentFeatureVersionViewSet, ) @@ -34,4 +35,9 @@ "environments//features//", include(ef_version_fs_router.urls), ), + path( + "environment-feature-versions//", + EnvironmentFeatureVersionRetrieveAPIView.as_view(), + name="get-efv-by-uuid", + ), ] diff --git a/api/features/versioning/views.py b/api/features/versioning/views.py index 3275db292843..14fa4929bbcc 100644 --- a/api/features/versioning/views.py +++ b/api/features/versioning/views.py @@ -4,6 +4,7 @@ from django.utils.decorators import method_decorator from drf_yasg.utils import swagger_auto_schema from rest_framework.decorators import action +from rest_framework.generics import RetrieveAPIView from rest_framework.mixins import ( CreateModelMixin, DestroyModelMixin, @@ -25,11 +26,13 @@ from features.versioning.permissions import ( EnvironmentFeatureVersionFeatureStatePermissions, EnvironmentFeatureVersionPermissions, + EnvironmentFeatureVersionRetrievePermissions, ) from features.versioning.serializers import ( EnvironmentFeatureVersionFeatureStateSerializer, EnvironmentFeatureVersionPublishSerializer, EnvironmentFeatureVersionQuerySerializer, + EnvironmentFeatureVersionRetrieveSerializer, EnvironmentFeatureVersionSerializer, ) from projects.permissions import VIEW_PROJECT @@ -48,7 +51,6 @@ class EnvironmentFeatureVersionViewSet( CreateModelMixin, DestroyModelMixin, ): - serializer_class = 
EnvironmentFeatureVersionSerializer permission_classes = [IsAuthenticated, EnvironmentFeatureVersionPermissions] def __init__(self, *args, **kwargs): @@ -62,6 +64,8 @@ def get_serializer_class(self): match self.action: case "publish": return EnvironmentFeatureVersionPublishSerializer + case "retrieve": + return EnvironmentFeatureVersionRetrieveSerializer case _: return EnvironmentFeatureVersionSerializer @@ -131,6 +135,22 @@ def publish(self, request: Request, **kwargs) -> Response: return Response(serializer.data) +class EnvironmentFeatureVersionRetrieveAPIView(RetrieveAPIView): + """ + This is an additional endpoint to retrieve a specific version without needing + to provide the environment or feature as part of the URL. + """ + + permission_classes = [ + IsAuthenticated, + EnvironmentFeatureVersionRetrievePermissions, + ] + serializer_class = EnvironmentFeatureVersionRetrieveSerializer + + def get_queryset(self): + return EnvironmentFeatureVersion.objects.all() + + class EnvironmentFeatureVersionFeatureStatesViewSet( GenericViewSet, ListModelMixin, diff --git a/api/features/views.py b/api/features/views.py index 6ac0312b8ebe..889e4218c37f 100644 --- a/api/features/views.py +++ b/api/features/views.py @@ -353,8 +353,12 @@ def get_influx_data(self, request, pk, project_pk): query_serializer = GetInfluxDataQuerySerializer(data=request.query_params) query_serializer.is_valid(raise_exception=True) + date_start = f"-{query_serializer.data['period']}" events_list = get_multiple_event_list_for_feature( - feature_name=feature.name, **query_serializer.data + feature_name=feature.name, + date_start=date_start, + environment_id=query_serializer.data["environment_id"], + aggregate_every=query_serializer.data["aggregate_every"], ) serializer = FeatureInfluxDataSerializer(instance={"events_list": events_list}) return Response(serializer.data) diff --git a/api/features/workflows/core/models.py b/api/features/workflows/core/models.py index bcb59d9d13cd..83cfb5e4d33e 100644 --- a/api/features/workflows/core/models.py +++ b/api/features/workflows/core/models.py @@ -38,6 +38,7 @@ from environments.tasks import rebuild_environment_document from features.models import FeatureState from features.versioning.models import EnvironmentFeatureVersion +from features.versioning.signals import environment_feature_version_published from features.versioning.tasks import trigger_update_version_webhooks from features.workflows.core.exceptions import ( CannotApproveOwnChangeRequest, @@ -169,6 +170,9 @@ def _publish_environment_feature_versions( kwargs={"environment_id": self.environment_id}, delay_until=environment_feature_version.live_from, ) + environment_feature_version_published.send( + EnvironmentFeatureVersion, instance=environment_feature_version + ) def get_create_log_message(self, history_instance) -> typing.Optional[str]: return CHANGE_REQUEST_CREATED_MESSAGE % self.title @@ -246,14 +250,14 @@ def prevent_change_request_delete_if_committed(self) -> None: # feature states, we also want to prevent it at the ORM level. if self.committed_at and not ( self.environment.deleted_at - or (self._live_from and self._live_from > timezone.now()) + or (self.live_from and self.live_from > timezone.now()) ): raise ChangeRequestDeletionError( "Cannot delete a Change Request that has been committed." 
) @property - def _live_from(self) -> datetime | None: + def live_from(self) -> datetime | None: # First we check if there are feature states associated with the change request # and, if so, we return the live_from of the feature state with the earliest # live_from. diff --git a/api/integrations/github/client.py b/api/integrations/github/client.py index 83330b272cdd..b4307fd736f9 100644 --- a/api/integrations/github/client.py +++ b/api/integrations/github/client.py @@ -1,8 +1,52 @@ +import logging +from enum import Enum +from typing import Any + +import requests from django.conf import settings from github import Auth, Github +from integrations.github.constants import ( + GITHUB_API_CALLS_TIMEOUT, + GITHUB_API_URL, + GITHUB_API_VERSION, +) +from integrations.github.dataclasses import ( + IssueQueryParams, + PaginatedQueryParams, + RepoQueryParams, +) +from integrations.github.models import GithubConfiguration + +logger = logging.getLogger(__name__) + + +class ResourceType(Enum): + ISSUES = "issue" + PULL_REQUESTS = "pr" + + +def build_request_headers( + installation_id: str, use_jwt: bool = False +) -> dict[str, str]: + token = ( + generate_jwt_token(settings.GITHUB_APP_ID) + if use_jwt + else generate_token( + installation_id, + settings.GITHUB_APP_ID, + ) + ) -def generate_token(installation_id: str, app_id: int) -> str: + return { + "X-GitHub-Api-Version": GITHUB_API_VERSION, + "Accept": "application/vnd.github.v3+json", + "Authorization": f"Bearer {token}", + } + + +# TODO: Add test coverage for this function +def generate_token(installation_id: str, app_id: int) -> str: # pragma: no cover auth: Auth.AppInstallationAuth = Auth.AppAuth( app_id=int(app_id), private_key=settings.GITHUB_PEM ).get_installation_auth( @@ -12,3 +56,191 @@ def generate_token(installation_id: str, app_id: int) -> str: Github(auth=auth) token = auth.token return token + + +# TODO: Add test coverage for this function +def generate_jwt_token(app_id: int) -> str: # pragma: no cover + github_auth: Auth.AppAuth = Auth.AppAuth( + app_id=app_id, + private_key=settings.GITHUB_PEM, + ) + token = github_auth.create_jwt() + return token + + +def build_paginated_response( + results: list[dict[str, Any]], + response: requests.Response, + total_count: int | None = None, + incomplete_results: bool | None = None, +) -> dict[str, Any]: + data: dict[str, Any] = { + "results": results, + } + + if response.links.get("prev"): + data["previous"] = response.links.get("prev") + + if response.links.get("next"): + data["next"] = response.links.get("next") + + if total_count: + data["total_count"] = total_count + + if incomplete_results: + data["incomplete_results"] = incomplete_results + + return data + + +def post_comment_to_github( + installation_id: str, owner: str, repo: str, issue: str, body: str +) -> dict[str, Any]: + + url = f"{GITHUB_API_URL}repos/{owner}/{repo}/issues/{issue}/comments" + headers = build_request_headers(installation_id) + payload = {"body": body} + response = requests.post( + url, json=payload, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT + ) + response.raise_for_status() + + return response.json() + + +def delete_github_installation(installation_id: str) -> requests.Response: + url = f"{GITHUB_API_URL}app/installations/{installation_id}" + headers = build_request_headers(installation_id, use_jwt=True) + response = requests.delete(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) + response.raise_for_status() + return response + + +def fetch_search_github_resource( + resource_type: ResourceType, + 
organisation_id: int, + params: IssueQueryParams, +) -> dict[str, Any]: + github_configuration = GithubConfiguration.objects.get( + organisation_id=organisation_id, deleted_at__isnull=True + ) + # Build Github search query + q = ["q="] + if params.search_text: + q.append(params.search_text) + q.append(f"repo:{params.repo_owner}/{params.repo_name}") + q.append(f"is:{resource_type.value}") + if params.state: + q.append(f"is:{params.state}") + q.append("in:title") + if params.search_in_body: + q.append("in:body") + if params.search_in_comments: + q.append("in:comments") + if params.author: + q.append(f"author:{params.author}") + if params.assignee: + q.append(f"assignee:{params.assignee}") + + url = ( + f"{GITHUB_API_URL}search/issues?" + + " ".join(q) + + f"&per_page={params.page_size}&page={params.page}" + ) + headers: dict[str, str] = build_request_headers( + github_configuration.installation_id + ) + response = requests.get(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) + response.raise_for_status() + json_response = response.json() + results = [ + { + "html_url": i["html_url"], + "id": i["id"], + "title": i["title"], + "number": i["number"], + } + for i in json_response["items"] + ] + + return build_paginated_response( + results=results, + response=response, + total_count=json_response["total_count"], + incomplete_results=json_response["incomplete_results"], + ) + + +def fetch_github_repositories( + installation_id: str, + params: PaginatedQueryParams, +) -> dict[str, Any]: + url = ( + f"{GITHUB_API_URL}installation/repositories?" + + f"&per_page={params.page_size}&page={params.page}" + ) + + headers: dict[str, str] = build_request_headers(installation_id) + + response = requests.get(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) + json_response = response.json() + response.raise_for_status() + results = [ + { + "full_name": i["full_name"], + "id": i["id"], + "name": i["name"], + } + for i in json_response["repositories"] + ] + + return build_paginated_response(results, response, json_response["total_count"]) + + +def get_github_issue_pr_title_and_state( + organisation_id: int, resource_url: str +) -> dict[str, str]: + url_parts = resource_url.split("/") + owner = url_parts[-4] + repo = url_parts[-3] + number = url_parts[-1] + installation_id = GithubConfiguration.objects.get( + organisation_id=organisation_id, deleted_at__isnull=True + ).installation_id + + url = f"{GITHUB_API_URL}repos/{owner}/{repo}/issues/{number}" + headers = build_request_headers(installation_id) + response = requests.get(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) + response.raise_for_status() + json_response = response.json() + return {"title": json_response["title"], "state": json_response["state"]} + + +def fetch_github_repo_contributors( + organisation_id: int, + params: RepoQueryParams, +) -> dict[str, Any]: + installation_id = GithubConfiguration.objects.get( + organisation_id=organisation_id, deleted_at__isnull=True + ).installation_id + + url = ( + f"{GITHUB_API_URL}repos/{params.repo_owner}/{params.repo_name}/contributors?" 
+ + f"&per_page={params.page_size}&page={params.page}" + ) + + headers = build_request_headers(installation_id) + response = requests.get(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) + response.raise_for_status() + json_response = response.json() + + results = [ + { + "login": i["login"], + "avatar_url": i["avatar_url"], + "contributions": i["contributions"], + } + for i in json_response + ] + + return build_paginated_response(results, response) diff --git a/api/integrations/github/constants.py b/api/integrations/github/constants.py index 713e6e73a0e9..929ea690b87e 100644 --- a/api/integrations/github/constants.py +++ b/api/integrations/github/constants.py @@ -10,4 +10,8 @@ UNLINKED_FEATURE_TEXT = "### The feature flag `%s` was unlinked from the issue/PR" UPDATED_FEATURE_TEXT = "Flagsmith Feature `%s` has been updated:\n" DELETED_FEATURE_TEXT = "### The Feature Flag `%s` was deleted" +DELETED_SEGMENT_OVERRIDE_TEXT = ( + "### The Segment Override `%s` for Feature Flag `%s` was deleted" +) FEATURE_ENVIRONMENT_URL = "%s/project/%s/environment/%s/features?feature=%s&tab=%s" +GITHUB_API_CALLS_TIMEOUT = 10 diff --git a/api/integrations/github/dataclasses.py b/api/integrations/github/dataclasses.py new file mode 100644 index 000000000000..39ce51d999a4 --- /dev/null +++ b/api/integrations/github/dataclasses.py @@ -0,0 +1,55 @@ +from dataclasses import dataclass, field +from typing import Any, Optional + + +# Base Dataclasses +@dataclass +class GithubData: + installation_id: str + feature_id: int + feature_name: str + type: str + feature_states: list[dict[str, Any]] | None = None + url: str | None = None + project_id: int | None = None + segment_name: str | None = None + + @classmethod + def from_dict(cls, data_dict: dict[str, Any]) -> "GithubData": + return cls(**data_dict) + + +@dataclass +class CallGithubData: + event_type: str + github_data: GithubData + feature_external_resources: list[dict[str, Any]] + + +# Dataclasses for external calls to GitHub API +@dataclass +class PaginatedQueryParams: + page: int = field(default=1, kw_only=True) + page_size: int = field(default=100, kw_only=True) + + def __post_init__(self): + if self.page < 1: + raise ValueError("Page must be greater or equal than 1") + if self.page_size < 1 or self.page_size > 100: + raise ValueError("Page size must be an integer between 1 and 100") + + +@dataclass +class RepoQueryParams(PaginatedQueryParams): + repo_owner: str + repo_name: str + + +@dataclass +class IssueQueryParams(RepoQueryParams): + search_text: Optional[str] = None + state: Optional[str] = "open" + author: Optional[str] = None + assignee: Optional[str] = None + search_in_body: Optional[bool] = True + search_in_comments: Optional[bool] = False diff --git a/api/integrations/github/github.py b/api/integrations/github/github.py index 6e6d5f7758dc..ddbbdcc04698 100644 --- a/api/integrations/github/github.py +++ b/api/integrations/github/github.py @@ -1,70 +1,47 @@ import logging import typing -from dataclasses import dataclass +from dataclasses import asdict +from typing import Any -import requests from core.helpers import get_current_site_url -from django.conf import settings from django.utils.formats import get_format from features.models import Feature, FeatureState, FeatureStateValue -from integrations.github.client import generate_token from integrations.github.constants import ( DELETED_FEATURE_TEXT, + DELETED_SEGMENT_OVERRIDE_TEXT, FEATURE_ENVIRONMENT_URL, FEATURE_TABLE_HEADER, FEATURE_TABLE_ROW, - GITHUB_API_URL, LINK_FEATURE_TITLE, 
LINK_SEGMENT_TITLE, UNLINKED_FEATURE_TEXT, UPDATED_FEATURE_TEXT, ) +from integrations.github.dataclasses import GithubData from integrations.github.models import GithubConfiguration +from integrations.github.tasks import call_github_app_webhook_for_feature_state from webhooks.webhooks import WebhookEventType logger = logging.getLogger(__name__) -@dataclass -class GithubData: - installation_id: str - feature_id: int - feature_name: str - type: str - feature_states: typing.List[dict[str, typing.Any]] | None = None - url: str | None = None - project_id: int | None = None - - @classmethod - def from_dict(cls, data_dict: dict) -> "GithubData": - return cls(**data_dict) - - -def post_comment_to_github( - installation_id: str, owner: str, repo: str, issue: str, body: str -) -> typing.Optional[typing.Dict[str, typing.Any]]: - try: - token = generate_token( - installation_id, - settings.GITHUB_APP_ID, - ) - - url = f"{GITHUB_API_URL}repos/{owner}/{repo}/issues/{issue}/comments" - headers = { - "Accept": "application/vnd.github.v3+json", - "Authorization": f"Bearer {token}", - } +def handle_installation_deleted(payload: dict[str, Any]) -> None: + installation_id = payload.get("installation", {}).get("id") + if installation_id is not None: + try: + GithubConfiguration.objects.get(installation_id=installation_id).delete() + except GithubConfiguration.DoesNotExist: + logger.error( + f"GitHub Configuration with installation_id {installation_id} does not exist" + ) + else: + logger.error(f"The installation_id is not present in the payload: {payload}") - payload = {"body": body} - response = response = requests.post( - url, json=payload, headers=headers, timeout=10 - ) - return response.json() if response.status_code == 201 else None - except requests.RequestException as e: - logger.error(f" {e}") - return None +def handle_github_webhook_event(event_type: str, payload: dict[str, Any]) -> None: + if event_type == "installation" and payload.get("action") == "deleted": + handle_installation_deleted(payload) def generate_body_comment( @@ -72,18 +49,24 @@ def generate_body_comment( event_type: str, project_id: int, feature_id: int, - feature_states: typing.List[typing.Dict[str, typing.Any]], + feature_states: list[dict[str, typing.Any]], + segment_name: str | None = None, ) -> str: is_update = event_type == WebhookEventType.FLAG_UPDATED.value is_removed = event_type == WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value - delete_text = UNLINKED_FEATURE_TEXT % (name,) + is_segment_override_deleted = ( + event_type == WebhookEventType.SEGMENT_OVERRIDE_DELETED.value + ) if event_type == WebhookEventType.FLAG_DELETED.value: - return DELETED_FEATURE_TEXT % (name,) + return DELETED_FEATURE_TEXT % (name) if is_removed: - return delete_text + return UNLINKED_FEATURE_TEXT % (name) + + if is_segment_override_deleted and segment_name is not None: + return DELETED_SEGMENT_OVERRIDE_TEXT % (segment_name, name) result = UPDATED_FEATURE_TEXT % (name) if is_update else LINK_FEATURE_TITLE % (name) last_segment_name = "" @@ -131,6 +114,7 @@ def generate_data( typing.Union[list[FeatureState], list[FeatureStateValue]] | None ) = None, url: str | None = None, + segment_name: str | None = None, ) -> GithubData: if feature_states: feature_states_list = [] @@ -171,4 +155,32 @@ def generate_data( ), feature_states=feature_states_list if feature_states else None, project_id=feature.project_id, + segment_name=segment_name, + ) + + +def call_github_task( + organisation_id: str, + type: str, + feature: Feature, + segment_name: str | 
None, + url: str | None, + feature_states: typing.Union[list[typing.Any], list[typing.Any]] | None, +) -> None: + + github_configuration = GithubConfiguration.objects.get( + organisation_id=organisation_id + ) + + feature_data: GithubData = generate_data( + github_configuration=github_configuration, + feature=feature, + type=type, + url=url, + segment_name=segment_name, + feature_states=feature_states, + ) + + call_github_app_webhook_for_feature_state.delay( + args=(asdict(feature_data),), ) diff --git a/api/integrations/github/migrations/0003_auto_20240528_0640.py b/api/integrations/github/migrations/0003_auto_20240528_0640.py new file mode 100644 index 000000000000..a9861fe87cf9 --- /dev/null +++ b/api/integrations/github/migrations/0003_auto_20240528_0640.py @@ -0,0 +1,29 @@ +# Generated by Django 3.2.25 on 2024-05-28 06:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('github', '0002_auto_20240502_1949'), + ] + + operations = [ + migrations.AlterModelOptions( + name='githubconfiguration', + options={'ordering': ('id',)}, + ), + migrations.AlterModelOptions( + name='githubrepository', + options={'ordering': ('id',)}, + ), + migrations.RemoveConstraint( + model_name='githubrepository', + name='unique_repository_data', + ), + migrations.AddConstraint( + model_name='githubrepository', + constraint=models.UniqueConstraint(condition=models.Q(('deleted_at__isnull', True)), fields=('github_configuration', 'project', 'repository_owner', 'repository_name'), name='unique_repository_data'), + ), + ] diff --git a/api/integrations/github/models.py b/api/integrations/github/models.py index 22dcd6ee5333..532e0760b9ff 100644 --- a/api/integrations/github/models.py +++ b/api/integrations/github/models.py @@ -1,4 +1,5 @@ import logging +import re from core.models import SoftDeleteExportableModel from django.db import models @@ -31,6 +32,7 @@ class Meta: condition=models.Q(deleted_at__isnull=True), ) ] + ordering = ("id",) class GithubRepository(LifecycleModelMixin, SoftDeleteExportableModel): @@ -57,8 +59,10 @@ class Meta: "repository_name", ], name="unique_repository_data", + condition=models.Q(deleted_at__isnull=True), ) ] + ordering = ("id",) @hook(BEFORE_DELETE) def delete_feature_external_resources( @@ -66,12 +70,17 @@ def delete_feature_external_resources( ) -> None: from features.feature_external_resources.models import ( FeatureExternalResource, + ResourceType, ) + pattern = re.escape(f"/{self.repository_owner}/{self.repository_name}/") + FeatureExternalResource.objects.filter( feature_id__in=self.project.features.values_list("id", flat=True), type__in=[ - FeatureExternalResource.ResourceType.GITHUB_ISSUE, - FeatureExternalResource.ResourceType.GITHUB_PR, + ResourceType.GITHUB_ISSUE, + ResourceType.GITHUB_PR, ], + # Filter by url containing the repository owner and name + url__regex=pattern, ).delete() diff --git a/api/integrations/github/serializers.py b/api/integrations/github/serializers.py index 0330096cae2d..b3dc96c5263e 100644 --- a/api/integrations/github/serializers.py +++ b/api/integrations/github/serializers.py @@ -1,7 +1,13 @@ from rest_framework import serializers from rest_framework.serializers import ModelSerializer +from rest_framework_dataclasses.serializers import DataclassSerializer -from .models import GithubConfiguration, GithubRepository +from integrations.github.dataclasses import ( + IssueQueryParams, + PaginatedQueryParams, + RepoQueryParams, +) +from integrations.github.models import GithubConfiguration, 
GithubRepository class GithubConfigurationSerializer(ModelSerializer): @@ -14,6 +20,7 @@ class Meta: class GithubRepositorySerializer(ModelSerializer): class Meta: model = GithubRepository + optional_fields = ("search_text", "page") fields = ( "id", "github_configuration", @@ -27,6 +34,18 @@ class Meta: ) -class RepoQuerySerializer(serializers.Serializer): - repo_owner = serializers.CharField(required=True) - repo_name = serializers.CharField(required=True) +class PaginatedQueryParamsSerializer(DataclassSerializer): + class Meta: + dataclass = PaginatedQueryParams + + +class RepoQueryParamsSerializer(DataclassSerializer): + class Meta: + dataclass = RepoQueryParams + + +class IssueQueryParamsSerializer(DataclassSerializer): + class Meta: + dataclass = IssueQueryParams + + search_in_body = serializers.BooleanField(required=False, default=True) diff --git a/api/integrations/github/tasks.py b/api/integrations/github/tasks.py index e241af521475..384bb88f8960 100644 --- a/api/integrations/github/tasks.py +++ b/api/integrations/github/tasks.py @@ -1,28 +1,19 @@ import logging -from dataclasses import dataclass -from typing import Any +from typing import Any, List from urllib.parse import urlparse from features.models import Feature -from integrations.github.github import ( - GithubData, - generate_body_comment, - post_comment_to_github, -) +from integrations.github.client import post_comment_to_github +from integrations.github.dataclasses import CallGithubData from task_processor.decorators import register_task_handler from webhooks.webhooks import WebhookEventType logger = logging.getLogger(__name__) -@dataclass -class CallGithubData: - event_type: str - github_data: GithubData - feature_external_resources: list[dict[str, Any]] - - def send_post_request(data: CallGithubData) -> None: + from integrations.github.github import generate_body_comment + feature_name = data.github_data.feature_name feature_id = data.github_data.feature_id project_id = data.github_data.project_id @@ -31,8 +22,9 @@ def send_post_request(data: CallGithubData) -> None: data.github_data.feature_states if data.github_data.feature_states else None ) installation_id = data.github_data.installation_id + segment_name: str | None = data.github_data.segment_name body = generate_body_comment( - feature_name, event_type, project_id, feature_id, feature_states + feature_name, event_type, project_id, feature_id, feature_states, segment_name ) if ( @@ -71,11 +63,12 @@ def call_github_app_webhook_for_feature_state(event_data: dict[str, Any]) -> Non from features.feature_external_resources.models import ( FeatureExternalResource, ) + from integrations.github.github import GithubData github_event_data = GithubData.from_dict(event_data) def generate_feature_external_resources( - feature_external_resources: FeatureExternalResource, + feature_external_resources: List[FeatureExternalResource], ) -> list[dict[str, Any]]: return [ { @@ -85,10 +78,15 @@ def generate_feature_external_resources( for resource in feature_external_resources ] - if github_event_data.type == WebhookEventType.FLAG_DELETED.value: + if ( + github_event_data.type == WebhookEventType.FLAG_DELETED.value + or github_event_data.type == WebhookEventType.SEGMENT_OVERRIDE_DELETED.value + ): feature_external_resources = generate_feature_external_resources( - FeatureExternalResource.objects.filter( - feature_id=github_event_data.feature_id + list( + FeatureExternalResource.objects.filter( + feature_id=github_event_data.feature_id + ) ) ) data = CallGithubData( diff --git 
a/api/integrations/github/views.py b/api/integrations/github/views.py index c62f49d60300..4cc2f3efea57 100644 --- a/api/integrations/github/views.py +++ b/api/integrations/github/views.py @@ -1,6 +1,8 @@ import json +import logging import re from functools import wraps +from typing import Any, Callable import requests from django.conf import settings @@ -11,23 +13,31 @@ from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.response import Response -from integrations.github.client import generate_token -from integrations.github.constants import GITHUB_API_URL, GITHUB_API_VERSION +from integrations.github.client import ( + ResourceType, + delete_github_installation, + fetch_github_repo_contributors, + fetch_github_repositories, + fetch_search_github_resource, +) from integrations.github.exceptions import DuplicateGitHubIntegration +from integrations.github.github import handle_github_webhook_event from integrations.github.helpers import github_webhook_payload_is_valid from integrations.github.models import GithubConfiguration, GithubRepository from integrations.github.permissions import HasPermissionToGithubConfiguration from integrations.github.serializers import ( GithubConfigurationSerializer, GithubRepositorySerializer, - RepoQuerySerializer, + IssueQueryParamsSerializer, + PaginatedQueryParamsSerializer, + RepoQueryParamsSerializer, ) -from organisations.models import Organisation from organisations.permissions.permissions import GithubIsAdminOrganisation +logger = logging.getLogger(__name__) -def github_auth_required(func): +def github_auth_required(func): @wraps(func) def wrapper(request, organisation_pk): @@ -46,6 +56,34 @@ def wrapper(request, organisation_pk): return wrapper +def github_api_call_error_handler( + error: str | None = None, +) -> Callable[..., Callable[..., Any]]: + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs) -> Response: + default_error = "Failed to retrieve requested information from GitHub API." 
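+            # The wrapper below maps validation problems (ValueError) to 400 responses,
+            # and failed GitHub API calls (requests.RequestException) are logged and
+            # returned as 502 responses.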
+ try: + return func(*args, **kwargs) + except ValueError as e: + return Response( + data={"detail": (f"{error or default_error}" f" Error: {str(e)}")}, + content_type="application/json", + status=status.HTTP_400_BAD_REQUEST, + ) + except requests.RequestException as e: + logger.error(f"{error or default_error} Error: {str(e)}", exc_info=e) + return Response( + data={"detail": (f"{error or default_error}" f" Error: {str(e)}")}, + content_type="application/json", + status=status.HTTP_502_BAD_GATEWAY, + ) + + return wrapper + + return decorator + + class GithubConfigurationViewSet(viewsets.ModelViewSet): permission_classes = ( IsAuthenticated, @@ -71,6 +109,11 @@ def create(self, request, *args, **kwargs): if re.search(r"Key \(organisation_id\)=\(\d+\) already exists", str(e)): raise DuplicateGitHubIntegration + @github_api_call_error_handler(error="Failed to delete GitHub Installation.") + def destroy(self, request, *args, **kwargs): + delete_github_installation(self.get_object().installation_id) + return super().destroy(request, *args, **kwargs) + class GithubRepositoryViewSet(viewsets.ModelViewSet): permission_classes = ( @@ -86,9 +129,12 @@ def perform_create(self, serializer): serializer.save(github_configuration_id=github_configuration_id) def get_queryset(self): - return GithubRepository.objects.filter( - github_configuration=self.kwargs["github_pk"] - ) + try: + if github_pk := self.kwargs.get("github_pk"): + int(github_pk) + return GithubRepository.objects.filter(github_configuration=github_pk) + except ValueError: + raise ValidationError({"github_pk": ["Must be an integer"]}) def create(self, request, *args, **kwargs): @@ -108,103 +154,89 @@ def create(self, request, *args, **kwargs): @api_view(["GET"]) @permission_classes([IsAuthenticated, HasPermissionToGithubConfiguration]) @github_auth_required +@github_api_call_error_handler(error="Failed to retrieve GitHub pull requests.") def fetch_pull_requests(request, organisation_pk) -> Response: - organisation = Organisation.objects.get(id=organisation_pk) - github_configuration = GithubConfiguration.objects.get( - organisation=organisation, deleted_at__isnull=True - ) - token = generate_token( - github_configuration.installation_id, - settings.GITHUB_APP_ID, - ) - - query_serializer = RepoQuerySerializer(data=request.query_params) + query_serializer = IssueQueryParamsSerializer(data=request.query_params) if not query_serializer.is_valid(): return Response({"error": query_serializer.errors}, status=400) - repo_owner = query_serializer.validated_data.get("repo_owner") - repo_name = query_serializer.validated_data.get("repo_name") - - url = f"{GITHUB_API_URL}repos/{repo_owner}/{repo_name}/pulls" - - headers = { - "X-GitHub-Api-Version": GITHUB_API_VERSION, - "Accept": "application/vnd.github.v3+json", - "Authorization": f"Bearer {token}", - } - - try: - response = requests.get(url, headers=headers, timeout=10) - response.raise_for_status() - data = response.json() - return Response(data) - except requests.RequestException as e: - return Response({"error": str(e)}, status=500) + data = fetch_search_github_resource( + resource_type=ResourceType.PULL_REQUESTS, + organisation_id=organisation_pk, + params=query_serializer.validated_data, + ) + return Response( + data=data, + content_type="application/json", + status=status.HTTP_200_OK, + ) @api_view(["GET"]) @permission_classes([IsAuthenticated, HasPermissionToGithubConfiguration]) @github_auth_required -def fetch_issues(request, organisation_pk) -> Response: - organisation = 
Organisation.objects.get(id=organisation_pk) - github_configuration = GithubConfiguration.objects.get( - organisation=organisation, deleted_at__isnull=True - ) - token = generate_token( - github_configuration.installation_id, - settings.GITHUB_APP_ID, - ) - - query_serializer = RepoQuerySerializer(data=request.query_params) +@github_api_call_error_handler(error="Failed to retrieve GitHub issues.") +def fetch_issues(request, organisation_pk) -> Response | None: + query_serializer = IssueQueryParamsSerializer(data=request.query_params) if not query_serializer.is_valid(): return Response({"error": query_serializer.errors}, status=400) - repo_owner = query_serializer.validated_data.get("repo_owner") - repo_name = query_serializer.validated_data.get("repo_name") - - url = f"{GITHUB_API_URL}repos/{repo_owner}/{repo_name}/issues" - - headers = { - "X-GitHub-Api-Version": GITHUB_API_VERSION, - "Accept": "application/vnd.github.v3+json", - "Authorization": f"Bearer {token}", - } - - try: - response = requests.get(url, headers=headers, timeout=10) - response.raise_for_status() - data = response.json() - filtered_data = [issue for issue in data if "pull_request" not in issue] - return Response(filtered_data) - except requests.RequestException as e: - return Response({"error": str(e)}, status=500) + data = fetch_search_github_resource( + resource_type=ResourceType.ISSUES, + organisation_id=organisation_pk, + params=query_serializer.validated_data, + ) + return Response( + data=data, + content_type="application/json", + status=status.HTTP_200_OK, + ) @api_view(["GET"]) @permission_classes([IsAuthenticated, GithubIsAdminOrganisation]) -def fetch_repositories(request, organisation_pk: int) -> Response: +@github_api_call_error_handler(error="Failed to retrieve GitHub repositories.") +def fetch_repositories(request, organisation_pk: int) -> Response | None: + query_serializer = PaginatedQueryParamsSerializer(data=request.query_params) + if not query_serializer.is_valid(): + return Response({"error": query_serializer.errors}, status=400) installation_id = request.GET.get("installation_id") - token = generate_token( - installation_id, - settings.GITHUB_APP_ID, + if not installation_id: + return Response( + data={"detail": "Missing installation_id parameter"}, + content_type="application/json", + status=status.HTTP_400_BAD_REQUEST, + ) + + data = fetch_github_repositories( + installation_id=installation_id, params=query_serializer.validated_data + ) + return Response( + data=data, + content_type="application/json", + status=status.HTTP_200_OK, ) - url = f"{GITHUB_API_URL}installation/repositories" - headers = { - "X-GitHub-Api-Version": GITHUB_API_VERSION, - "Accept": "application/vnd.github.v3+json", - "Authorization": f"Bearer {token}", - } +@api_view(["GET"]) +@permission_classes([IsAuthenticated, HasPermissionToGithubConfiguration]) +@github_auth_required +@github_api_call_error_handler(error="Failed to retrieve GitHub repo contributors.") +def fetch_repo_contributors(request, organisation_pk) -> Response: + query_serializer = RepoQueryParamsSerializer(data=request.query_params) + if not query_serializer.is_valid(): + return Response({"error": query_serializer.errors}, status=400) + + response = fetch_github_repo_contributors( + organisation_id=organisation_pk, params=query_serializer.validated_data + ) - try: - response = requests.get(url, headers=headers, timeout=10) - response.raise_for_status() - data = response.json() - return Response(data) - except requests.RequestException as e: - return 
Response({"error": str(e)}, status=500) + return Response( + data=response, + content_type="application/json", + status=status.HTTP_200_OK, + ) @api_view(["POST"]) @@ -218,11 +250,8 @@ def github_webhook(request) -> Response: payload_body=payload, secret_token=secret, signature_header=signature ): data = json.loads(payload.decode("utf-8")) - # handle GitHub Webhook "installation" event with action type "deleted" - if github_event == "installation" and data["action"] == "deleted": - GithubConfiguration.objects.filter( - installation_id=data["installation"]["id"] - ).delete() + if github_event == "installation": + handle_github_webhook_event(event_type=github_event, payload=data) return Response({"detail": "Event processed"}, status=200) else: return Response({"detail": "Event bypassed"}, status=200) diff --git a/api/organisations/chargebee/__init__.py b/api/organisations/chargebee/__init__.py index a12b60779abb..66a60c5e27be 100644 --- a/api/organisations/chargebee/__init__.py +++ b/api/organisations/chargebee/__init__.py @@ -1,7 +1,7 @@ from .chargebee import ( # noqa - add_1000_api_calls, - add_1000_api_calls_scale_up, - add_1000_api_calls_start_up, + add_100k_api_calls, + add_100k_api_calls_scale_up, + add_100k_api_calls_start_up, add_single_seat, extract_subscription_metadata, get_customer_id_from_subscription_id, diff --git a/api/organisations/chargebee/chargebee.py b/api/organisations/chargebee/chargebee.py index 1f10c9f6630c..eb2eb48ec417 100644 --- a/api/organisations/chargebee/chargebee.py +++ b/api/organisations/chargebee/chargebee.py @@ -209,19 +209,19 @@ def add_single_seat(subscription_id: str): raise UpgradeSeatsError(msg) from e -def add_1000_api_calls_start_up( +def add_100k_api_calls_start_up( subscription_id: str, count: int = 1, invoice_immediately: bool = False ) -> None: - add_1000_api_calls(ADDITIONAL_API_START_UP_ADDON_ID, subscription_id, count) + add_100k_api_calls(ADDITIONAL_API_START_UP_ADDON_ID, subscription_id, count) -def add_1000_api_calls_scale_up( +def add_100k_api_calls_scale_up( subscription_id: str, count: int = 1, invoice_immediately: bool = False ) -> None: - add_1000_api_calls(ADDITIONAL_API_SCALE_UP_ADDON_ID, subscription_id, count) + add_100k_api_calls(ADDITIONAL_API_SCALE_UP_ADDON_ID, subscription_id, count) -def add_1000_api_calls( +def add_100k_api_calls( addon_id: str, subscription_id: str, count: int = 1, diff --git a/api/organisations/models.py b/api/organisations/models.py index 5cff7d353838..524457f96343 100644 --- a/api/organisations/models.py +++ b/api/organisations/models.py @@ -498,10 +498,12 @@ class OrganisationAPIBilling(models.Model): limits. This model is what allows subsequent billing runs to not double bill an organisation for the same use. - Even though api_overage is charge per thousand API calls, this + Even though api_overage is charge per 100k API calls, this class tracks the actual rounded count of API calls that are - billed for (i.e., 52000 for an account with 52233 api calls). - We're intentionally rounding down to the closest thousands. + billed for (i.e., 200000 for an account with 234323 api calls + and a allowed_30d_api_calls set to 100000, the overage is + beyond the allowed api calls). + We're intentionally rounding up to the closest hundred thousand. 
The option to set immediate_invoice means whether or not the API billing was processed immediately versus pushed onto the diff --git a/api/organisations/subscription_info_cache.py b/api/organisations/subscription_info_cache.py index b4e69d98980c..9fc0819b1d84 100644 --- a/api/organisations/subscription_info_cache.py +++ b/api/organisations/subscription_info_cache.py @@ -70,9 +70,9 @@ def _update_caches_with_influx_data( if not settings.INFLUXDB_TOKEN: return - for date_range, limit in (("30d", ""), ("7d", ""), ("24h", "100")): - key = f"api_calls_{date_range}" - org_calls = get_top_organisations(date_range, limit) + for date_start, limit in (("-30d", ""), ("-7d", ""), ("-24h", "100")): + key = f"api_calls_{date_start[1:]}" + org_calls = get_top_organisations(date_start, limit) for org_id, calls in org_calls.items(): subscription_info_cache = organisation_info_cache_dict.get(org_id) if not subscription_info_cache: diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index b9bae7ce1961..882927f2e2e6 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -1,4 +1,5 @@ import logging +import math from datetime import timedelta from app_analytics.influxdb_wrapper import get_current_api_usage @@ -12,8 +13,8 @@ from integrations.flagsmith.client import get_client from organisations import subscription_info_cache from organisations.chargebee import ( - add_1000_api_calls_scale_up, - add_1000_api_calls_start_up, + add_100k_api_calls_scale_up, + add_100k_api_calls_start_up, ) from organisations.models import ( APILimitAccessBlock, @@ -158,7 +159,7 @@ def _handle_api_usage_notifications(organisation: Organisation) -> None: period_starts_at = relativedelta(months=month_delta) + billing_starts_at days = relativedelta(now, period_starts_at).days - api_usage = get_current_api_usage(organisation.id, f"{days}d") + api_usage = get_current_api_usage(organisation.id, f"-{days}d") api_usage_percent = int(100 * api_usage / subscription_cache.allowed_30d_api_calls) @@ -230,11 +231,7 @@ def charge_for_api_call_count_overages(): OrganisationAPIUsageNotification.objects.filter( notified_at__gte=api_usage_notified_at, percent_usage__gte=100, - ) - .exclude( - organisation__api_billing__billed_at__gt=api_usage_notified_at, - ) - .values_list("organisation_id", flat=True) + ).values_list("organisation_id", flat=True) ) for organisation in Organisation.objects.filter( @@ -255,21 +252,32 @@ def charge_for_api_call_count_overages(): ): subscription_cache = organisation.subscription_information_cache api_usage = get_current_api_usage(organisation.id, "30d") - api_usage_ratio = api_usage / subscription_cache.allowed_30d_api_calls - if api_usage_ratio < 1.0: - logger.warning("API Usage does not match API Notification") + # Grace period for organisations < 200% of usage. 
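+        # For example, an organisation allowed 100,000 calls that has used 150,000
+        # is at 150% and is skipped here; it is only billed once usage reaches 200,000.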
+ if api_usage / subscription_cache.allowed_30d_api_calls < 2.0: + logger.info("API Usage below normal usage or grace period.") continue - api_overage = api_usage - subscription_cache.allowed_30d_api_calls + api_billings = OrganisationAPIBilling.objects.filter( + billed_at__gte=subscription_cache.current_billing_term_starts_at + ) + previous_api_overage = sum([ap.api_overage for ap in api_billings]) + + api_limit = subscription_cache.allowed_30d_api_calls + previous_api_overage + api_overage = api_usage - api_limit + if api_overage <= 0: + logger.info("API Usage below current API limit.") + continue if organisation.subscription.plan in {SCALE_UP, SCALE_UP_V2}: - add_1000_api_calls_scale_up( - organisation.subscription.subscription_id, api_overage // 1000 + add_100k_api_calls_scale_up( + organisation.subscription.subscription_id, + math.ceil(api_overage / 100_000), ) elif organisation.subscription.plan in {STARTUP, STARTUP_V2}: - add_1000_api_calls_start_up( - organisation.subscription.subscription_id, api_overage // 1000 + add_100k_api_calls_start_up( + organisation.subscription.subscription_id, + math.ceil(api_overage / 100_000), ) else: logger.error( @@ -281,7 +289,7 @@ def charge_for_api_call_count_overages(): # double billing on a subsequent task run. OrganisationAPIBilling.objects.create( organisation=organisation, - api_overage=(1000 * (api_overage // 1000)), + api_overage=(100_000 * math.ceil(api_overage / 100_000)), immediate_invoice=False, billed_at=now, ) diff --git a/api/organisations/urls.py b/api/organisations/urls.py index 5acd955b0dde..5504a0afa24f 100644 --- a/api/organisations/urls.py +++ b/api/organisations/urls.py @@ -13,6 +13,7 @@ GithubRepositoryViewSet, fetch_issues, fetch_pull_requests, + fetch_repo_contributors, fetch_repositories, ) from metadata.views import MetaDataModelFieldViewSet @@ -124,6 +125,11 @@ fetch_issues, name="get-github-issues", ), + path( + "/github/repo-contributors/", + fetch_repo_contributors, + name="get-github-repo-contributors", + ), path( "/github/pulls/", fetch_pull_requests, diff --git a/api/organisations/views.py b/api/organisations/views.py index 4c2e4c31f441..5fb5ca85a4fd 100644 --- a/api/organisations/views.py +++ b/api/organisations/views.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import logging +from datetime import timedelta from app_analytics.influxdb_wrapper import ( get_events_for_organisation, @@ -324,6 +325,10 @@ def get_queryset(self): billing_starts_at = subscription_cache.current_billing_term_starts_at now = timezone.now() + # Handle case where billing dates are not set (most often in a free plan) + # by defaulting to the last 30 days as a reasonable fallback. + billing_starts_at = billing_starts_at or now - timedelta(days=30) + month_delta = relativedelta(now, billing_starts_at).months period_starts_at = relativedelta(months=month_delta) + billing_starts_at diff --git a/api/poetry.lock b/api/poetry.lock index 5ed254966f9c..1d4b4999d013 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,127 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
- -[[package]] -name = "aiohttp" -version = "3.9.4" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, - {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, - {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, - {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, - {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, - {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, - {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, - {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, - {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, - {file = 
"aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, - {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, - {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, - {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiohttp-retry" -version = "2.8.3" -description = "Simple retry client for aiohttp" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiohttp_retry-2.8.3-py3-none-any.whl", hash = "sha256:3aeeead8f6afe48272db93ced9440cf4eda8b6fd7ee2abb25357b7eb28525b45"}, - {file = "aiohttp_retry-2.8.3.tar.gz", hash = "sha256:9a8e637e31682ad36e1ff9f8bcba912fcfc7d7041722bc901a4b948da4d71ea9"}, -] - -[package.dependencies] -aiohttp = "*" - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -188,6 +65,17 @@ files = [ lazy-object-proxy = ">=1.4.0" wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""} +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -1198,35 +1086,19 @@ files = [ [package.dependencies] typing_extensions = ">=3.6,<5" -[[package]] -name = "django-trench" -version = "0.2.3" -description = "REST Multi-factor authentication package for Django" -optional = false -python-versions = "*" -files = [ - {file = "django-trench-0.2.3.tar.gz", hash = "sha256:63e189a057c45198d178ea79337e690250b484fcd8ff2057c9fd4b3699639853"}, -] - -[package.dependencies] -pyotp = ">=2.2.6" -smsapi-client = ">=2.2.5" -twilio = ">=6.18.1" -yubico-client = ">=1.10.0" - [[package]] name = "djangorestframework" -version = "3.12.4" +version = "3.15.1" description = "Web APIs for Django, made easy." optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "djangorestframework-3.12.4-py3-none-any.whl", hash = "sha256:6d1d59f623a5ad0509fe0d6bfe93cbdfe17b8116ebc8eda86d45f6e16e819aaf"}, - {file = "djangorestframework-3.12.4.tar.gz", hash = "sha256:f747949a8ddac876e879190df194b925c177cdeb725a099db1460872f7c0a7f2"}, + {file = "djangorestframework-3.15.1-py3-none-any.whl", hash = "sha256:3ccc0475bce968608cf30d07fb17d8e52d1d7fc8bfe779c905463200750cbca6"}, + {file = "djangorestframework-3.15.1.tar.gz", hash = "sha256:f88fad74183dfc7144b2756d0d2ac716ea5b4c7c9840995ac3bfd8ec034333c1"}, ] [package.dependencies] -django = ">=2.2" +django = ">=3.0" [[package]] name = "djangorestframework-api-key" @@ -1239,6 +1111,25 @@ files = [ {file = "djangorestframework_api_key-2.2.0-py3-none-any.whl", hash = "sha256:c9884e52f2802994a02781eaba0a63a628a150ed55d58603d5c9c57a6657de43"}, ] +[[package]] +name = "djangorestframework-dataclasses" +version = "1.3.1" +description = "A dataclasses serializer for Django REST Framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "djangorestframework-dataclasses-1.3.1.tar.gz", hash = "sha256:d3796b5ce3f7266d525493c557ce7df9ffeae4367006250298ea4d94da4106c4"}, + {file = "djangorestframework_dataclasses-1.3.1-py3-none-any.whl", hash = "sha256:ca1aa1ca99b5306af874376f37355593bb3d1ac7d658d54e2790f9b303968065"}, +] + +[package.dependencies] +django = ">=2.0" +djangorestframework = ">=3.9" + +[package.extras] +dev = ["django-stubs", "djangorestframework-stubs", "mypy (>=1.5.1,<1.6.0)"] +test = ["coverage[toml]", "tox"] + [[package]] name = "djangorestframework-recursive" version = "0.1.2" @@ -1280,13 +1171,13 @@ test = ["cryptography", "pytest", "pytest-cov", "pytest-django", "pytest-xdist", [[package]] name = "djoser" -version = "2.2.0" +version = "2.2.2" description = "REST implementation of Django authentication system." 
optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "djoser-2.2.0-py3-none-any.whl", hash = "sha256:7b24718cdc51b4294b0abcf6bf0ead11aa3ca83652e351dfb04b7b8b15afa3b0"}, - {file = "djoser-2.2.0.tar.gz", hash = "sha256:4aa48502df870c8b5f07109ad4a749cc881c37bb5efa85cf5462ea695a0dca8c"}, + {file = "djoser-2.2.2-py3-none-any.whl", hash = "sha256:efb91ad61e4d5b8d664db029b5947df9d34078289ef2680a1ab665e047144b74"}, + {file = "djoser-2.2.2.tar.gz", hash = "sha256:9deb831a1c8781ceff325699e1407b4e1be8b4588e87071621d88ba31c09349f"}, ] [package.dependencies] @@ -1518,16 +1409,14 @@ name = "flagsmith-task-processor" version = "1.0.0" description = "Task Processor plugin for Flagsmith application." optional = false -python-versions = "^3.10" +python-versions = ">=3.10,<4.0" files = [] develop = false [package.dependencies] backoff = "~2.2.1" dj-database-url = "~0.5.0" -django = "^3.2.23" django-health-check = "~3.14.3" -djangorestframework = "~3.12.1" drf-yasg = "~1.21.6" environs = "~9.2.0" psycopg2-binary = "~2.9.5" @@ -1535,9 +1424,9 @@ simplejson = "~3.19.1" [package.source] type = "git" -url = "https://github.com/Flagsmith/flagsmith-task-processor.git" -reference = "init" -resolved_reference = "cf373be4ad6a7fcabc1c7b2c8125b753648d0e22" +url = "https://git@github.com/Flagsmith/flagsmith-task-processor" +reference = "0.1.0.alpha4" +resolved_reference = "a3af43a0d8cb8a3cf099585007fa4ea55ac17217" [[package]] name = "flake8" @@ -1569,76 +1458,6 @@ files = [ [package.dependencies] python-dateutil = ">=2.7" -[[package]] -name = "frozenlist" -version = "1.4.0" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = 
"frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, -] - [[package]] name = "genson" version = "1.2.2" @@ -2385,16 +2204,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2553,105 +2362,6 @@ portalocker = [ {version = ">=1.6,<3", markers = 
"python_version >= \"3.5\" and platform_system == \"Windows\""}, ] -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = 
"multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = 
"sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = 
"multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - [[package]] name = "mypy-boto3-dynamodb" version = "1.33.0" @@ -2663,6 +2373,9 @@ files = [ {file = "mypy_boto3_dynamodb-1.33.0-py3-none-any.whl", hash = "sha256:619ea2cc311ced0ecb44b6e8d3bf3dd851fb7c53a34128b4ff6d6e6a11fdd41f"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -3851,7 +3564,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3897,19 +3609,22 @@ files = [ {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, ] +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + [package.extras] hiredis = ["hiredis (>=1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.0" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"}, + {file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"}, ] [package.dependencies] @@ -4035,10 +3750,72 @@ files = [ {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, ] +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} + [package.extras] docs = ["ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = 
"sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + [[package]] name = "rudder-sdk-python" version = "2.0.2" @@ -4334,20 +4111,6 @@ files = [ optional = ["SQLAlchemy (>=1,<2)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<10)"] testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Werkzeug (<2)", "black (==21.7b0)", "boto3 (<=2)", "codecov (>=2,<3)", "databases (>=0.3)", "flake8 (>=3,<4)", "moto (<2)", "psutil (>=5,<6)", "pytest (>=5.4,<6)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] -[[package]] -name = "smsapi-client" -version = "2.7.0" -description = "SmsAPI client" -optional = false -python-versions = "*" -files = [ - {file = "smsapi-client-2.7.0.tar.gz", hash = "sha256:9de0932faaaf0c36fd279a11b5054f3ca24cf6bf58be3235316c15346ddcce82"}, - {file = "smsapi_client-2.7.0-py2.py3-none-any.whl", hash = "sha256:fd101101ed74fde0f24e663398f3648dd5a56954ec9576d509da087641f73da8"}, -] - -[package.dependencies] -requests = "*" - [[package]] name = "social-auth-app-django" version = "5.4.1" @@ -4438,24 +4201,6 @@ files = [ {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] -[[package]] -name = "twilio" -version = "8.5.0" -description = "Twilio API client and TwiML generator" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "twilio-8.5.0-py2.py3-none-any.whl", hash = "sha256:a6fdea2252cb7a8a47b5750d58abe1888bba9777482bac8e9bc3be47970facc7"}, - {file = "twilio-8.5.0.tar.gz", hash = "sha256:f55da9b485f9070aef09836e56230d0e6fd83811d2e6668f20d9057dd3668143"}, -] - -[package.dependencies] -aiohttp = ">=3.8.4" 
-aiohttp-retry = ">=2.8.3" -PyJWT = ">=2.0.0,<3.0.0" -pytz = "*" -requests = ">=2.0.0" - [[package]] name = "types-toml" version = "0.10.8.7" @@ -4603,19 +4348,18 @@ name = "workflows-logic" version = "2.0.0" description = "Workflows logic plugin for Flagsmith application." optional = false -python-versions = "^3.10" +python-versions = ">=3.10,<4.0" files = [] develop = false [package.dependencies] -django = "~3.2.23" -flagsmith_task_processor = {git = "https://github.com/Flagsmith/flagsmith-task-processor.git", branch = "init"} +flagsmith-task-processor = {git = "https://git@github.com/Flagsmith/flagsmith-task-processor", rev = "0.1.0.alpha4"} [package.source] type = "git" url = "https://github.com/flagsmith/flagsmith-workflows" -reference = "v2.3.1" -resolved_reference = "a06e192c72cbd508f7b7ac696d0e64be2f8632f1" +reference = "v2.3.4" +resolved_reference = "8024294ca27064f34e0f2c8b5219598f1e34eb0c" [[package]] name = "wrapt" @@ -4731,108 +4475,7 @@ files = [ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = 
"yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file 
= "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = 
"yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "yubico-client" -version = "1.13.0" -description = "Library for verifying Yubikey One Time Passwords (OTPs)" -optional = false -python-versions = "*" -files = [ - {file = "yubico-client-1.13.0.tar.gz", hash = "sha256:e3b86cd2a123105edfacad40551c7b26e9c1193d81ffe168ee704ebfd3d11162"}, - {file = "yubico_client-1.13.0-py2.py3-none-any.whl", hash = "sha256:59d818661f638e3f041fae44ba2c0569e4eb2a17865fa7cc9ad6577185c4d185"}, -] - -[package.dependencies] -requests = ">=2.7,<3.0" - [metadata] lock-version = "2.0" -python-versions = "~3.12" -content-hash = "6bb4ffb389ab5fa15347895d977dfca1703ef1b31efc93f386daf410f1a8aeb3" +python-versions = ">=3.11, <3.13" +content-hash = "5e1a10d25dfba65b6f275786cd40158cf377d0eab2af331c5f4d8f9dc88d233b" diff --git a/api/pyproject.toml b/api/pyproject.toml index e9875b48dce3..e1b0e104de53 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -53,17 +53,17 @@ authors = ["Flagsmith "] readme = "readme.md" [tool.poetry.dependencies] -python = "~3.12" +python = ">=3.11, <3.13" django = "~3.2.25" rudder-sdk-python = "~2.0.2" segment-analytics-python = "~2.2.3" backoff = "~2.2.1" appdirs = "~1.4.4" django-cors-headers = "~3.5.0" -djangorestframework = "~3.12.1" +djangorestframework = "~3.15.1" gunicorn = "~22.0.0" pyparsing = "~2.4.7" -requests = "~2.31.0" +requests = "~2.32.0" six = "~1.16.0" whitenoise = "~6.0.0" dj-database-url = "~0.5.0" @@ -99,8 +99,7 @@ pymemcache = "~4.0.0" google-re2 = "^1.0" django-softdelete = "~0.10.5" simplejson = "~3.19.1" -djoser = "~2.2.0" -django-trench = "~0.2.3" +djoser = "~2.2.2" django-storages = "~1.10.1" django-environ = "~0.4.5" influxdb-client = "~1.28.0" @@ -114,6 +113,8 @@ python-gnupg = "^0.5.1" django-redis = 
"^5.4.0" pygithub = "2.1.1" hubspot-api-client = "^8.2.1" +djangorestframework-dataclasses = "^1.3.1" +pyotp = "^2.9.0" [tool.poetry.group.auth-controller] optional = true @@ -137,7 +138,7 @@ flagsmith-ldap = { git = "https://github.com/flagsmith/flagsmith-ldap", tag = "v optional = true [tool.poetry.group.workflows.dependencies] -workflows-logic = { git = "https://github.com/flagsmith/flagsmith-workflows", tag = "v2.3.1" } +workflows-logic = { git = "https://github.com/flagsmith/flagsmith-workflows", tag = "v2.3.4" } [tool.poetry.group.dev.dependencies] django-test-migrations = "~1.2.0" diff --git a/api/sales_dashboard/views.py b/api/sales_dashboard/views.py index 3c4cfbfb84e5..d03d167b978b 100644 --- a/api/sales_dashboard/views.py +++ b/api/sales_dashboard/views.py @@ -121,7 +121,7 @@ def get_context_data(self, **kwargs): @staff_member_required -def organisation_info(request, organisation_id): +def organisation_info(request: HttpRequest, organisation_id: int) -> HttpResponse: organisation = get_object_or_404( Organisation.objects.select_related("subscription"), pk=organisation_id ) @@ -154,8 +154,10 @@ def organisation_info(request, organisation_id): date_range = request.GET.get("date_range", "180d") context["date_range"] = date_range + date_start = f"-{date_range}" + date_stop = "now()" event_list, labels = get_event_list_for_organisation( - organisation_id, date_range + organisation_id, date_start, date_stop ) context["event_list"] = event_list context["traits"] = mark_safe(json.dumps(event_list["traits"])) @@ -168,7 +170,9 @@ def organisation_info(request, organisation_id): context["api_calls"] = { # TODO: this could probably be reduced to a single influx request # rather than 3 - range_: get_events_for_organisation(organisation_id, date_range=range_) + range_: get_events_for_organisation( + organisation_id, date_start=f"-{range_}" + ) for range_ in ("24h", "7d", "30d") } diff --git a/api/segments/serializers.py b/api/segments/serializers.py index 1909ddebafd7..fccbeecf45aa 100644 --- a/api/segments/serializers.py +++ b/api/segments/serializers.py @@ -61,7 +61,9 @@ def validate(self, attrs): return attrs def get_project(self, validated_data: dict = None) -> Project: - return validated_data.get("project") + return validated_data.get("project") or Project.objects.get( + id=self.context["view"].kwargs["project_pk"] + ) def create(self, validated_data): project = validated_data["project"] diff --git a/api/segments/views.py b/api/segments/views.py index 63673b02447a..566dc9a6c1a1 100644 --- a/api/segments/views.py +++ b/api/segments/views.py @@ -50,6 +50,7 @@ def get_queryset(self): "rules__rules", "rules__rules__conditions", "rules__rules__rules", + "metadata", ) query_serializer = SegmentListQuerySerializer(data=self.request.query_params) diff --git a/api/task_processor/decorators.py b/api/task_processor/decorators.py index 793875c99a47..1399b5344608 100644 --- a/api/task_processor/decorators.py +++ b/api/task_processor/decorators.py @@ -99,9 +99,10 @@ def delay( def run_in_thread( self, *, - args: tuple[typing.Any] = (), + args: tuple[typing.Any, ...] 
= (), kwargs: dict[str, typing.Any] | None = None, ) -> None: + kwargs = kwargs or {} _validate_inputs(*args, **kwargs) thread = Thread(target=self.unwrapped, args=args, kwargs=kwargs, daemon=True) diff --git a/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py b/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py index 81e1d7c5996e..c52d5c5c569c 100644 --- a/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py +++ b/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py @@ -5,292 +5,322 @@ import pyotp from django.conf import settings from django.core import mail -from django.core.cache import cache from django.urls import reverse from rest_framework import status -from rest_framework.test import APIClient, APITestCase, override_settings +from rest_framework.test import APIClient, override_settings from organisations.invites.models import Invite from organisations.models import Organisation from users.models import FFAdminUser -class AuthIntegrationTestCase(APITestCase): - test_email = "test@example.com" +def test_register_and_login_workflows(db: None, api_client: APIClient) -> None: + # try to register without first_name / last_name + email = "test@example.com" password = FFAdminUser.objects.make_random_password() + register_data = { + "email": email, + "password": password, + "re_password": password, + } + register_url = reverse("api-v1:custom_auth:ffadminuser-list") + register_response_fail = api_client.post(register_url, data=register_data) - def setUp(self) -> None: - self.organisation = Organisation.objects.create(name="Test Organisation") - - def tearDown(self) -> None: - FFAdminUser.objects.all().delete() - cache.clear() - - def test_register_and_login_workflows(self): - # try to register without first_name / last_name - register_data = { - "email": self.test_email, - "password": self.password, - "re_password": self.password, - } - register_url = reverse("api-v1:custom_auth:ffadminuser-list") - register_response_fail = self.client.post(register_url, data=register_data) - # should return 400 - assert register_response_fail.status_code == status.HTTP_400_BAD_REQUEST - - # now register with full data - register_data["first_name"] = "test" - register_data["last_name"] = "user" - register_response_success = self.client.post(register_url, data=register_data) - assert register_response_success.status_code == status.HTTP_201_CREATED - assert register_response_success.json()["key"] - - # now verify we can login with the same credentials - new_login_data = { - "email": self.test_email, - "password": self.password, - } - login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") - new_login_response = self.client.post(login_url, data=new_login_data) - assert new_login_response.status_code == status.HTTP_200_OK - assert new_login_response.json()["key"] - - # Oh no, we forgot our password - reset_password_url = reverse("api-v1:custom_auth:ffadminuser-reset-password") - reset_password_data = {"email": self.test_email} - reset_password_response = self.client.post( - reset_password_url, data=reset_password_data - ) - # API docs are incorrect, 204 is the correct status code for this endpoint - assert reset_password_response.status_code == status.HTTP_204_NO_CONTENT - # verify that the user has been emailed with their reset code - assert len(mail.outbox) == 1 - # get the url and grab the uid and token - url = re.findall(r"http\:\/\/.*", mail.outbox[0].body)[0] - split_url = url.split("/") - 
uid = split_url[-2] - token = split_url[-1] - - # confirm the reset and set the new password - new_password = FFAdminUser.objects.make_random_password() - reset_password_confirm_data = { - "uid": uid, - "token": token, - "new_password": new_password, - "re_new_password": new_password, - } - reset_password_confirm_url = reverse( - "api-v1:custom_auth:ffadminuser-reset-password-confirm" - ) - reset_password_confirm_response = self.client.post( - reset_password_confirm_url, data=reset_password_confirm_data - ) - assert reset_password_confirm_response.status_code == status.HTTP_204_NO_CONTENT - - # now check we can login with the new details - new_login_data = { - "email": self.test_email, - "password": new_password, - } - new_login_response = self.client.post(login_url, data=new_login_data) - assert new_login_response.status_code == status.HTTP_200_OK - assert new_login_response.json()["key"] - - @override_settings(ALLOW_REGISTRATION_WITHOUT_INVITE=False) - def test_cannot_register_without_invite_if_disabled(self): - # Given - register_data = { - "email": self.test_email, - "password": self.password, - "first_name": "test", - "last_name": "register", - } - - # When - url = reverse("api-v1:custom_auth:ffadminuser-list") - response = self.client.post(url, data=register_data) + assert register_response_fail.status_code == status.HTTP_400_BAD_REQUEST - # Then - assert response.status_code == status.HTTP_403_FORBIDDEN - - @override_settings(ALLOW_REGISTRATION_WITHOUT_INVITE=False) - def test_can_register_with_invite_if_registration_disabled_without_invite(self): - # Given - register_data = { - "email": self.test_email, - "password": self.password, - "first_name": "test", - "last_name": "register", - } - Invite.objects.create(email=self.test_email, organisation=self.organisation) - - # When - url = reverse("api-v1:custom_auth:ffadminuser-list") - response = self.client.post(url, data=register_data) + # now register with full data + register_data["first_name"] = "test" + register_data["last_name"] = "user" + register_response_success = api_client.post(register_url, data=register_data) + assert register_response_success.status_code == status.HTTP_201_CREATED + assert register_response_success.json()["key"] - # Then - assert response.status_code == status.HTTP_201_CREATED + # now verify we can login with the same credentials + new_login_data = { + "email": email, + "password": password, + } + login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") + new_login_response = api_client.post(login_url, data=new_login_data) + assert new_login_response.status_code == status.HTTP_200_OK + assert new_login_response.json()["key"] + + # Oh no, we forgot our password + reset_password_url = reverse("api-v1:custom_auth:ffadminuser-reset-password") + reset_password_data = {"email": email} + reset_password_response = api_client.post( + reset_password_url, data=reset_password_data + ) + # API docs are incorrect, 204 is the correct status code for this endpoint + assert reset_password_response.status_code == status.HTTP_204_NO_CONTENT + # verify that the user has been emailed with their reset code + assert len(mail.outbox) == 1 + # get the url and grab the uid and token + url = re.findall(r"http\:\/\/.*", mail.outbox[0].body)[0] + split_url = url.split("/") + uid = split_url[-2] + token = split_url[-1] + + # confirm the reset and set the new password + new_password = FFAdminUser.objects.make_random_password() + reset_password_confirm_data = { + "uid": uid, + "token": token, + "new_password": new_password, + 
"re_new_password": new_password, + } + reset_password_confirm_url = reverse( + "api-v1:custom_auth:ffadminuser-reset-password-confirm" + ) + reset_password_confirm_response = api_client.post( + reset_password_confirm_url, data=reset_password_confirm_data + ) + assert reset_password_confirm_response.status_code == status.HTTP_204_NO_CONTENT - @override_settings( - DJOSER=ChainMap( - {"SEND_ACTIVATION_EMAIL": True, "SEND_CONFIRMATION_EMAIL": False}, - settings.DJOSER, - ) + # now check we can login with the new details + new_login_data = { + "email": email, + "password": new_password, + } + new_login_response = api_client.post(login_url, data=new_login_data) + assert new_login_response.status_code == status.HTTP_200_OK + assert new_login_response.json()["key"] + + +@override_settings(ALLOW_REGISTRATION_WITHOUT_INVITE=False) +def test_cannot_register_without_invite_if_disabled( + db: None, api_client: APIClient +) -> None: + # Given + email = "test@example.com" + password = FFAdminUser.objects.make_random_password() + register_data = { + "email": email, + "password": password, + "first_name": "test", + "last_name": "register", + } + + # When + url = reverse("api-v1:custom_auth:ffadminuser-list") + response = api_client.post(url, data=register_data) + + # Then + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@override_settings(ALLOW_REGISTRATION_WITHOUT_INVITE=False) +def test_can_register_with_invite_if_registration_disabled_without_invite( + db: None, + api_client: APIClient, +) -> None: + # Given + email = "test@example.com" + password = FFAdminUser.objects.make_random_password() + organisation = Organisation.objects.create(name="Test Organisation") + register_data = { + "email": email, + "password": password, + "first_name": "test", + "last_name": "register", + } + Invite.objects.create(email=email, organisation=organisation) + + # When + url = reverse("api-v1:custom_auth:ffadminuser-list") + response = api_client.post(url, data=register_data) + + # Then + assert response.status_code == status.HTTP_201_CREATED + + +@override_settings( + DJOSER=ChainMap( + {"SEND_ACTIVATION_EMAIL": True, "SEND_CONFIRMATION_EMAIL": False}, + settings.DJOSER, ) - def test_registration_and_login_with_user_activation_flow(self): - """ - Test user registration and login flow via email activation. 
- By default activation flow is disabled - """ - - # Given user registration data - register_data = { - "email": self.test_email, - "password": self.password, - "first_name": "test", - "last_name": "register", - } - - # When register - register_url = reverse("api-v1:custom_auth:ffadminuser-list") - result = self.client.post( - register_url, data=register_data, status_code=status.HTTP_201_CREATED - ) - - # Then success and account inactive - self.assertIn("key", result.data) - self.assertIn("is_active", result.data) - assert not result.data["is_active"] - - new_user = FFAdminUser.objects.latest("id") - self.assertEqual(new_user.email, register_data["email"]) - self.assertFalse(new_user.is_active) - - # And login should fail as we have not activated account yet - login_data = { - "email": self.test_email, - "password": self.password, - } - login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") - failed_login_res = self.client.post(login_url, data=login_data) - # should return 400 - assert failed_login_res.status_code == status.HTTP_400_BAD_REQUEST - - # verify that the user has been emailed activation email - # and extract uid and token for account activation - assert len(mail.outbox) == 1 - # get the url and grab the uid and token - url = re.findall(r"http\:\/\/.*", mail.outbox[0].body)[0] - split_url = url.split("/") - uid = split_url[-2] - token = split_url[-1] - - activate_data = {"uid": uid, "token": token} - - activate_url = reverse("api-v1:custom_auth:ffadminuser-activation") - # And activate account - self.client.post( - activate_url, data=activate_data, status_code=status.HTTP_204_NO_CONTENT - ) - - # And login success - login_result = self.client.post(login_url, data=login_data) - assert login_result.status_code == status.HTTP_200_OK - self.assertIn("key", login_result.data) - - def test_login_workflow_with_mfa_enabled(self): - # register the user - register_data = { - "email": self.test_email, - "password": self.password, - "re_password": self.password, - "first_name": "test", - "last_name": "user", - } - register_url = reverse("api-v1:custom_auth:ffadminuser-list") - register_response = self.client.post(register_url, data=register_data) - assert register_response.status_code == status.HTTP_201_CREATED - key = register_response.json()["key"] - - # authenticate the test client - self.client.credentials(HTTP_AUTHORIZATION=f"Token {key}") - - # create an MFA method - create_mfa_method_url = reverse( - "api-v1:custom_auth:mfa-activate", kwargs={"method": "app"} - ) - create_mfa_response = self.client.post(create_mfa_method_url) - assert create_mfa_response.status_code == status.HTTP_200_OK - secret = create_mfa_response.json()["secret"] - - # confirm the MFA method - totp = pyotp.TOTP(secret) - confirm_mfa_data = {"code": totp.now()} - confirm_mfa_method_url = reverse( - "api-v1:custom_auth:mfa-activate-confirm", kwargs={"method": "app"} - ) - confirm_mfa_method_response = self.client.post( - confirm_mfa_method_url, data=confirm_mfa_data - ) - assert confirm_mfa_method_response - - # now login should return an ephemeral token rather than a token - login_data = {"email": self.test_email, "password": self.password} - self.client.logout() - login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") - login_response = self.client.post(login_url, data=login_data) - assert login_response.status_code == status.HTTP_200_OK - ephemeral_token = login_response.json()["ephemeral_token"] - - # now we can confirm the login - confirm_login_data = {"ephemeral_token": 
ephemeral_token, "code": totp.now()} - login_confirm_url = reverse("api-v1:custom_auth:mfa-authtoken-login-code") - login_confirm_response = self.client.post( - login_confirm_url, data=confirm_login_data - ) - assert login_confirm_response.status_code == status.HTTP_200_OK - key = login_confirm_response.json()["key"] - - # and verify that we can use the token to access the API - self.client.credentials(HTTP_AUTHORIZATION=f"Token {key}") - current_user_url = reverse("api-v1:custom_auth:ffadminuser-me") - current_user_response = self.client.get(current_user_url) - assert current_user_response.status_code == status.HTTP_200_OK - assert current_user_response.json()["email"] == self.test_email - - @override_settings() - def test_throttle_login_workflows(self): - # verify that a throttle rate exists already then set it - # to something easier to reliably test - assert settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["login"] - settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["login"] = "1/sec" - - # register the user - register_data = { - "email": self.test_email, - "password": self.password, - "re_password": self.password, - "first_name": "test", - "last_name": "user", - } - register_url = reverse("api-v1:custom_auth:ffadminuser-list") - register_response = self.client.post(register_url, data=register_data) - assert register_response.status_code == status.HTTP_201_CREATED - assert register_response.json()["key"] - - # verify we can login with credentials - login_data = { - "email": self.test_email, - "password": self.password, - } - login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") - login_response = self.client.post(login_url, data=login_data) - assert login_response.status_code == status.HTTP_200_OK - assert login_response.json()["key"] - - # try login in again, should deny, current limit 1 per second - login_response = self.client.post(login_url, data=login_data) - assert login_response.status_code == status.HTTP_429_TOO_MANY_REQUESTS +) +def test_registration_and_login_with_user_activation_flow( + db: None, + api_client: APIClient, +) -> None: + """ + Test user registration and login flow via email activation. 
+ By default activation flow is disabled + """ + + # Given user registration data + email = "test@example.com" + password = FFAdminUser.objects.make_random_password() + register_data = { + "email": email, + "password": password, + "first_name": "test", + "last_name": "register", + } + + # When register + register_url = reverse("api-v1:custom_auth:ffadminuser-list") + result = api_client.post( + register_url, data=register_data, status_code=status.HTTP_201_CREATED + ) + + # Then success and account inactive + assert "key" in result.data + assert "is_active" in result.data + assert not result.data["is_active"] + + new_user = FFAdminUser.objects.latest("id") + assert new_user.email == register_data["email"] + assert new_user.is_active is False + + # And login should fail as we have not activated account yet + login_data = { + "email": email, + "password": password, + } + login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") + failed_login_res = api_client.post(login_url, data=login_data) + # should return 400 + assert failed_login_res.status_code == status.HTTP_400_BAD_REQUEST + + # verify that the user has been emailed activation email + # and extract uid and token for account activation + assert len(mail.outbox) == 1 + # get the url and grab the uid and token + url = re.findall(r"http\:\/\/.*", mail.outbox[0].body)[0] + split_url = url.split("/") + uid = split_url[-2] + token = split_url[-1] + + activate_data = {"uid": uid, "token": token} + + activate_url = reverse("api-v1:custom_auth:ffadminuser-activation") + # And activate account + api_client.post( + activate_url, data=activate_data, status_code=status.HTTP_204_NO_CONTENT + ) + + # And login success + login_result = api_client.post(login_url, data=login_data) + assert login_result.status_code == status.HTTP_200_OK + assert "key" in login_result.data + + +def test_login_workflow_with_mfa_enabled( + db: None, + api_client: APIClient, +) -> None: + email = "test@example.com" + password = FFAdminUser.objects.make_random_password() + register_data = { + "email": email, + "password": password, + "re_password": password, + "first_name": "test", + "last_name": "user", + } + register_url = reverse("api-v1:custom_auth:ffadminuser-list") + register_response = api_client.post(register_url, data=register_data) + assert register_response.status_code == status.HTTP_201_CREATED + key = register_response.json()["key"] + + # authenticate the test client + api_client.credentials(HTTP_AUTHORIZATION=f"Token {key}") + + # create an MFA method + create_mfa_method_url = reverse( + "api-v1:custom_auth:mfa-activate", kwargs={"method": "app"} + ) + create_mfa_response = api_client.post(create_mfa_method_url) + assert create_mfa_response.status_code == status.HTTP_200_OK + secret = create_mfa_response.json()["secret"] + + # confirm the MFA method + totp = pyotp.TOTP(secret) + confirm_mfa_data = {"code": totp.now()} + confirm_mfa_method_url = reverse( + "api-v1:custom_auth:mfa-activate-confirm", kwargs={"method": "app"} + ) + confirm_mfa_method_response = api_client.post( + confirm_mfa_method_url, data=confirm_mfa_data + ) + assert confirm_mfa_method_response.status_code == status.HTTP_200_OK + backup_codes = confirm_mfa_method_response.json()["backup_codes"] + + # now login should return an ephemeral token rather than a token + login_data = {"email": email, "password": password} + api_client.logout() + login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") + login_response = api_client.post(login_url, data=login_data) + assert 
login_response.status_code == status.HTTP_200_OK + ephemeral_token = login_response.json()["ephemeral_token"] + + # now we can confirm the login + confirm_login_data = {"ephemeral_token": ephemeral_token, "code": totp.now()} + login_confirm_url = reverse("api-v1:custom_auth:mfa-authtoken-login-code") + login_confirm_response = api_client.post(login_confirm_url, data=confirm_login_data) + assert login_confirm_response.status_code == status.HTTP_200_OK + key = login_confirm_response.json()["key"] + + # Login with backup code should also work + api_client.logout() + login_response = api_client.post(login_url, data=login_data) + assert login_response.status_code == status.HTTP_200_OK + ephemeral_token = login_response.json()["ephemeral_token"] + confirm_login_data = { + "ephemeral_token": ephemeral_token, + "code": backup_codes[0], + } + login_confirm_response = api_client.post(login_confirm_url, data=confirm_login_data) + assert login_confirm_response.status_code == status.HTTP_200_OK + key = login_confirm_response.json()["key"] + + # and verify that we can use the token to access the API + api_client.credentials(HTTP_AUTHORIZATION=f"Token {key}") + current_user_url = reverse("api-v1:custom_auth:ffadminuser-me") + current_user_response = api_client.get(current_user_url) + assert current_user_response.status_code == status.HTTP_200_OK + assert current_user_response.json()["email"] == email + + +def test_throttle_login_workflows( + api_client: APIClient, + db: None, + reset_cache: None, +) -> None: + # verify that a throttle rate exists already then set it + # to something easier to reliably test + assert settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["login"] + settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["login"] = "1/sec" + + email = "test@example.com" + password = FFAdminUser.objects.make_random_password() + register_data = { + "email": email, + "password": password, + "re_password": password, + "first_name": "test", + "last_name": "user", + } + register_url = reverse("api-v1:custom_auth:ffadminuser-list") + register_response = api_client.post(register_url, data=register_data) + assert register_response.status_code == status.HTTP_201_CREATED + assert register_response.json()["key"] + + # verify we can login with credentials + login_data = { + "email": email, + "password": password, + } + login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") + login_response = api_client.post(login_url, data=login_data) + assert login_response.status_code == status.HTTP_200_OK + assert login_response.json()["key"] + + # try login in again, should deny, current limit 1 per second + login_response = api_client.post(login_url, data=login_data) + assert login_response.status_code == status.HTTP_429_TOO_MANY_REQUESTS def test_throttle_signup(api_client, settings, user_password, db, reset_cache): diff --git a/api/tests/test_helpers.py b/api/tests/test_helpers.py index ed75aca5dca6..3608f0470818 100644 --- a/api/tests/test_helpers.py +++ b/api/tests/test_helpers.py @@ -36,7 +36,7 @@ def generate_segment_data( def fix_issue_3869(): """ - Hack to get around Pydantic issue with Freeze Gun. + Hack to get around Pydantic issue with FreezeGun. 
https://github.com/Flagsmith/flagsmith/issues/3869 """ diff --git a/api/tests/unit/app_analytics/test_analytics_db_service.py b/api/tests/unit/app_analytics/test_analytics_db_service.py index 76f127bda0d3..7941d3d1de4a 100644 --- a/api/tests/unit/app_analytics/test_analytics_db_service.py +++ b/api/tests/unit/app_analytics/test_analytics_db_service.py @@ -1,4 +1,4 @@ -from datetime import date, timedelta +from datetime import date, datetime, timedelta import pytest from app_analytics.analytics_db_service import ( @@ -8,6 +8,10 @@ get_usage_data, get_usage_data_from_local_db, ) +from app_analytics.constants import ( + CURRENT_BILLING_PERIOD, + PREVIOUS_BILLING_PERIOD, +) from app_analytics.models import ( APIUsageBucket, FeatureEvaluationBucket, @@ -16,9 +20,28 @@ from django.conf import settings from django.utils import timezone from pytest_django.fixtures import SettingsWrapper +from pytest_mock import MockerFixture from environments.models import Environment from features.models import Feature +from organisations.models import ( + Organisation, + OrganisationSubscriptionInformationCache, +) + + +@pytest.fixture +def cache(organisation: Organisation) -> OrganisationSubscriptionInformationCache: + yield OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + current_billing_term_starts_at=timezone.now() - timedelta(days=20), + current_billing_term_ends_at=timezone.now() + timedelta(days=10), + api_calls_24h=2000, + api_calls_7d=12000, + api_calls_30d=38000, + allowed_seats=5, + allowed_30d_api_calls=40000, + ) @pytest.mark.skipif( @@ -308,3 +331,90 @@ def test_get_feature_evaluation_data_calls_get_feature_evaluation_data_from_loca mocked_get_feature_evaluation_data_from_local_db.assert_called_once_with( feature=feature, environment_id=environment.id, period=30 ) + + +@pytest.mark.parametrize( + "period", + [ + CURRENT_BILLING_PERIOD, + PREVIOUS_BILLING_PERIOD, + ], +) +def test_get_usage_data_returns_empty_list_when_unset_subscription_information_cache( + period: str, + mocker: MockerFixture, + settings: SettingsWrapper, + organisation: Organisation, +) -> None: + # Given + settings.USE_POSTGRES_FOR_ANALYTICS = True + mocked_get_usage_data_from_local_db = mocker.patch( + "app_analytics.analytics_db_service.get_usage_data_from_local_db", autospec=True + ) + assert getattr(organisation, "subscription_information_cache", None) is None + + # When + usage_data = get_usage_data(organisation, period=period) + + # Then + assert usage_data == [] + mocked_get_usage_data_from_local_db.assert_not_called() + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_get_usage_data_calls_get_usage_data_from_local_db_with_set_period_starts_at_with_current_billing_period( + mocker: MockerFixture, + settings: SettingsWrapper, + organisation: Organisation, + cache: OrganisationSubscriptionInformationCache, +) -> None: + # Given + period: str = CURRENT_BILLING_PERIOD + settings.USE_POSTGRES_FOR_ANALYTICS = True + mocked_get_usage_data_from_local_db = mocker.patch( + "app_analytics.analytics_db_service.get_usage_data_from_local_db", autospec=True + ) + + assert getattr(organisation, "subscription_information_cache", None) == cache + + # When + get_usage_data(organisation, period=period) + + # Then + mocked_get_usage_data_from_local_db.assert_called_once_with( + organisation=organisation, + environment_id=None, + project_id=None, + date_start=datetime(2022, 12, 30, 9, 9, 47, 325132, tzinfo=timezone.utc), + date_stop=datetime(2023, 1, 19, 9, 9, 47, 325132, 
tzinfo=timezone.utc), + ) + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_get_usage_data_calls_get_usage_data_from_local_db_with_set_period_starts_at_with_previous_billing_period( + mocker: MockerFixture, + settings: SettingsWrapper, + organisation: Organisation, + cache: OrganisationSubscriptionInformationCache, +) -> None: + # Given + period: str = PREVIOUS_BILLING_PERIOD + + settings.USE_POSTGRES_FOR_ANALYTICS = True + mocked_get_usage_data_from_local_db = mocker.patch( + "app_analytics.analytics_db_service.get_usage_data_from_local_db", autospec=True + ) + + assert getattr(organisation, "subscription_information_cache", None) == cache + + # When + get_usage_data(organisation, period=period) + + # Then + mocked_get_usage_data_from_local_db.assert_called_once_with( + organisation=organisation, + environment_id=None, + project_id=None, + date_start=datetime(2022, 11, 30, 9, 9, 47, 325132, tzinfo=timezone.utc), + date_stop=datetime(2022, 12, 30, 9, 9, 47, 325132, tzinfo=timezone.utc), + ) diff --git a/api/tests/unit/app_analytics/test_migrate_to_pg.py b/api/tests/unit/app_analytics/test_migrate_to_pg.py new file mode 100644 index 000000000000..d744f631c42a --- /dev/null +++ b/api/tests/unit/app_analytics/test_migrate_to_pg.py @@ -0,0 +1,67 @@ +import pytest +from app_analytics.migrate_to_pg import migrate_feature_evaluations +from app_analytics.models import FeatureEvaluationBucket +from django.conf import settings +from django.utils import timezone +from pytest_mock import MockerFixture + + +@pytest.mark.skipif( + "analytics" not in settings.DATABASES, + reason="Skip test if analytics database is not configured", +) +@pytest.mark.django_db(databases=["analytics", "default"]) +def test_migrate_feature_evaluations(mocker: MockerFixture) -> None: + # Given + feature_name = "test_feature_one" + environment_id = "1" + + # mock the read bucket name + read_bucket = "test_bucket" + mocker.patch("app_analytics.migrate_to_pg.read_bucket", read_bucket) + + # Next, mock the influx client and create some records + mock_influxdb_client = mocker.patch("app_analytics.migrate_to_pg.influxdb_client") + mock_query_api = mock_influxdb_client.query_api.return_value + mock_tables = [] + for i in range(3): + mock_record = mocker.MagicMock( + values={"feature_id": feature_name, "environment_id": environment_id}, + spec_set=["values", "get_time", "get_value"], + ) + mock_record.get_time.return_value = timezone.now() - timezone.timedelta(days=i) + mock_record.get_value.return_value = 100 + + mock_table = mocker.MagicMock(records=[mock_record], spec_set=["records"]) + mock_tables.append(mock_table) + + mock_query_api.query.side_effect = [[table] for table in mock_tables] + + # When + migrate_feature_evaluations(migrate_till=3) + + # Then - only 3 records should be created + assert FeatureEvaluationBucket.objects.count() == 3 + assert ( + FeatureEvaluationBucket.objects.filter( + feature_name=feature_name, + environment_id=environment_id, + bucket_size=15, + total_count=100, + ).count() + == 3 + ) + # And, the query should have been called 3 times + mock_query_api.assert_has_calls( + [ + mocker.call.query( + f"from (bucket: {read_bucket}) |> range(start: -1d, stop: -0d)" + ), + mocker.call.query( + f"from (bucket: {read_bucket}) |> range(start: -2d, stop: -1d)" + ), + mocker.call.query( + f"from (bucket: {read_bucket}) |> range(start: -3d, stop: -2d)" + ), + ] + ) diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py 
b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py index 04278f7bad38..0a590737410a 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py @@ -1,4 +1,4 @@ -from datetime import date +from datetime import date, timedelta from typing import Generator, Type from unittest import mock from unittest.mock import MagicMock @@ -14,13 +14,18 @@ get_feature_evaluation_data, get_multiple_event_list_for_feature, get_multiple_event_list_for_organisation, + get_top_organisations, get_usage_data, ) from django.conf import settings +from django.utils import timezone from influxdb_client.client.exceptions import InfluxDBError from influxdb_client.rest import ApiException +from pytest_mock import MockerFixture from urllib3.exceptions import HTTPError +from organisations.models import Organisation + # Given org_id = 123 env_id = 1234 @@ -178,6 +183,7 @@ def test_influx_db_query_when_get_events_list_then_query_api_called(monkeypatch) def test_influx_db_query_when_get_multiple_events_for_organisation_then_query_api_called( monkeypatch, project_id, environment_id, expected_filters ): + expected_query = ( ( f'from(bucket:"{read_bucket}") ' @@ -271,7 +277,11 @@ def test_get_usage_data(mocker): # Then mocked_get_multiple_event_list_for_organisation.assert_called_once_with( - org_id, None, None + organisation_id=org_id, + environment_id=None, + project_id=None, + date_start="-30d", + date_stop="now()", ) assert len(usage_data) == 2 @@ -309,7 +319,7 @@ def test_get_feature_evaluation_data(mocker): # Then mocked_get_multiple_event_list_for_feature.assert_called_once_with( - feature_name=feature_name, environment_id=env_id, period="30d" + feature_name=feature_name, environment_id=env_id, date_start="-30d" ) assert len(feature_evaluation_data) == 2 @@ -319,3 +329,113 @@ def test_get_feature_evaluation_data(mocker): assert feature_evaluation_data[1].day == date(year=2023, month=1, day=9) assert feature_evaluation_data[1].count == 200 + + +@pytest.mark.parametrize("date_stop", ["now()", "-5d"]) +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_get_event_list_for_organisation_with_date_stop_set_to_now_and_previously( + date_stop: str, + mocker: MockerFixture, + organisation: Organisation, +) -> None: + # Given + now = timezone.now() + one_day_ago = now - timedelta(days=1) + two_days_ago = now - timedelta(days=2) + + record_mock1 = mock.MagicMock() + record_mock1.__getitem__.side_effect = lambda key: { + "resource": "resource23", + "_value": 23, + }.get(key) + record_mock1.values = {"_time": one_day_ago} + + record_mock2 = mock.MagicMock() + record_mock2.__getitem__.side_effect = lambda key: { + "resource": "resource24", + "_value": 24, + }.get(key) + record_mock2.values = {"_time": two_days_ago} + + result = mock.MagicMock() + result.records = [record_mock1, record_mock2] + + influx_mock = mocker.patch( + "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager" + ) + + influx_mock.return_value = [result] + + # When + dataset, labels = get_event_list_for_organisation( + organisation_id=organisation.id, + date_stop=date_stop, + ) + + # Then + assert dataset == {"resource23": [23], "resource24": [24]} + assert labels == ["2023-01-18", "2023-01-17"] + + +@pytest.mark.parametrize("limit", ["10", ""]) +def test_get_top_organisations( + limit: str, + mocker: MockerFixture, +) -> None: + # Given + mocker.patch("app_analytics.influxdb_wrapper.range_bucket_mappings") 
+ + record_mock1 = mock.MagicMock() + record_mock1.values = {"organisation": "123-TestOrg"} + record_mock1.get_value.return_value = 23 + + record_mock2 = mock.MagicMock() + record_mock2.values = {"organisation": "456-TestCorp"} + record_mock2.get_value.return_value = 43 + + result = mock.MagicMock() + result.records = [record_mock1, record_mock2] + + influx_mock = mocker.patch( + "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager" + ) + + influx_mock.return_value = [result] + + # When + dataset = get_top_organisations(date_start="-30d", limit=limit) + + # Then + assert dataset == {123: 23, 456: 43} + + +def test_get_top_organisations_value_error( + mocker: MockerFixture, +) -> None: + # Given + mocker.patch("app_analytics.influxdb_wrapper.range_bucket_mappings") + + record_mock1 = mock.MagicMock() + record_mock1.values = {"organisation": "BadData-TestOrg"} + record_mock1.get_value.return_value = 23 + + record_mock2 = mock.MagicMock() + record_mock2.values = {"organisation": "456-TestCorp"} + record_mock2.get_value.return_value = 43 + + result = mock.MagicMock() + result.records = [record_mock1, record_mock2] + + influx_mock = mocker.patch( + "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager" + ) + + influx_mock.return_value = [result] + + # When + dataset = get_top_organisations(date_start="-30d") + + # Then + # The wrongly typed data does not stop the remaining data + # from being returned. + assert dataset == {456: 43} diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_track.py b/api/tests/unit/app_analytics/test_unit_app_analytics_track.py index aa8c8ea67ff6..624f2977a882 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_track.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_track.py @@ -2,9 +2,11 @@ import pytest from app_analytics.track import ( + track_feature_evaluation_influxdb, track_request_googleanalytics, track_request_influxdb, ) +from pytest_mock import MockerFixture @pytest.mark.parametrize( @@ -129,3 +131,36 @@ def test_track_request_does_not_send_data_to_influxdb_for_not_tracked_uris( # Then MockInfluxDBWrapper.assert_not_called() + + +def test_track_feature_evaluation_influxdb(mocker: MockerFixture) -> None: + # Given + mock_influxdb_wrapper = mock.MagicMock() + mocker.patch( + "app_analytics.track.InfluxDBWrapper", return_value=mock_influxdb_wrapper + ) + + data = { + "foo": 12, + "bar": 19, + "baz": 44, + } + environment_id = 1 + + # When + track_feature_evaluation_influxdb( + environment_id=environment_id, feature_evaluations=data + ) + + # Then + calls = mock_influxdb_wrapper.add_data_point.call_args_list + assert len(calls) == 3 + for i, feature_name in enumerate(data): + assert calls[i].args[0] == "request_count" + assert calls[i].args[1] == data[feature_name] + assert calls[i].kwargs["tags"] == { + "environment_id": environment_id, + "feature_id": feature_name, + } + + mock_influxdb_wrapper.write.assert_called_once_with() diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py index 9137584dc6b5..a9276fa868ea 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py @@ -2,11 +2,17 @@ from datetime import date, timedelta import pytest +from app_analytics.constants import ( + CURRENT_BILLING_PERIOD, + NINETY_DAY_PERIOD, + PREVIOUS_BILLING_PERIOD, +) from app_analytics.dataclasses import UsageData from app_analytics.models 
import FeatureEvaluationRaw from app_analytics.views import SDKAnalyticsFlags from django.conf import settings from django.urls import reverse +from django.utils import timezone from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture from rest_framework import status @@ -15,6 +21,10 @@ from environments.identities.models import Identity from environments.models import Environment from features.models import Feature +from organisations.models import ( + Organisation, + OrganisationSubscriptionInformationCache, +) def test_sdk_analytics_does_not_allow_bad_data(mocker, settings, environment): @@ -60,12 +70,15 @@ def test_sdk_analytics_allows_valid_data(mocker, settings, environment, feature) # Then assert response.status_code == status.HTTP_200_OK - mocked_track_feature_eval.delay.assert_called_once_with(args=(environment.id, data)) + mocked_track_feature_eval.run_in_thread.assert_called_once_with( + args=(environment.id, data) + ) def test_get_usage_data(mocker, admin_client, organisation): # Given url = reverse("api-v1:organisations:usage-data", args=[organisation.id]) + mocked_get_usage_data = mocker.patch( "app_analytics.views.get_usage_data", autospec=True, @@ -96,7 +109,190 @@ def test_get_usage_data(mocker, admin_client, organisation): "environment_document": 0, }, ] - mocked_get_usage_data.assert_called_once_with(organisation) + mocked_get_usage_data.assert_called_once_with(organisation, period=None) + + +@pytest.mark.freeze_time("2024-04-30T09:09:47.325132+00:00") +def test_get_usage_data__current_billing_period( + mocker: MockerFixture, + admin_client_new: APIClient, + organisation: Organisation, +) -> None: + # Given + url = reverse("api-v1:organisations:usage-data", args=[organisation.id]) + url += f"?period={CURRENT_BILLING_PERIOD}" + + mocked_get_usage_data = mocker.patch( + "app_analytics.analytics_db_service.get_usage_data_from_influxdb", + autospec=True, + return_value=[ + UsageData(flags=10, day=date.today()), + UsageData(flags=10, day=date.today() - timedelta(days=1)), + ], + ) + + now = timezone.now() + week_from_now = now + timedelta(days=7) + four_weeks_ago = now - timedelta(days=28) + + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + current_billing_term_starts_at=four_weeks_ago, + current_billing_term_ends_at=week_from_now, + allowed_30d_api_calls=1_000_000, + ) + + # When + response = admin_client_new.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + assert response.json() == [ + { + "flags": 10, + "day": str(date.today()), + "identities": 0, + "traits": 0, + "environment_document": 0, + }, + { + "flags": 10, + "day": str(date.today() - timedelta(days=1)), + "identities": 0, + "traits": 0, + "environment_document": 0, + }, + ] + + mocked_get_usage_data.assert_called_once_with( + organisation_id=organisation.id, + environment_id=None, + project_id=None, + date_start="-28d", + date_stop="now()", + ) + + +@pytest.mark.freeze_time("2024-04-30T09:09:47.325132+00:00") +def test_get_usage_data__previous_billing_period( + mocker: MockerFixture, + admin_client_new: APIClient, + organisation: Organisation, +) -> None: + # Given + url = reverse("api-v1:organisations:usage-data", args=[organisation.id]) + url += f"?period={PREVIOUS_BILLING_PERIOD}" + + mocked_get_usage_data = mocker.patch( + "app_analytics.analytics_db_service.get_usage_data_from_influxdb", + autospec=True, + return_value=[ + UsageData(flags=10, day=date.today() - timedelta(days=29)), + UsageData(flags=10, 
day=date.today() - timedelta(days=30)), + ], + ) + + now = timezone.now() + week_from_now = now + timedelta(days=7) + four_weeks_ago = now - timedelta(days=28) + + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + current_billing_term_starts_at=four_weeks_ago, + current_billing_term_ends_at=week_from_now, + allowed_30d_api_calls=1_000_000, + ) + + # When + response = admin_client_new.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + assert response.json() == [ + { + "flags": 10, + "day": str(date.today() - timedelta(days=29)), + "identities": 0, + "traits": 0, + "environment_document": 0, + }, + { + "flags": 10, + "day": str(date.today() - timedelta(days=30)), + "identities": 0, + "traits": 0, + "environment_document": 0, + }, + ] + + mocked_get_usage_data.assert_called_once_with( + organisation_id=organisation.id, + environment_id=None, + project_id=None, + date_start="-59d", + date_stop="-28d", + ) + + +@pytest.mark.freeze_time("2024-04-30T09:09:47.325132+00:00") +def test_get_usage_data__ninety_day_period( + mocker: MockerFixture, + admin_client_new: APIClient, + organisation: Organisation, +) -> None: + # Given + url = reverse("api-v1:organisations:usage-data", args=[organisation.id]) + url += f"?period={NINETY_DAY_PERIOD}" + + mocked_get_usage_data = mocker.patch( + "app_analytics.analytics_db_service.get_usage_data_from_influxdb", + autospec=True, + return_value=[ + UsageData(flags=10, day=date.today()), + UsageData(flags=10, day=date.today() - timedelta(days=1)), + ], + ) + + now = timezone.now() + week_from_now = now + timedelta(days=7) + four_weeks_ago = now - timedelta(days=28) + + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + current_billing_term_starts_at=four_weeks_ago, + current_billing_term_ends_at=week_from_now, + allowed_30d_api_calls=1_000_000, + ) + + # When + response = admin_client_new.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + assert response.json() == [ + { + "flags": 10, + "day": str(date.today()), + "identities": 0, + "traits": 0, + "environment_document": 0, + }, + { + "flags": 10, + "day": str(date.today() - timedelta(days=1)), + "identities": 0, + "traits": 0, + "environment_document": 0, + }, + ] + + mocked_get_usage_data.assert_called_once_with( + organisation_id=organisation.id, + environment_id=None, + project_id=None, + date_start="-90d", + date_stop="now()", + ) def test_get_usage_data_for_non_admin_user_returns_403( @@ -251,8 +447,10 @@ def test_set_sdk_analytics_flags_v1_to_influxdb( api_client.credentials(HTTP_X_ENVIRONMENT_KEY=environment.api_key) feature_request_count = 2 data = {feature.name: feature_request_count} - mock = mocker.patch("app_analytics.track.InfluxDBWrapper") - add_data_point_mock = mock.return_value.add_data_point + + mocked_track_feature_eval = mocker.patch( + "app_analytics.views.track_feature_evaluation_influxdb" + ) # When response = api_client.post( @@ -261,8 +459,9 @@ def test_set_sdk_analytics_flags_v1_to_influxdb( # Then assert response.status_code == status.HTTP_200_OK - add_data_point_mock.assert_called_with( - "request_count", - feature_request_count, - tags={"feature_id": feature.name, "environment_id": environment.id}, + mocked_track_feature_eval.run_in_thread.assert_called_once_with( + args=( + environment.id, + data, + ) ) diff --git a/api/tests/unit/audit/test_unit_audit_tasks.py b/api/tests/unit/audit/test_unit_audit_tasks.py index dfac633c8180..f6ee29c250e0 100644 --- 
a/api/tests/unit/audit/test_unit_audit_tasks.py +++ b/api/tests/unit/audit/test_unit_audit_tasks.py @@ -14,6 +14,7 @@ ) from environments.models import Environment from features.models import Feature, FeatureSegment, FeatureState +from features.versioning.tasks import enable_v2_versioning from segments.models import Segment from users.models import FFAdminUser @@ -251,6 +252,56 @@ def test_create_segment_priorities_changed_audit_log( ).exists() +def test_create_segment_priorities_changed_audit_log_does_not_create_audit_log_for_versioned_feature_segments( + admin_user: FFAdminUser, + feature_segment: FeatureSegment, + feature: Feature, + segment_featurestate: FeatureState, + environment: Environment, +) -> None: + # Given + another_segment = Segment.objects.create( + project=environment.project, name="Another Segment" + ) + another_feature_segment = FeatureSegment.objects.create( + feature=feature, + environment=environment, + segment=another_segment, + ) + FeatureState.objects.create( + feature=feature, + environment=environment, + feature_segment=another_feature_segment, + ) + + now = timezone.now() + + enable_v2_versioning(environment.id) + + feature_segment.refresh_from_db() + another_feature_segment.refresh_from_db() + assert feature_segment.environment_feature_version_id is not None + assert another_feature_segment.environment_feature_version_id is not None + + # When + create_segment_priorities_changed_audit_log( + previous_id_priority_pairs=[ + (feature_segment.id, 0), + (another_feature_segment.id, 1), + ], + feature_segment_ids=[feature_segment.id, another_feature_segment.id], + user_id=admin_user.id, + changed_at=now.isoformat(), + ) + + # Then + assert not AuditLog.objects.filter( + environment=environment, + log=f"Segment overrides re-ordered for feature '{feature.name}'.", + created_date=now, + ).exists() + + def test_create_feature_state_went_live_audit_log( change_request_feature_state: FeatureState, ) -> None: diff --git a/api/tests/unit/audit/test_unit_audit_views.py b/api/tests/unit/audit/test_unit_audit_views.py index f64ba6caedba..9c10d46dd4f1 100644 --- a/api/tests/unit/audit/test_unit_audit_views.py +++ b/api/tests/unit/audit/test_unit_audit_views.py @@ -5,10 +5,15 @@ from rest_framework import status from rest_framework.test import APIClient +from audit.constants import ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE from audit.models import AuditLog +from audit.related_object_type import RelatedObjectType from environments.models import Environment +from features.models import Feature +from features.versioning.models import EnvironmentFeatureVersion from organisations.models import Organisation, OrganisationRole from projects.models import Project +from users.models import FFAdminUser def test_audit_log_can_be_filtered_by_environments( @@ -146,3 +151,38 @@ def test_admin_user_cannot_list_audit_log_of_another_organisation( # Then assert response.json()["count"] == 0 + + +def test_retrieve_environment_feature_version_published_audit_log_record_includes_required_fields( + admin_client: APIClient, + admin_user: FFAdminUser, + environment_v2_versioning: Environment, + feature: Feature, +) -> None: + # Given + new_version = EnvironmentFeatureVersion.objects.create( + feature=feature, + environment=environment_v2_versioning, + ) + new_version.publish(published_by=admin_user) + + audit_log = ( + AuditLog.objects.filter(related_object_type=RelatedObjectType.EF_VERSION.name) + .order_by("-created_date") + .first() + ) + url = reverse("api-v1:audit-detail", args=[audit_log.id]) 
+ + # When + response = admin_client.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + + response_json = response.json() + assert response_json["related_object_uuid"] == str(new_version.uuid) + assert response_json["related_object_type"] == RelatedObjectType.EF_VERSION.name + assert ( + response_json["log"] + == ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE % feature.name + ) diff --git a/api/tests/unit/custom_auth/mfa/trench/__init__.py b/api/tests/unit/custom_auth/mfa/trench/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/api/tests/unit/custom_auth/mfa/trench/conftest.py b/api/tests/unit/custom_auth/mfa/trench/conftest.py new file mode 100644 index 000000000000..a56b3bd81c87 --- /dev/null +++ b/api/tests/unit/custom_auth/mfa/trench/conftest.py @@ -0,0 +1,30 @@ +import pytest + +from custom_auth.mfa.trench.command.create_secret import create_secret_command +from custom_auth.mfa.trench.command.replace_mfa_method_backup_codes import ( + regenerate_backup_codes_for_mfa_method_command, +) +from custom_auth.mfa.trench.models import MFAMethod +from users.models import FFAdminUser + + +@pytest.fixture() +def mfa_app_method(admin_user: FFAdminUser) -> MFAMethod: + mfa = MFAMethod.objects.create( + user=admin_user, + name="app", + secret=create_secret_command(), + is_active=True, + is_primary=True, + ) + # Generate backup codes + regenerate_backup_codes_for_mfa_method_command(admin_user.id, mfa.name) + return mfa + + +@pytest.fixture() +def deactivated_mfa_app_method(mfa_app_method: MFAMethod) -> MFAMethod: + mfa_app_method.is_active = False + mfa_app_method.is_primary = False + mfa_app_method.save() + return mfa_app_method diff --git a/api/tests/unit/custom_auth/mfa/trench/test_views.py b/api/tests/unit/custom_auth/mfa/trench/test_views.py new file mode 100644 index 000000000000..ad5e61e63d45 --- /dev/null +++ b/api/tests/unit/custom_auth/mfa/trench/test_views.py @@ -0,0 +1,222 @@ +import pyotp +from django.urls import reverse +from rest_framework import status +from rest_framework.test import APIClient + +from custom_auth.mfa.trench.models import MFAMethod +from users.models import FFAdminUser + + +def test_list_user_active_methods(admin_client: APIClient, mfa_app_method: MFAMethod): + # Given + url = reverse("api-v1:custom_auth:mfa-list-user-active-methods") + + # When + response = admin_client.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + assert response.json() == [ + {"name": mfa_app_method.name, "is_primary": mfa_app_method.is_primary} + ] + + +def test_deactivate_user_active_method( + admin_client: APIClient, mfa_app_method: MFAMethod +): + # Given + url = reverse("api-v1:custom_auth:mfa-deactivate", args=[mfa_app_method.name]) + + # When + response = admin_client.post(url) + + # Then + assert response.status_code == status.HTTP_204_NO_CONTENT + mfa_app_method.refresh_from_db() + assert mfa_app_method.is_active is False + + +def test_deactivate_already_deactivated_mfa_returns_400( + admin_client: APIClient, mfa_app_method: MFAMethod +): + # Given + mfa_app_method.is_active = False + mfa_app_method.is_primary = False + mfa_app_method.save() + + url = reverse("api-v1:custom_auth:mfa-deactivate", args=[mfa_app_method.name]) + + # When + response = admin_client.post(url) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json()["error"] == "2FA is not enabled." 
+
+
+def test_activate_wrong_method_returns_404(admin_client: APIClient):
+    # Given
+    url = reverse("api-v1:custom_auth:mfa-activate", kwargs={"method": "wrong_method"})
+
+    # When
+    response = admin_client.post(url)
+
+    # Then
+    assert response.status_code == status.HTTP_404_NOT_FOUND
+
+
+def test_activate_mfa_with_existing_mfa_returns_400(
+    admin_client: APIClient, mfa_app_method: MFAMethod
+):
+    # Given
+    url = reverse("api-v1:custom_auth:mfa-activate", kwargs={"method": "app"})
+
+    # When
+    response = admin_client.post(url)
+
+    # Then
+    assert response.status_code == status.HTTP_400_BAD_REQUEST
+    assert response.json()["error"] == "MFA method already active."
+
+
+def test_activate_confirm_with_wrong_method_returns_400(
+    admin_client: APIClient, mfa_app_method: MFAMethod
+):
+    # Given
+    totp = pyotp.TOTP(mfa_app_method.secret)
+    url = reverse(
+        "api-v1:custom_auth:mfa-activate-confirm", kwargs={"method": "wrong_method"}
+    )
+
+    # When
+    data = {"code": totp.now()}
+    response = admin_client.post(url, data=data)
+
+    # Then
+    assert response.status_code == status.HTTP_400_BAD_REQUEST
+    assert response.json() == {"code": ["Requested MFA method does not exist."]}
+
+
+def test_activate_confirm_already_active_mfa_returns_400(
+    admin_client: APIClient, mfa_app_method: MFAMethod
+):
+    # Given
+    totp = pyotp.TOTP(mfa_app_method.secret)
+    url = reverse("api-v1:custom_auth:mfa-activate-confirm", kwargs={"method": "app"})
+
+    # When
+    data = {"code": totp.now()}
+    response = admin_client.post(url, data=data)
+
+    # Then
+    assert response.status_code == status.HTTP_400_BAD_REQUEST
+    assert response.json() == {"code": ["MFA method already active."]}
+
+
+def test_re_activate_confirm_deactivated_mfa_creates_new_backup_codes(
+    admin_client: APIClient, deactivated_mfa_app_method: MFAMethod
+):
+    # Given
+    existing_backup_codes = deactivated_mfa_app_method.backup_codes
+    totp = pyotp.TOTP(deactivated_mfa_app_method.secret)
+    url = reverse("api-v1:custom_auth:mfa-activate-confirm", kwargs={"method": "app"})
+
+    # When
+    data = {"code": totp.now()}
+    response = admin_client.post(url, data=data)
+
+    # Then
+    assert response.status_code == status.HTTP_200_OK
+    new_backup_codes = response.json()["backup_codes"]
+    for code in existing_backup_codes:
+        assert code not in new_backup_codes
+
+
+def test_activate_confirm_mfa_for_different_user_returns_400(
+    staff_client: APIClient, deactivated_mfa_app_method: MFAMethod
+):
+    # Given
+    totp = pyotp.TOTP(deactivated_mfa_app_method.secret)
+    url = reverse("api-v1:custom_auth:mfa-activate-confirm", kwargs={"method": "app"})
+
+    # When
+    data = {"code": totp.now()}
+    response = staff_client.post(url, data=data)
+
+    # Then
+    assert response.status_code == status.HTTP_400_BAD_REQUEST
+
+
+def test_activate_confirm_without_code_returns_400(
+    admin_client: APIClient, mfa_app_method: MFAMethod
+):
+    # Given
+    url = reverse(
+        "api-v1:custom_auth:mfa-activate-confirm",
+        kwargs={"method": mfa_app_method.name},
+    )
+
+    # When
+    response = admin_client.post(url)
+
+    # Then
+    assert response.status_code == status.HTTP_400_BAD_REQUEST
+    assert response.json() == {"code": ["This field is required."]}
+
+
+def test_activate_confirm_with_wrong_code_returns_400(
+    admin_client: APIClient, mfa_app_method: MFAMethod
+):
+    # Given
+    mfa_app_method.is_active = False
+    mfa_app_method.is_primary = False
+    mfa_app_method.save()
+
+    url = reverse(
+        "api-v1:custom_auth:mfa-activate-confirm",
+        kwargs={"method": mfa_app_method.name},
+    )
+    data = {"code": "wrong_code"}
+    # When
+    response
= admin_client.post(url, data=data) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == {"code": ["Code invalid or expired."]} + + +def test_login_with_invalid_mfa_code_returns_401( + api_client: APIClient, admin_user: FFAdminUser, mfa_app_method: MFAMethod +): + # Given + login_url = reverse("api-v1:custom_auth:custom-mfa-authtoken-login") + data = {"email": admin_user.email, "password": "password"} + + login_response = api_client.post(login_url, data=data) + + ephemeral_token = login_response.json()["ephemeral_token"] + confirm_login_data = {"ephemeral_token": ephemeral_token, "code": "wrong_code"} + login_confirm_url = reverse("api-v1:custom_auth:mfa-authtoken-login-code") + + # When + login_confirm_response = api_client.post(login_confirm_url, data=confirm_login_data) + + # Then + assert login_confirm_response.status_code == status.HTTP_401_UNAUTHORIZED + assert login_confirm_response.json() == {"error": "Invalid or expired code."} + + +def test_login_with_invalid_mfa_token_returns_401( + api_client: APIClient, mfa_app_method: MFAMethod +): + # Given + totp = pyotp.TOTP(mfa_app_method.secret) + data = {"ephemeral_token": "wrong_token", "code": totp.now()} + url = reverse("api-v1:custom_auth:mfa-authtoken-login-code") + + # When + login_confirm_response = api_client.post(url, data=data) + + # Then + assert login_confirm_response.status_code == status.HTTP_401_UNAUTHORIZED + assert login_confirm_response.json() == {"error": "Invalid or expired token."} diff --git a/api/tests/unit/custom_auth/oauth/test_unit_oauth_github.py b/api/tests/unit/custom_auth/oauth/test_unit_oauth_github.py index e60ba880c5c6..f1ecc6db9eb1 100644 --- a/api/tests/unit/custom_auth/oauth/test_unit_oauth_github.py +++ b/api/tests/unit/custom_auth/oauth/test_unit_oauth_github.py @@ -1,4 +1,4 @@ -from unittest import TestCase, mock +from unittest import mock import pytest @@ -6,138 +6,153 @@ from custom_auth.oauth.github import NON_200_ERROR_MESSAGE, GithubUser -class GithubUserTestCase(TestCase): - def setUp(self) -> None: - self.test_client_id = "test-client-id" - self.test_client_secret = "test-client-secret" - - self.mock_requests = mock.patch("custom_auth.oauth.github.requests").start() - - def tearDown(self) -> None: - self.mock_requests.stop() - - def test_get_access_token_success(self): - # Given - test_code = "abc123" - expected_access_token = "access-token" - - self.mock_requests.post.return_value = mock.MagicMock( - text=f"access_token={expected_access_token}&scope=user&token_type=bearer", - status_code=200, - ) - - # When - github_user = GithubUser( - test_code, - client_id=self.test_client_id, - client_secret=self.test_client_secret, - ) - - # Then - assert github_user.access_token == expected_access_token - - assert self.mock_requests.post.call_count == 1 - request_calls = self.mock_requests.post.call_args - assert request_calls[1]["data"]["code"] == test_code - - def test_get_access_token_fail_non_200(self): - # Given - invalid_code = "invalid" - status_code = 400 - self.mock_requests.post.return_value = mock.MagicMock(status_code=status_code) - - # When - with pytest.raises(GithubError) as e: - GithubUser( - invalid_code, - client_id=self.test_client_id, - client_secret=self.test_client_secret, - ) - - # Then - exception raised - assert NON_200_ERROR_MESSAGE.format(status_code) in str(e) - - def test_get_access_token_fail_token_expired(self): - # Given - invalid_code = "invalid" - - error_description = "there+was+an+error" - 
self.mock_requests.post.return_value = mock.MagicMock( - text=f"error=bad_verification_code&error_description={error_description}", - status_code=200, - ) - - # When - with pytest.raises(GithubError) as e: - GithubUser( - invalid_code, - client_id=self.test_client_id, - client_secret=self.test_client_secret, - ) - - # Then - assert error_description.replace("+", " ") in str(e) - - def test_get_user_name_and_id(self): - # Given - # mock the post to get the access token - self.mock_requests.post.return_value = mock.MagicMock( - status_code=200, text="access_token=123456" +@mock.patch("custom_auth.oauth.github.requests") +def test_get_access_token_success_with_oauth_github( + mock_requests: mock.MagicMock, +) -> None: + # Given + test_code = "abc123" + expected_access_token = "access-token" + client_id = "test-client-id" + client_secret = "test-client-secret" + + mock_requests.post.return_value = mock.MagicMock( + text=f"access_token={expected_access_token}&scope=user&token_type=bearer", + status_code=200, + ) + + # When + github_user = GithubUser( + test_code, + client_id=client_id, + client_secret=client_secret, + ) + + # Then + assert github_user.access_token == expected_access_token + + assert mock_requests.post.call_count == 1 + request_calls = mock_requests.post.call_args + assert request_calls[1]["data"]["code"] == test_code + + +@mock.patch("custom_auth.oauth.github.requests") +def test_get_access_token_fail_non_200_with_github_oauth( + mock_requests: mock.MagicMock, +) -> None: + # Given + client_id = "test-client-id" + client_secret = "test-client-secret" + invalid_code = "invalid" + status_code = 400 + mock_requests.post.return_value = mock.MagicMock(status_code=status_code) + + # When + with pytest.raises(GithubError) as e: + GithubUser( + invalid_code, + client_id=client_id, + client_secret=client_secret, ) - # mock the get to get the user info - mock_response = mock.MagicMock(status_code=200) - self.mock_requests.get.return_value = mock_response - mock_response.json.return_value = {"name": "tommy tester", "id": 123456} - - # When - github_user = GithubUser( - "test-code", - client_id=self.test_client_id, - client_secret=self.test_client_secret, + # Then - exception raised + assert NON_200_ERROR_MESSAGE.format(status_code) in str(e) + + +@mock.patch("custom_auth.oauth.github.requests") +def test_get_access_token_fail_token_expired_with_github_oauth( + mock_requests: mock.MagicMock, +) -> None: + # Given + invalid_code = "invalid" + client_id = "test-client-id" + client_secret = "test-client-secret" + + error_description = "there+was+an+error" + mock_requests.post.return_value = mock.MagicMock( + text=f"error=bad_verification_code&error_description={error_description}", + status_code=200, + ) + + # When + with pytest.raises(GithubError) as e: + GithubUser( + invalid_code, + client_id=client_id, + client_secret=client_secret, ) - user_name_and_id = github_user._get_user_name_and_id() - # Then - assert user_name_and_id == { - "first_name": "tommy", - "last_name": "tester", - "github_user_id": 123456, + # Then + assert error_description.replace("+", " ") in str(e) + + +@mock.patch("custom_auth.oauth.github.requests") +def test_get_user_name_and_id_with_github_oauth(mock_requests: mock.MagicMock) -> None: + # Given + # mock the post to get the access token + mock_requests.post.return_value = mock.MagicMock( + status_code=200, text="access_token=123456" + ) + client_id = "test-client-id" + client_secret = "test-client-secret" + + # mock the get to get the user info + mock_response = 
mock.MagicMock(status_code=200) + mock_requests.get.return_value = mock_response + mock_response.json.return_value = {"name": "tommy tester", "id": 123456} + + # When + github_user = GithubUser( + "test-code", + client_id=client_id, + client_secret=client_secret, + ) + user_name_and_id = github_user._get_user_name_and_id() + + # Then + assert user_name_and_id == { + "first_name": "tommy", + "last_name": "tester", + "github_user_id": 123456, + } + + +@mock.patch("custom_auth.oauth.github.requests") +def test_get_primary_email_with_github_oauth(mock_requests: mock.MagicMock) -> None: + # Given + # mock the post to get the access token + mock_requests.post.return_value = mock.MagicMock( + status_code=200, text="access_token=123456" + ) + client_id = "test-client-id" + client_secret = "test-client-secret" + + # mock the request to get the user info + mock_response = mock.MagicMock(status_code=200) + mock_requests.get.return_value = mock_response + + verified_emails = [ + { + "email": f"tommy_tester@example_{i}.com", + "verified": True, + "visibility": None, + "primary": False, } + for i in range(5) + ] - def test_get_primary_email(self): - # Given - # mock the post to get the access token - self.mock_requests.post.return_value = mock.MagicMock( - status_code=200, text="access_token=123456" - ) + # set one of the verified emails to be the primary + verified_emails[3]["primary"] = True - # mock the request to get the user info - mock_response = mock.MagicMock(status_code=200) - self.mock_requests.get.return_value = mock_response - - verified_emails = [ - { - "email": f"tommy_tester@example_{i}.com", - "verified": True, - "visibility": None, - "primary": False, - } - for i in range(5) - ] - - # set one of the verified emails to be the primary - verified_emails[3]["primary"] = True - - mock_response.json.return_value = verified_emails - - # When - github_user = GithubUser( - "test-code", - client_id=self.test_client_id, - client_secret=self.test_client_secret, - ) - primary_email = github_user._get_primary_email() + mock_response.json.return_value = verified_emails + + # When + github_user = GithubUser( + "test-code", + client_id=client_id, + client_secret=client_secret, + ) + primary_email = github_user._get_primary_email() - # Then - assert primary_email == verified_emails[3]["email"] + # Then + assert primary_email == verified_emails[3]["email"] diff --git a/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py b/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py index 7b13a98717a8..da18bc8b7b1d 100644 --- a/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py +++ b/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py @@ -1,7 +1,5 @@ -from unittest import TestCase, mock +from unittest import mock -import pytest -from django.contrib.auth import get_user_model from django.test import RequestFactory from django.utils import timezone from pytest_django.fixtures import SettingsWrapper @@ -13,95 +11,88 @@ GoogleLoginSerializer, OAuthLoginSerializer, ) -from users.models import SignUpType - -UserModel = get_user_model() - - -@pytest.mark.django_db -class OAuthLoginSerializerTestCase(TestCase): - def setUp(self) -> None: - self.test_email = "testytester@example.com" - self.test_first_name = "testy" - self.test_last_name = "tester" - self.test_id = "test-id" - self.mock_user_data = { - "email": self.test_email, - "first_name": self.test_first_name, - "last_name": self.test_last_name, - "google_user_id": self.test_id, - } - rf = RequestFactory() - self.request 
= rf.post("placeholer-login-url") - - @mock.patch("custom_auth.oauth.serializers.get_user_info") - def test_create(self, mock_get_user_info): - # Given - access_token = "access-token" - sign_up_type = "NO_INVITE" - data = {"access_token": access_token, "sign_up_type": sign_up_type} - serializer = OAuthLoginSerializer(data=data, context={"request": self.request}) - - # monkey patch the get_user_info method to return the mock user data - serializer.get_user_info = lambda: self.mock_user_data - - # When - serializer.is_valid() - response = serializer.save() - - # Then - assert UserModel.objects.filter( - email=self.test_email, sign_up_type=sign_up_type - ).exists() - assert isinstance(response, Token) - assert (timezone.now() - response.user.last_login).seconds < 5 - assert response.user.email == self.test_email - - -class GoogleLoginSerializerTestCase(TestCase): - def setUp(self) -> None: - rf = RequestFactory() - self.request = rf.post("placeholer-login-url") - - @mock.patch("custom_auth.oauth.serializers.get_user_info") - def test_get_user_info(self, mock_get_user_info): - # Given - access_token = "some-access-token" - serializer = GoogleLoginSerializer( - data={"access_token": access_token}, context={"request": self.request} - ) - - # When - serializer.is_valid() - serializer.get_user_info() - - # Then - mock_get_user_info.assert_called_with(access_token) - - -class GithubLoginSerializerTestCase(TestCase): - def setUp(self) -> None: - rf = RequestFactory() - self.request = rf.post("placeholer-login-url") - - @mock.patch("custom_auth.oauth.serializers.GithubUser") - def test_get_user_info(self, MockGithubUser): - # Given - access_token = "some-access-token" - serializer = GithubLoginSerializer( - data={"access_token": access_token}, context={"request": self.request} - ) - - mock_github_user = mock.MagicMock() - MockGithubUser.return_value = mock_github_user - - # When - serializer.is_valid() - serializer.get_user_info() - - # Then - MockGithubUser.assert_called_with(code=access_token) - mock_github_user.get_user_info.assert_called() +from users.models import FFAdminUser, SignUpType + + +@mock.patch("custom_auth.oauth.serializers.get_user_info") +def test_create_oauth_login_serializer( + mock_get_user_info: mock.MagicMock, db: None +) -> None: + # Given + access_token = "access-token" + sign_up_type = "NO_INVITE" + data = {"access_token": access_token, "sign_up_type": sign_up_type} + rf = RequestFactory() + request = rf.post("placeholer-login-url") + email = "testytester@example.com" + first_name = "testy" + last_name = "tester" + google_user_id = "test-id" + + mock_user_data = { + "email": email, + "first_name": first_name, + "last_name": last_name, + "google_user_id": google_user_id, + } + serializer = OAuthLoginSerializer(data=data, context={"request": request}) + + # monkey patch the get_user_info method to return the mock user data + serializer.get_user_info = lambda: mock_user_data + + # When + serializer.is_valid() + response = serializer.save() + + # Then + assert FFAdminUser.objects.filter(email=email, sign_up_type=sign_up_type).exists() + assert isinstance(response, Token) + assert (timezone.now() - response.user.last_login).seconds < 5 + assert response.user.email == email + + +@mock.patch("custom_auth.oauth.serializers.get_user_info") +def test_get_user_info_with_google_login( + mock_get_user_info: mock.MagicMock, +) -> None: + # Given + rf = RequestFactory() + request = rf.post("placeholer-login-url") + access_token = "some-access-token" + serializer = GoogleLoginSerializer( + 
data={"access_token": access_token}, context={"request": request} + ) + + # When + serializer.is_valid() + serializer.get_user_info() + + # Then + mock_get_user_info.assert_called_with(access_token) + + +@mock.patch("custom_auth.oauth.serializers.GithubUser") +def test_get_user_info_with_github_login( + mock_github_user_serializer: mock.MagicMock, +) -> None: + # Given + rf = RequestFactory() + request = rf.post("placeholer-login-url") + access_token = "some-access-token" + serializer = GithubLoginSerializer( + data={"access_token": access_token}, context={"request": request} + ) + + mock_github_user = mock.MagicMock() + mock_github_user_serializer.return_value = mock_github_user + + # When + serializer.is_valid() + serializer.get_user_info() + + # Then + mock_github_user_serializer.assert_called_with(code=access_token) + mock_github_user.get_user_info.assert_called() def test_OAuthLoginSerializer_calls_is_authentication_method_valid_correctly_if_auth_controller_is_installed( diff --git a/api/tests/unit/environments/test_unit_environments_models.py b/api/tests/unit/environments/test_unit_environments_models.py index f1c263b1d1d8..046e7a899bba 100644 --- a/api/tests/unit/environments/test_unit_environments_models.py +++ b/api/tests/unit/environments/test_unit_environments_models.py @@ -27,6 +27,10 @@ from features.models import Feature, FeatureState from features.multivariate.models import MultivariateFeatureOption from features.versioning.models import EnvironmentFeatureVersion +from features.versioning.tasks import enable_v2_versioning +from features.versioning.versioning_service import ( + get_environment_flags_queryset, +) from organisations.models import Organisation, OrganisationRole from projects.models import EdgeV2MigrationStatus, Project from segments.models import Segment @@ -937,3 +941,56 @@ def test_create_environment_creates_feature_states_in_all_environments_and_envir EnvironmentFeatureVersion.objects.filter(environment=environment).count() == 2 ) assert environment.feature_states.count() == 2 + + +def test_clone_environment_v2_versioning( + feature: Feature, + feature_state: FeatureState, + segment: Segment, + segment_featurestate: FeatureState, + environment: Environment, +) -> None: + # Given + expected_environment_fs_enabled_value = True + expected_segment_fs_enabled_value = True + + # First let's create some new versions via the old versioning methods + feature_state.clone(environment, version=2) + feature_state.clone(environment, version=3) + + # and a draft version + feature_state.clone(environment, as_draft=True) + + # Now let's enable v2 versioning for the environment + enable_v2_versioning(environment.id) + environment.refresh_from_db() + + # Finally, let's create another version using the new versioning methods + # and update some values on the feature states in it. 
+ v2 = EnvironmentFeatureVersion.objects.create( + feature=feature, environment=environment + ) + v2.feature_states.filter(feature_segment__isnull=True).update( + enabled=expected_environment_fs_enabled_value + ) + v2.feature_states.filter(feature_segment__isnull=False).update( + enabled=expected_segment_fs_enabled_value + ) + v2.publish() + + # When + cloned_environment = environment.clone(name="Cloned environment") + + # Then + assert cloned_environment.use_v2_feature_versioning is True + + cloned_environment_flags = get_environment_flags_queryset(cloned_environment) + + assert ( + cloned_environment_flags.get(feature_segment__isnull=True).enabled + is expected_environment_fs_enabled_value + ) + assert ( + cloned_environment_flags.get(feature_segment__segment=segment).enabled + is expected_segment_fs_enabled_value + ) diff --git a/api/tests/unit/environments/test_unit_environments_views.py b/api/tests/unit/environments/test_unit_environments_views.py index 07f430ba24d4..90760acec236 100644 --- a/api/tests/unit/environments/test_unit_environments_views.py +++ b/api/tests/unit/environments/test_unit_environments_views.py @@ -20,6 +20,7 @@ from environments.permissions.constants import VIEW_ENVIRONMENT from environments.permissions.models import UserEnvironmentPermission from features.models import Feature, FeatureState +from features.versioning.models import EnvironmentFeatureVersion from metadata.models import Metadata, MetadataModelField from organisations.models import Organisation from projects.models import Project @@ -935,3 +936,27 @@ def test_cannot_enable_v2_versioning_for_environment_already_enabled( assert response.json() == {"detail": "Environment already using v2 versioning."} mock_enable_v2_versioning.delay.assert_not_called() + + +def test_total_segment_overrides_correctly_ignores_old_versions( + feature: Feature, + segment_featurestate: FeatureState, + environment_v2_versioning: Environment, + admin_client_new: APIClient, + staff_user: FFAdminUser, +) -> None: + # Given + url = reverse( + "api-v1:environments:environment-detail", + args=[environment_v2_versioning.api_key], + ) + + EnvironmentFeatureVersion.objects.create( + feature=feature, environment=environment_v2_versioning + ).publish(staff_user) + + # When + response = admin_client_new.get(url) + + # Then + assert response.json()["total_segment_overrides"] == 1 diff --git a/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py b/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py index 48b4210645ee..0bfc0a895375 100644 --- a/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py +++ b/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py @@ -6,6 +6,9 @@ from rest_framework import status from rest_framework.test import APIClient +from audit.constants import SEGMENT_FEATURE_STATE_DELETED_MESSAGE +from audit.models import AuditLog +from audit.related_object_type import RelatedObjectType from environments.models import Environment from environments.permissions.constants import ( MANAGE_SEGMENT_OVERRIDES, @@ -596,3 +599,43 @@ def test_get_feature_segments_only_returns_latest_version( response_json = response.json() assert response_json["count"] == 1 assert response_json["results"][0]["id"] == feature_segment_v2.id + + +def test_delete_feature_segment_does_not_create_audit_log_for_versioning_v2( + feature: Feature, + segment: Segment, + feature_segment: FeatureSegment, + segment_featurestate: FeatureState, + 
environment_v2_versioning: Environment, + staff_client: APIClient, + with_environment_permissions: WithEnvironmentPermissionsCallable, + with_project_permissions: WithProjectPermissionsCallable, +) -> None: + # Given + with_project_permissions([VIEW_PROJECT]) + with_environment_permissions([MANAGE_SEGMENT_OVERRIDES, VIEW_ENVIRONMENT]) + + # we first need to create a new version so that we can modify the feature segment + # that is generated as part of the new version + version_2 = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, feature=feature + ) + version_2_feature_segment = FeatureSegment.objects.get( + feature=feature, segment=segment, environment_feature_version=version_2 + ) + + url = reverse( + "api-v1:features:feature-segment-detail", args=[version_2_feature_segment.id] + ) + + # When + response = staff_client.delete(url) + + # Then + assert response.status_code == status.HTTP_204_NO_CONTENT + + assert not AuditLog.objects.filter( + related_object_type=RelatedObjectType.FEATURE.name, + related_object_id=feature.id, + log=SEGMENT_FEATURE_STATE_DELETED_MESSAGE % (feature.name, segment.name), + ).exists() diff --git a/api/tests/unit/features/test_unit_feature_external_resources_views.py b/api/tests/unit/features/test_unit_feature_external_resources_views.py index 84d4b169018f..901454b6add6 100644 --- a/api/tests/unit/features/test_unit_feature_external_resources_views.py +++ b/api/tests/unit/features/test_unit_feature_external_resources_views.py @@ -1,3 +1,8 @@ +from datetime import datetime +from unittest.mock import MagicMock + +import pytest +import responses import simplejson as json from django.core.serializers.json import DjangoJSONEncoder from django.urls import reverse @@ -9,14 +14,18 @@ from environments.models import Environment from environments.permissions.constants import UPDATE_FEATURE_STATE from features.feature_external_resources.models import FeatureExternalResource -from features.models import Feature, FeatureState +from features.models import Feature, FeatureSegment, FeatureState from features.serializers import ( FeatureStateSerializerBasic, WritableNestedFeatureStateSerializer, ) +from features.versioning.models import EnvironmentFeatureVersion +from integrations.github.constants import GITHUB_API_URL, GITHUB_API_VERSION from integrations.github.models import GithubConfiguration, GithubRepository from projects.models import Project +from segments.models import Segment from tests.types import WithEnvironmentPermissionsCallable +from users.models import FFAdminUser _django_json_encoder_default = DjangoJSONEncoder().default @@ -56,50 +65,50 @@ def expected_segment_comment_body( ) -def mocked_requests_post(*args, **kwargs): - class MockResponse: - def __init__(self, json_data, status_code): - self.json_data = json_data - self.status_code = status_code - - def raise_for_status(self) -> None: - pass - - def json(self): - return self.json_data - - return MockResponse(json_data={"data": "data"}, status_code=200) - - +@responses.activate def test_create_feature_external_resource( admin_client_new: APIClient, feature_with_value: Feature, - segment_featurestate_and_feature_with_value: FeatureState, + segment_override_for_feature_with_value: FeatureState, environment: Environment, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + post_request_mock: MagicMock, mocker: MockerFixture, ) -> None: # Given - mock_generate_token = mocker.patch( - "integrations.github.github.generate_token", + 
mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", ) - mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch( - "requests.post", side_effect=mocked_requests_post + feature_external_resource_data = { + "type": "GITHUB_ISSUE", + "url": "https://github.com/repoowner/repo-name/issues/35", + "feature": feature_with_value.id, + "metadata": {"state": "open"}, + } + + url = reverse( + "api-v1:projects:feature-external-resources-list", + kwargs={"project_pk": project.id, "feature_pk": feature_with_value.id}, ) - feature_state = FeatureState.objects.filter(feature=feature_with_value).first() - feature_state_updated_at = feature_state.updated_at.strftime( - get_format("DATETIME_INPUT_FORMATS")[0] + # When + response = admin_client_new.post( + url, data=feature_external_resource_data, format="json" ) - segment_override_updated_at = ( - segment_featurestate_and_feature_with_value.updated_at.strftime( - get_format("DATETIME_INPUT_FORMATS")[0] - ) + + # Then + feature_state_update_at = ( + FeatureState.objects.filter(feature=feature_with_value) + .first() + .updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) ) + segment_override_updated_at = FeatureState.objects.get( + id=segment_override_for_feature_with_value.id + ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) expected_comment_body = ( "**Flagsmith feature linked:** `feature_with_value`\n" @@ -108,7 +117,7 @@ def test_create_feature_external_resource( project.id, environment.api_key, feature_with_value.id, - feature_state_updated_at, + feature_state_update_at, ) + "\n" + expected_segment_comment_body( @@ -120,30 +129,12 @@ def test_create_feature_external_resource( "`value`", ) ) - - feature_external_resource_data = { - "type": "GITHUB_ISSUE", - "url": "https://github.com/repoowner/repo-name/issues/35", - "feature": feature_with_value.id, - "metadata": {"status": "open"}, - } - - url = reverse( - "api-v1:projects:feature-external-resources-list", - kwargs={"project_pk": project.id, "feature_pk": feature_with_value.id}, - ) - - # When - response = admin_client_new.post( - url, data=feature_external_resource_data, format="json" - ) - - # Then - github_request_mock.assert_called_with( + post_request_mock.assert_called_with( "https://api.github.com/repos/repoowner/repo-name/issues/35/comments", json={"body": f"{expected_comment_body}"}, headers={ "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, "Authorization": "Bearer mocked_token", }, timeout=10, @@ -164,6 +155,12 @@ def test_create_feature_external_resource( assert feature_external_resources[0].url == feature_external_resource_data["url"] # And When + responses.add( + method="GET", + url=f"{GITHUB_API_URL}repos/repoowner/repo-name/issues/35", + status=200, + json={"title": "resource name", "state": "open"}, + ) url = reverse( "api-v1:projects:feature-external-resources-list", kwargs={"project_pk": project.id, "feature_pk": feature_with_value.id}, @@ -173,11 +170,13 @@ def test_create_feature_external_resource( # Then assert response.status_code == status.HTTP_200_OK - assert response.json()["count"] == 1 + assert len(response.json()["results"]) == 1 assert ( response.json()["results"][0]["type"] == feature_external_resource_data["type"] ) assert response.json()["results"][0]["url"] == feature_external_resource_data["url"] + feature_external_resource_data["metadata"]["title"] = "resource name" + assert ( response.json()["results"][0]["metadata"] == 
feature_external_resource_data["metadata"] @@ -257,15 +256,15 @@ def test_cannot_create_feature_external_resource_when_the_type_is_incorrect( def test_cannot_create_feature_external_resource_due_to_unique_constraint( admin_client_new: APIClient, feature: Feature, - feature_external_resource: FeatureExternalResource, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, ) -> None: # Given feature_external_resource_data = { "type": "GITHUB_ISSUE", - "url": "https://github.com/userexample/example-project-repo/issues/11", + "url": "https://github.com/repositoryownertest/repositorynametest/issues/11", "feature": feature.id, } url = reverse( @@ -278,28 +277,55 @@ def test_cannot_create_feature_external_resource_due_to_unique_constraint( # Then assert response.status_code == status.HTTP_400_BAD_REQUEST assert ( - "Duplication error. The feature already has this resource URI" - in response.json()[0] + response.json()["non_field_errors"][0] + == "The fields feature, url must make a unique set." ) -def test_delete_feature_external_resource( +def test_update_feature_external_resource( admin_client_new: APIClient, - feature_external_resource: FeatureExternalResource, feature: Feature, + feature_external_resource: FeatureExternalResource, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + post_request_mock: MagicMock, + mocker: MockerFixture, ) -> None: # Given mock_generate_token = mocker.patch( - "integrations.github.github.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch( - "requests.post", side_effect=mocked_requests_post + mock_generate_token.return_value = "mocked_token" + feature_external_resource_data = { + "type": "GITHUB_ISSUE", + "url": "https://github.com/userexample/example-project-repo/issues/12", + "feature": feature.id, + } + url = reverse( + "api-v1:projects:feature-external-resources-detail", + args=[project.id, feature.id, feature_external_resource.id], ) + # When + response = admin_client_new.put(url, data=feature_external_resource_data) + + # Then + assert response.status_code == status.HTTP_200_OK + assert response.json()["url"] == feature_external_resource_data["url"] + + +def test_delete_feature_external_resource( + admin_client_new: APIClient, + feature: Feature, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, + post_request_mock: MagicMock, + mocker: MockerFixture, +) -> None: + # Given url = reverse( "api-v1:projects:feature-external-resources-detail", args=[project.id, feature.id, feature_external_resource.id], @@ -309,13 +335,14 @@ def test_delete_feature_external_resource( response = admin_client_new.delete(url) # Then - github_request_mock.assert_called_with( - "https://api.github.com/repos/userexample/example-project-repo/issues/11/comments", + post_request_mock.assert_called_with( + "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", json={ "body": "### The feature flag `Test Feature1` was unlinked from the issue/PR" }, headers={ "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, "Authorization": "Bearer mocked_token", }, timeout=10, @@ -326,20 +353,32 @@ def test_delete_feature_external_resource( ).exists() +@responses.activate 
def test_get_feature_external_resources( admin_client_new: APIClient, - feature_external_resource: FeatureExternalResource, feature: Feature, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, + mocker: MockerFixture, ) -> None: # Given + mocker.patch( + "integrations.github.client.generate_token", + ) url = reverse( "api-v1:projects:feature-external-resources-list", kwargs={"project_pk": project.id, "feature_pk": feature.id}, ) + responses.add( + method="GET", + url=f"{GITHUB_API_URL}repos/repositoryownertest/repositorynametest/issues/11", + status=200, + json={"title": "resource name", "state": "open"}, + ) + # When response = admin_client_new.get(url) @@ -349,11 +388,11 @@ def test_get_feature_external_resources( def test_get_feature_external_resource( admin_client_new: APIClient, - feature_external_resource: FeatureExternalResource, feature: Feature, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, ) -> None: # Given url = reverse( @@ -372,33 +411,40 @@ def test_get_feature_external_resource( def test_create_github_comment_on_feature_state_updated( + staff_user: FFAdminUser, staff_client: APIClient, with_environment_permissions: WithEnvironmentPermissionsCallable, - feature_external_resource: FeatureExternalResource, feature: Feature, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + post_request_mock: MagicMock, mocker: MockerFixture, environment: Environment, + feature_external_resource: FeatureExternalResource, ) -> None: # Given - with_environment_permissions([UPDATE_FEATURE_STATE]) + with_environment_permissions([UPDATE_FEATURE_STATE], environment.id, False) feature_state = FeatureState.objects.get( feature=feature, environment=environment.id ) - mock_generate_token = mocker.patch( - "integrations.github.github.generate_token", - ) - mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch( - "requests.post", side_effect=mocked_requests_post - ) - feature_state_updated_at = feature_state.updated_at.strftime( - get_format("DATETIME_INPUT_FORMATS")[0] + payload = dict(FeatureStateSerializerBasic(instance=feature_state).data) + + payload["enabled"] = not feature_state.enabled + url = reverse( + viewname="api-v1:environments:environment-featurestates-detail", + kwargs={"environment_api_key": environment.api_key, "pk": feature_state.id}, ) + # When + response = staff_client.put(path=url, data=payload, format="json") + + # Then + feature_state_updated_at = FeatureState.objects.get( + id=feature_state.id + ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) + expected_body_comment = ( "Flagsmith Feature `Test Feature1` has been updated:\n" + expected_default_body( @@ -411,25 +457,14 @@ def test_create_github_comment_on_feature_state_updated( ) ) - payload = dict(FeatureStateSerializerBasic(instance=feature_state).data) - - payload["enabled"] = not feature_state.enabled - url = reverse( - viewname="api-v1:environments:environment-featurestates-detail", - kwargs={"environment_api_key": environment.api_key, "pk": feature_state.id}, - ) - - # When - response = staff_client.put(path=url, data=payload, format="json") - - # Then assert response.status_code == status.HTTP_200_OK - github_request_mock.assert_called_with( - "https://api.github.com/repos/userexample/example-project-repo/issues/11/comments", + 
post_request_mock.assert_called_with( + "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", json={"body": expected_body_comment}, headers={ "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, "Authorization": "Bearer mocked_token", }, timeout=10, @@ -439,21 +474,18 @@ def test_create_github_comment_on_feature_state_updated( def test_create_github_comment_on_feature_was_deleted( admin_client: APIClient, with_environment_permissions: WithEnvironmentPermissionsCallable, - feature_external_resource: FeatureExternalResource, feature: Feature, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, + post_request_mock: MagicMock, mocker: MockerFixture, ) -> None: # Given - mock_generate_token = mocker.patch( - "integrations.github.github.generate_token", - ) - mock_generate_token.return_value = "mocked_token" - - github_request_mock = mocker.patch( - "requests.post", side_effect=mocked_requests_post + mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", ) url = reverse( @@ -467,11 +499,12 @@ def test_create_github_comment_on_feature_was_deleted( # Then assert response.status_code == status.HTTP_204_NO_CONTENT - github_request_mock.assert_called_with( - "https://api.github.com/repos/userexample/example-project-repo/issues/11/comments", + post_request_mock.assert_called_with( + "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", json={"body": "### The Feature Flag `Test Feature1` was deleted"}, headers={ "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, "Authorization": "Bearer mocked_token", }, timeout=10, @@ -480,23 +513,21 @@ def test_create_github_comment_on_feature_was_deleted( def test_create_github_comment_on_segment_override_updated( feature_with_value: Feature, - segment_featurestate_and_feature_with_value: FeatureState, - feature_with_value_external_resource: FeatureExternalResource, + segment_override_for_feature_with_value: FeatureState, project: Project, github_configuration: GithubConfiguration, github_repository: GithubRepository, + post_request_mock: MagicMock, mocker: MockerFixture, environment: Environment, admin_client: APIClient, + feature_with_value_external_resource: FeatureExternalResource, ) -> None: # Given - feature_state = segment_featurestate_and_feature_with_value - mock_generate_token = mocker.patch( - "integrations.github.github.generate_token", - ) - mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch( - "requests.post", side_effect=mocked_requests_post + feature_state = segment_override_for_feature_with_value + mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", ) payload = dict(WritableNestedFeatureStateSerializer(instance=feature_state).data) @@ -509,11 +540,13 @@ def test_create_github_comment_on_segment_override_updated( kwargs={"pk": feature_state.id}, ) - segment_override_updated_at = ( - segment_featurestate_and_feature_with_value.updated_at.strftime( - get_format("DATETIME_INPUT_FORMATS")[0] - ) - ) + # When + response = admin_client.put(path=url, data=payload, format="json") + + # Then + segment_override_updated_at = FeatureState.objects.get( + id=segment_override_for_feature_with_value.id + ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) expected_comment_body = ( "Flagsmith Feature 
`feature_with_value` has been updated:\n" @@ -528,17 +561,254 @@ def test_create_github_comment_on_segment_override_updated( ) ) + assert response.status_code == status.HTTP_200_OK + + post_request_mock.assert_called_with( + "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", + json={"body": expected_comment_body}, + headers={ + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, + "Authorization": "Bearer mocked_token", + }, + timeout=10, + ) + + +def test_create_github_comment_on_segment_override_deleted( + segment_override_for_feature_with_value: FeatureState, + feature_with_value_segment: FeatureSegment, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + post_request_mock: MagicMock, + mocker: MockerFixture, + admin_client_new: APIClient, + feature_with_value_external_resource: FeatureExternalResource, +) -> None: + # Given + mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", + ) + + url = reverse( + viewname="api-v1:features:feature-segment-detail", + kwargs={"pk": feature_with_value_segment.id}, + ) + # When - response = admin_client.put(path=url, data=payload, format="json") + response = admin_client_new.delete(path=url, format="json") # Then - assert response.status_code == status.HTTP_200_OK - github_request_mock.assert_called_with( - "https://api.github.com/repos/userexample/example-project-repo/issues/11/comments", + assert response.status_code == status.HTTP_204_NO_CONTENT + + post_request_mock.assert_called_with( + "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", + json={ + "body": "### The Segment Override `segment` for Feature Flag `feature_with_value` was deleted" + }, + headers={ + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, + "Authorization": "Bearer mocked_token", + }, + timeout=10, + ) + + +def test_create_github_comment_using_v2( + admin_client_new: APIClient, + environment_v2_versioning: Environment, + segment: Segment, + feature: Feature, + environment: Environment, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, + post_request_mock: MagicMock, + mocker: MockerFixture, +) -> None: + # Given + environment_feature_version = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, feature=feature + ) + + url = reverse( + "api-v1:versioning:environment-feature-version-featurestates-list", + args=[ + environment_v2_versioning.id, + feature.id, + environment_feature_version.uuid, + ], + ) + + data = { + "feature_segment": {"segment": segment.id}, + "enabled": True, + "feature_state_value": { + "string_value": "segment value!", + }, + } + + # When + response = admin_client_new.post( + url, data=json.dumps(data), content_type="application/json" + ) + response_data = response.json() + + # Then + format = "%Y-%m-%dT%H:%M:%S.%fZ" + formatted_updated_at = datetime.strptime( + response_data["updated_at"], format + ).strftime(get_format("DATETIME_INPUT_FORMATS")[0]) + expected_comment_body = ( + "Flagsmith Feature `Test Feature1` has been updated:\n" + + "\n" + + expected_segment_comment_body( + project.id, + environment.api_key, + feature.id, + formatted_updated_at, + "✅ Enabled", + "`segment value!`", + ) + ) + + post_request_mock.assert_called_with( + 
"https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", json={"body": expected_comment_body}, headers={ "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, + "Authorization": "Bearer mocked_token", + }, + timeout=10, + ) + + assert response.status_code == status.HTTP_201_CREATED + + +def test_create_github_comment_using_v2_fails_on_wrong_params( + admin_client_new: APIClient, + environment_v2_versioning: Environment, + segment: Segment, + feature: Feature, + environment: Environment, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, + post_request_mock: MagicMock, + mocker: MockerFixture, +) -> None: + + # Given + environment_feature_version = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, feature=feature + ) + + url = reverse( + "api-v1:versioning:environment-feature-version-featurestates-list", + args=[ + environment_v2_versioning.id, + feature.id, + environment_feature_version.uuid, + ], + ) + + data = { + "feature_segment": {"segment": segment.id}, + "enabled": True, + "feature_state_value": { + "string_value": {"value": "wrong structure"}, + }, + } + + # When + response = admin_client_new.post( + url, data=json.dumps(data), content_type="application/json" + ) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +@responses.activate +@pytest.mark.freeze_time("2024-05-28T09:09:47.325132+00:00") +def test_create_feature_external_resource_on_environment_with_v2( + admin_client_new: APIClient, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + segment_override_for_feature_with_value: FeatureState, + environment_v2_versioning: Environment, + post_request_mock: MagicMock, + mocker: MockerFixture, +) -> None: + # Given + feature_id = segment_override_for_feature_with_value.feature_id + + mocker.patch( + "integrations.github.client.generate_token", + return_value="mocked_token", + ) + + feature_external_resource_data = { + "type": "GITHUB_ISSUE", + "url": "https://github.com/repoowner/repo-name/issues/35", + "feature": feature_id, + "metadata": {"state": "open"}, + } + + url = reverse( + "api-v1:projects:feature-external-resources-list", + kwargs={"project_pk": project.id, "feature_pk": feature_id}, + ) + + # When + response = admin_client_new.post( + url, data=feature_external_resource_data, format="json" + ) + + # Then + feature_state_update_at = FeatureState.objects.get( + id=segment_override_for_feature_with_value.id + ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) + + segment_override_updated_at = FeatureState.objects.get( + id=segment_override_for_feature_with_value.id + ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) + + expected_comment_body = ( + "**Flagsmith feature linked:** `feature_with_value`\n" + + "Default Values:\n" + + expected_default_body( + project.id, + environment_v2_versioning.api_key, + feature_id, + feature_state_update_at, + ) + + "\n" + + expected_segment_comment_body( + project.id, + environment_v2_versioning.api_key, + feature_id, + segment_override_updated_at, + "❌ Disabled", + "`value`", + ) + ) + + assert response.status_code == status.HTTP_201_CREATED + + post_request_mock.assert_called_with( + "https://api.github.com/repos/repoowner/repo-name/issues/35/comments", + json={"body": f"{expected_comment_body}"}, + headers={ + "Accept": 
"application/vnd.github.v3+json", + "X-GitHub-Api-Version": GITHUB_API_VERSION, "Authorization": "Bearer mocked_token", }, timeout=10, diff --git a/api/tests/unit/features/test_unit_features_models.py b/api/tests/unit/features/test_unit_features_models.py index 943d96c57aa7..0b80f5bc1fc8 100644 --- a/api/tests/unit/features/test_unit_features_models.py +++ b/api/tests/unit/features/test_unit_features_models.py @@ -660,7 +660,7 @@ def test_feature_state_get_skip_create_audit_log_if_uncommitted_change_request( def test_feature_state_get_skip_create_audit_log_if_environment_feature_version( - environment_v2_versioning, feature + environment_v2_versioning: Environment, feature: Feature ): # Given environment_feature_version = EnvironmentFeatureVersion.objects.get( @@ -676,6 +676,23 @@ def test_feature_state_get_skip_create_audit_log_if_environment_feature_version( assert feature_state.get_skip_create_audit_log() is True +def test_feature_state_value_get_skip_create_audit_log_if_environment_feature_version( + environment_v2_versioning: Environment, feature: Feature +): + # Given + environment_feature_version = EnvironmentFeatureVersion.objects.get( + environment=environment_v2_versioning, feature=feature + ) + feature_state = FeatureState.objects.get( + environment=environment_v2_versioning, + feature=feature, + environment_feature_version=environment_feature_version, + ) + + # Then + assert feature_state.feature_state_value.get_skip_create_audit_log() is True + + @pytest.mark.parametrize( "feature_segment_id, identity_id, expected_function_name", ( diff --git a/api/tests/unit/features/test_unit_features_views.py b/api/tests/unit/features/test_unit_features_views.py index c579d1ab0af9..20f15cf00b6a 100644 --- a/api/tests/unit/features/test_unit_features_views.py +++ b/api/tests/unit/features/test_unit_features_views.py @@ -455,7 +455,7 @@ def test_get_project_features_influx_data( mock_get_event_list.assert_called_once_with( feature_name=feature.name, environment_id=str(environment.id), # provided as a GET param - period="24h", # this is the default but can be provided as a GET param + date_start="-24h", # this is the default but can be provided as a GET param aggregate_every="24h", # this is the default but can be provided as a GET param ) diff --git a/api/tests/unit/features/versioning/test_unit_versioning_models.py b/api/tests/unit/features/versioning/test_unit_versioning_models.py index 76676a3c8db6..ee26cc905e4f 100644 --- a/api/tests/unit/features/versioning/test_unit_versioning_models.py +++ b/api/tests/unit/features/versioning/test_unit_versioning_models.py @@ -1,4 +1,5 @@ import typing +from datetime import timedelta import pytest from django.utils import timezone @@ -159,6 +160,36 @@ def test_get_previous_version_ignores_unpublished_version( assert version_3.get_previous_version() == version_1 +def test_get_previous_version_returns_previous_version_if_there_is_a_more_recent_previous_version( + feature: "Feature", + environment_v2_versioning: Environment, + admin_user: "FFAdminUser", +) -> None: + # Given + # The initial version created when enabling versioning_v2 + version_0 = EnvironmentFeatureVersion.objects.get( + environment=environment_v2_versioning, feature=feature + ) + + # Now, let's create (and publish) 2 new versions + version_1 = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, feature=feature + ) + version_1.publish(admin_user) + version_2 = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, feature=feature 
+ ) + version_2.publish(admin_user) + + # When + previous_version = version_1.get_previous_version() + + # Then + # The previous version for the first version we created should be the + # original version created when enabling versioning_v2 + assert previous_version == version_0 + + def test_publish( feature: "Feature", project: "Project", @@ -218,3 +249,31 @@ def test_update_version_webhooks_triggered_when_version_published( kwargs={"environment_feature_version_uuid": str(new_version.uuid)}, delay_until=new_version.live_from, ) + + +def test_get_latest_versions_does_not_return_versions_scheduled_for_the_future( + environment_v2_versioning: Environment, + feature: "Feature", + admin_user: "FFAdminUser", +) -> None: + # Given + version_0 = EnvironmentFeatureVersion.objects.get( + environment=environment_v2_versioning, feature=feature + ) + + # Let's create a version scheduled for the future, that we'll publish + scheduled_version = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, + feature=feature, + live_from=timezone.now() + timedelta(hours=1), + ) + scheduled_version.publish(admin_user) + + # When + latest_versions = EnvironmentFeatureVersion.objects.get_latest_versions_as_queryset( + environment_id=environment_v2_versioning.id + ) + + # Then + assert latest_versions.count() == 1 + assert latest_versions.first() == version_0 diff --git a/api/tests/unit/features/versioning/test_unit_versioning_tasks.py b/api/tests/unit/features/versioning/test_unit_versioning_tasks.py index 597bc64a6736..ee6f56f82f5f 100644 --- a/api/tests/unit/features/versioning/test_unit_versioning_tasks.py +++ b/api/tests/unit/features/versioning/test_unit_versioning_tasks.py @@ -158,7 +158,7 @@ def test_trigger_update_version_webhooks( # Then mock_call_environment_webhooks.assert_called_once_with( - environment=environment_v2_versioning, + environment=environment_v2_versioning.id, data={ "uuid": str(version.uuid), "feature": {"id": feature.id, "name": feature.name}, diff --git a/api/tests/unit/features/versioning/test_unit_versioning_views.py b/api/tests/unit/features/versioning/test_unit_versioning_views.py index 76d7b471840c..3a68ea9fd5a3 100644 --- a/api/tests/unit/features/versioning/test_unit_versioning_views.py +++ b/api/tests/unit/features/versioning/test_unit_versioning_views.py @@ -9,6 +9,10 @@ from rest_framework import status from rest_framework.test import APIClient +from api_keys.models import MasterAPIKey +from audit.constants import ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE +from audit.models import AuditLog +from audit.related_object_type import RelatedObjectType from environments.models import Environment from environments.permissions.constants import VIEW_ENVIRONMENT from features.models import Feature, FeatureSegment, FeatureState @@ -122,6 +126,120 @@ def test_delete_feature_version( assert environment_feature_version.deleted is True +def test_retrieve_environment_feature_version_permission_denied( + feature: Feature, + environment_v2_versioning: Environment, + staff_client: APIClient, +) -> None: + # Given + efv = EnvironmentFeatureVersion.objects.first() + + url = reverse("api-v1:versioning:get-efv-by-uuid", args=[str(efv.uuid)]) + + # When + response = staff_client.get(url) + + # Then + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_retrieve_feature_version_with_no_previous_version( + feature: Feature, + environment_v2_versioning: Environment, + staff_client: APIClient, + with_environment_permissions: 
WithEnvironmentPermissionsCallable, + with_project_permissions: WithProjectPermissionsCallable, +) -> None: + # Given + environment_feature_version = EnvironmentFeatureVersion.objects.get( + feature=feature, environment=environment_v2_versioning + ) + + url = reverse( + "api-v1:versioning:get-efv-by-uuid", args=[environment_feature_version.uuid] + ) + + with_environment_permissions([VIEW_ENVIRONMENT]) + with_project_permissions([VIEW_PROJECT]) + + # When + response = staff_client.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + + response_json = response.json() + assert response_json["uuid"] == str(environment_feature_version.uuid) + assert response_json["previous_version_uuid"] is None + assert response_json["feature"] == feature.id + assert response_json["environment"] == environment_v2_versioning.id + + +def test_retrieve_feature_version_with_previous_version( + feature: Feature, + environment_v2_versioning: Environment, + staff_user: FFAdminUser, + staff_client: APIClient, + with_environment_permissions: WithEnvironmentPermissionsCallable, + with_project_permissions: WithProjectPermissionsCallable, +) -> None: + # Given + with_environment_permissions([VIEW_ENVIRONMENT]) + with_project_permissions([VIEW_PROJECT]) + + version_1 = EnvironmentFeatureVersion.objects.filter( + feature=feature, environment=environment_v2_versioning + ).get() + version_2 = EnvironmentFeatureVersion.objects.create( + feature=feature, environment=environment_v2_versioning + ) + version_2.publish(published_by=staff_user) + + url = reverse("api-v1:versioning:get-efv-by-uuid", args=[version_2.uuid]) + + # When + response = staff_client.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + + response_json = response.json() + assert response_json["uuid"] == str(version_2.uuid) + assert response_json["previous_version_uuid"] == str(version_1.uuid) + + +def test_retrieve_feature_version_for_unpublished_version( + feature: Feature, + environment_v2_versioning: Environment, + staff_user: FFAdminUser, + staff_client: APIClient, + with_environment_permissions: WithEnvironmentPermissionsCallable, + with_project_permissions: WithProjectPermissionsCallable, +) -> None: + # Given + with_environment_permissions([VIEW_ENVIRONMENT]) + with_project_permissions([VIEW_PROJECT]) + + version_1 = EnvironmentFeatureVersion.objects.filter( + feature=feature, environment=environment_v2_versioning + ).get() + version_2 = EnvironmentFeatureVersion.objects.create( + feature=feature, environment=environment_v2_versioning + ) + + url = reverse("api-v1:versioning:get-efv-by-uuid", args=[version_2.uuid]) + + # When + response = staff_client.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + + response_json = response.json() + assert response_json["uuid"] == str(version_2.uuid) + assert response_json["previous_version_uuid"] == str(version_1.uuid) + + def test_cannot_delete_live_feature_version( admin_client: APIClient, environment_v2_versioning: Environment, @@ -189,9 +307,18 @@ def test_publish_feature_version( environment_feature_version.live_from == now if live_from is None else live_from ) + # and an audit log record is created correctly + record = AuditLog.objects.filter( + related_object_type=RelatedObjectType.EF_VERSION.name, + related_object_uuid=environment_feature_version.uuid, + ).first() + assert record + assert record.log == ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE % feature.name + @pytest.mark.parametrize("live_from", (None, tomorrow)) def 
test_publish_feature_version_using_master_api_key( + admin_master_api_key: MasterAPIKey, admin_master_api_key_client: APIClient, environment_v2_versioning: Environment, feature: Feature, @@ -223,6 +350,7 @@ def test_publish_feature_version_using_master_api_key( assert environment_feature_version.is_live is True assert environment_feature_version.published is True assert environment_feature_version.published_by is None + assert environment_feature_version.published_by_api_key == admin_master_api_key[0] assert ( environment_feature_version.live_from == now if live_from is None else live_from ) diff --git a/api/tests/unit/features/workflows/core/test_unit_workflows_models.py b/api/tests/unit/features/workflows/core/test_unit_workflows_models.py index e9bcc5d1589c..c7fa0febcdc1 100644 --- a/api/tests/unit/features/workflows/core/test_unit_workflows_models.py +++ b/api/tests/unit/features/workflows/core/test_unit_workflows_models.py @@ -9,6 +9,7 @@ CHANGE_REQUEST_APPROVED_MESSAGE, CHANGE_REQUEST_COMMITTED_MESSAGE, CHANGE_REQUEST_CREATED_MESSAGE, + ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE, FEATURE_STATE_UPDATED_BY_CHANGE_REQUEST_MESSAGE, ) from audit.models import AuditLog @@ -703,3 +704,30 @@ def test_can_delete_committed_change_request_scheduled_for_the_future_with_envir # Then assert not ChangeRequest.objects.filter(id=change_request.id).exists() + + +def test_committing_change_request_with_environment_feature_versions_creates_publish_audit_log( + feature: Feature, environment_v2_versioning: Environment, admin_user: FFAdminUser +) -> None: + # Given + change_request = ChangeRequest.objects.create( + title="Test CR", + environment=environment_v2_versioning, + user=admin_user, + ) + + environment_feature_version = EnvironmentFeatureVersion.objects.create( + environment=environment_v2_versioning, + feature=feature, + change_request=change_request, + ) + + # When + change_request.commit(admin_user) + + # Then + assert AuditLog.objects.filter( + related_object_uuid=environment_feature_version.uuid, + related_object_type=RelatedObjectType.EF_VERSION.name, + log=ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE % feature.name, + ).exists() diff --git a/api/tests/unit/integrations/github/test_unit_github_views.py b/api/tests/unit/integrations/github/test_unit_github_views.py index c0b158a5abfd..9fbe84d57974 100644 --- a/api/tests/unit/integrations/github/test_unit_github_views.py +++ b/api/tests/unit/integrations/github/test_unit_github_views.py @@ -1,21 +1,41 @@ import json +from typing import Any import pytest +import requests +import responses from django.conf import settings from django.urls import reverse from pytest_lazyfixture import lazy_fixture from pytest_mock import MockerFixture from rest_framework import status +from rest_framework.response import Response from rest_framework.test import APIClient from features.feature_external_resources.models import FeatureExternalResource +from integrations.github.constants import GITHUB_API_URL from integrations.github.models import GithubConfiguration, GithubRepository -from integrations.github.views import github_webhook_payload_is_valid +from integrations.github.views import ( + github_api_call_error_handler, + github_webhook_payload_is_valid, +) from organisations.models import Organisation from projects.models import Project WEBHOOK_PAYLOAD = json.dumps({"installation": {"id": 1234567}, "action": "deleted"}) +WEBHOOK_PAYLOAD_WITH_AN_INVALID_INSTALLATION_ID = json.dumps( + {"installation": {"id": 765432}, "action": "deleted"} +) 
+WEBHOOK_PAYLOAD_WITHOUT_INSTALLATION_ID = json.dumps( + {"installation": {"test": 765432}, "action": "deleted"} +) WEBHOOK_SIGNATURE = "sha1=57a1426e19cdab55dd6d0c191743e2958e50ccaa" +WEBHOOK_SIGNATURE_WITH_AN_INVALID_INSTALLATION_ID = ( + "sha1=081eef49d04df27552587d5df1c6b76e0fe20d21" +) +WEBHOOK_SIGNATURE_WITHOUT_INSTALLATION_ID = ( + "sha1=f99796bd3cebb902864e87ed960c5cca8772ff67" +) WEBHOOK_SECRET = "secret-key" @@ -100,11 +120,13 @@ def test_cannot_create_github_configuration_when_the_organization_already_has_an ) +@responses.activate def test_delete_github_configuration( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, + mocker: MockerFixture, ) -> None: # Given url = reverse( @@ -114,10 +136,62 @@ def test_delete_github_configuration( github_configuration.id, ], ) + + mock_generate_token = mocker.patch( + "integrations.github.client.generate_jwt_token", + ) + mock_generate_token.return_value = "mocked_token" + responses.add( + method="DELETE", + url=f"{GITHUB_API_URL}app/installations/{github_configuration.installation_id}", + status=204, + ) + # When response = admin_client_new.delete(url) + # Then assert response.status_code == status.HTTP_204_NO_CONTENT + assert not GithubConfiguration.objects.filter(id=github_configuration.id).exists() + + +@responses.activate +def test_cannot_delete_github_configuration_when_delete_github_installation_response_was_404( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + mocker: MockerFixture, +) -> None: + # Given + url = reverse( + "api-v1:organisations:integrations-github-detail", + args=[ + organisation.id, + github_configuration.id, + ], + ) + + mock_generate_token = mocker.patch( + "integrations.github.client.generate_jwt_token", + ) + mock_generate_token.return_value = "mocked_token" + responses.add( + method="DELETE", + url=f"{GITHUB_API_URL}app/installations/{github_configuration.installation_id}", + status=404, + json={"message": "not found"}, + ) + + # When + response = admin_client_new.delete(url) + # Then + assert response.status_code == status.HTTP_502_BAD_GATEWAY + assert ( + response.json()["detail"] + == "Failed to delete GitHub Installation. 
Error: 404 Client Error: Not Found for url: https://api.github.com/app/installations/1234567" # noqa: E501 + ) + assert GithubConfiguration.objects.filter(id=github_configuration.id).exists() def test_get_github_repository( @@ -136,6 +210,23 @@ def test_get_github_repository( assert response.status_code == status.HTTP_200_OK +def test_cannot_get_github_repository_when_github_pk_in_not_a_number( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, +): + # Given + url = reverse( + "api-v1:organisations:repositories-list", + args=[organisation.id, "str"], + ) + # When + response = admin_client_new.get(url) + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == {"github_pk": ["Must be an integer"]} + + def test_create_github_repository( admin_client_new: APIClient, organisation: Organisation, @@ -221,14 +312,14 @@ def test_cannot_create_github_repository_due_to_unique_constraint( def test_github_delete_repository( admin_client_new: APIClient, organisation: Organisation, - feature_external_resource: FeatureExternalResource, github_configuration: GithubConfiguration, github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, mocker: MockerFixture, ) -> None: # Given mock_generate_token = mocker.patch( - "integrations.github.github.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.return_value = "mocked_token" url = reverse( @@ -237,27 +328,61 @@ def test_github_delete_repository( ) for feature in github_repository.project.features.all(): assert FeatureExternalResource.objects.filter(feature=feature).exists() + # When response = admin_client_new.delete(url) + # Then assert response.status_code == status.HTTP_204_NO_CONTENT for feature in github_repository.project.features.all(): assert not FeatureExternalResource.objects.filter(feature=feature).exists() -def mocked_requests_get(*args, **kwargs): - class MockResponse: - def __init__(self, json_data, status_code): - self.json_data = json_data - self.status_code = status_code +class MockResponse: + def __init__(self, json_data, status_code): + self.json_data = json_data + self.status_code = status_code + self.links = { + "next": "https://example.com/next", + "prev": "https://example.com/prev", + } + + def raise_for_status(self) -> None: + if 400 <= self.status_code < 600: + raise requests.exceptions.HTTPError(f"HTTP Error {self.status_code}") + + def json(self): + return self.json_data + + +def mocked_requests_get_issues_and_pull_requests(*args, **kwargs): + json_data = { + "items": [ + { + "html_url": "https://example.com/1", + "id": 1, + "title": "Title 1", + "number": 101, + "state": "Open", + "merged": False, + "draft": True, + }, + ], + "total_count": 1, + "incomplete_results": True, + } + status_code = 200 + response = MockResponse(json_data, status_code) + + return response - def raise_for_status(self) -> None: - pass - def json(self): - return self.json_data +def mocked_requests_get_error(*args, **kwargs): + json_data = {"detail": "Not found"} + status_code = 404 + response = MockResponse(json_data, status_code) - return MockResponse(json_data={"data": "data"}, status_code=200) + return response def test_fetch_pull_requests( @@ -265,15 +390,17 @@ def test_fetch_pull_requests( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + mocker: MockerFixture, ) -> None: # Given mock_generate_token = mocker.patch( - 
"integrations.github.views.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch("requests.get", side_effect=mocked_requests_get) + github_request_mock = mocker.patch( + "requests.get", side_effect=mocked_requests_get_issues_and_pull_requests + ) url = reverse("api-v1:organisations:get-github-pulls", args=[organisation.id]) data = {"repo_owner": "owner", "repo_name": "repo"} @@ -283,11 +410,11 @@ def test_fetch_pull_requests( response_json = response.json() # Then - assert response.status_code == 200 - assert "data" in response_json + assert response.status_code == status.HTTP_200_OK + assert "results" in response_json github_request_mock.assert_called_with( - "https://api.github.com/repos/owner/repo/pulls", + "https://api.github.com/search/issues?q= repo:owner/repo is:pr is:open in:title in:body&per_page=100&page=1", # noqa: E501 headers={ "X-GitHub-Api-Version": "2022-11-28", "Accept": "application/vnd.github.v3+json", @@ -297,31 +424,40 @@ def test_fetch_pull_requests( ) -def test_fetch_issue( +def test_fetch_issues( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + mocker: MockerFixture, ) -> None: # Given mock_generate_token = mocker.patch( - "integrations.github.views.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch("requests.get", side_effect=mocked_requests_get) + github_request_mock = mocker.patch( + "requests.get", side_effect=mocked_requests_get_issues_and_pull_requests + ) url = reverse("api-v1:organisations:get-github-issues", args=[organisation.id]) - data = {"repo_owner": "owner", "repo_name": "repo"} + data = { + "repo_owner": "owner", + "repo_name": "repo", + "search_text": "search text", + "search_in_comments": True, + "author": "author", + "assignee": "assignee", + } # When response = admin_client_new.get(url, data=data) # Then - assert response.status_code == 200 + assert response.status_code == status.HTTP_200_OK response_json = response.json() - assert "data" in response_json + assert "results" in response_json github_request_mock.assert_called_with( - "https://api.github.com/repos/owner/repo/issues", + "https://api.github.com/search/issues?q= search text repo:owner/repo is:issue is:open in:title in:body in:comments author:author assignee:assignee&per_page=100&page=1", # noqa: E501 headers={ "X-GitHub-Api-Version": "2022-11-28", "Accept": "application/vnd.github.v3+json", @@ -331,39 +467,75 @@ def test_fetch_issue( ) +def test_fetch_issues_returns_error_on_bad_response_from_github( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + mocker: MockerFixture, +) -> None: + # Given + mock_generate_token = mocker.patch( + "integrations.github.client.generate_token", + ) + mock_generate_token.return_value = "mocked_token" + mocker.patch("requests.get", side_effect=mocked_requests_get_error) + url = reverse("api-v1:organisations:get-github-issues", args=[organisation.id]) + data = {"repo_owner": "owner", "repo_name": "repo"} + # When + response = admin_client_new.get(url, data=data) + + # Then + assert response.status_code == status.HTTP_502_BAD_GATEWAY + response_json = response.json() + assert ( + "Failed to retrieve GitHub issues. 
Error: HTTP Error 404" + in response_json["detail"] + ) + + +@responses.activate def test_fetch_repositories( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + mocker: MockerFixture, ) -> None: # Given mock_generate_token = mocker.patch( - "integrations.github.views.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.return_value = "mocked_token" - github_request_mock = mocker.patch("requests.get", side_effect=mocked_requests_get) + responses.add( + method="GET", + url=f"{GITHUB_API_URL}installation/repositories", + status=status.HTTP_200_OK, + json={ + "repositories": [ + { + "full_name": "owner/repo-name", + "id": 1, + "name": "repo-name", + }, + ], + "total_count": 1, + }, + ) + url = reverse( "api-v1:organisations:get-github-installation-repos", args=[organisation.id] ) # When - response = admin_client_new.get(url) + response = admin_client_new.get( + url, data={"installation_id": github_configuration.installation_id} + ) # Then - assert response.status_code == 200 + assert response.status_code == status.HTTP_200_OK response_json = response.json() - assert "data" in response_json - - github_request_mock.assert_called_with( - "https://api.github.com/installation/repositories", - headers={ - "X-GitHub-Api-Version": "2022-11-28", - "Accept": "application/vnd.github.v3+json", - "Authorization": "Bearer mocked_token", - }, - timeout=10, - ) + assert "results" in response_json + assert len(response_json["results"]) == 1 @pytest.mark.parametrize( @@ -386,7 +558,7 @@ def test_fetch_issues_and_pull_requests_fails_with_status_400_when_integration_n ) -> None: # Given mock_generate_token = mocker.patch( - "integrations.github.views.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.generate_token.return_value = "mocked_token" # When @@ -394,7 +566,7 @@ def test_fetch_issues_and_pull_requests_fails_with_status_400_when_integration_n response = client.get(url) # Then - assert response.status_code == 400 + assert response.status_code == status.HTTP_400_BAD_REQUEST @pytest.mark.parametrize( @@ -414,7 +586,7 @@ def test_cannot_fetch_issues_or_prs_when_does_not_have_permissions( ) -> None: # Given mock_generate_token = mocker.patch( - "integrations.github.views.generate_token", + "integrations.github.client.generate_token", ) mock_generate_token.generate_token.return_value = "mocked_token" @@ -463,6 +635,7 @@ def test_verify_github_webhook_payload_returns_false_on_no_signature_header() -> def test_github_webhook_delete_installation( + api_client: APIClient, github_configuration: GithubConfiguration, ) -> None: # Given @@ -470,8 +643,7 @@ def test_github_webhook_delete_installation( url = reverse("api-v1:github-webhook") # When - client = APIClient() - response = client.post( + response = api_client.post( path=url, data=WEBHOOK_PAYLOAD, content_type="application/json", @@ -480,10 +652,62 @@ def test_github_webhook_delete_installation( ) # Then - assert response.status_code == 200 + assert response.status_code == status.HTTP_200_OK assert not GithubConfiguration.objects.filter(installation_id=1234567).exists() +def test_github_webhook_with_non_existing_installation( + api_client: APIClient, + github_configuration: GithubConfiguration, + mocker: MockerFixture, +) -> None: + # Given + settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET + url = reverse("api-v1:github-webhook") + mocker_logger = mocker.patch("integrations.github.github.logger") + 
+ # When + response = api_client.post( + path=url, + data=WEBHOOK_PAYLOAD_WITH_AN_INVALID_INSTALLATION_ID, + content_type="application/json", + HTTP_X_HUB_SIGNATURE=WEBHOOK_SIGNATURE_WITH_AN_INVALID_INSTALLATION_ID, + HTTP_X_GITHUB_EVENT="installation", + ) + + # Then + mocker_logger.error.assert_called_once_with( + "GitHub Configuration with installation_id 765432 does not exist" + ) + assert response.status_code == status.HTTP_200_OK + + +def test_github_webhook_without_installation_id( + api_client: APIClient, + github_configuration: GithubConfiguration, + mocker: MockerFixture, +) -> None: + # Given + settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET + url = reverse("api-v1:github-webhook") + mocker_logger = mocker.patch("integrations.github.github.logger") + + # When + response = api_client.post( + path=url, + data=WEBHOOK_PAYLOAD_WITHOUT_INSTALLATION_ID, + content_type="application/json", + HTTP_X_HUB_SIGNATURE=WEBHOOK_SIGNATURE_WITHOUT_INSTALLATION_ID, + HTTP_X_GITHUB_EVENT="installation", + ) + + # Then + mocker_logger.error.assert_called_once_with( + "The installation_id is not present in the payload: {'installation': {'test': 765432}, 'action': 'deleted'}" + ) + assert response.status_code == status.HTTP_200_OK + + def test_github_webhook_fails_on_signature_header_missing( github_configuration: GithubConfiguration, ) -> None: @@ -501,7 +725,7 @@ def test_github_webhook_fails_on_signature_header_missing( ) # Then - assert response.status_code == 400 + assert response.status_code == status.HTTP_400_BAD_REQUEST assert response.json() == {"error": "Invalid signature"} assert GithubConfiguration.objects.filter(installation_id=1234567).exists() @@ -524,7 +748,7 @@ def test_github_webhook_fails_on_bad_signature_header_missing( ) # Then - assert response.status_code == 400 + assert response.status_code == status.HTTP_400_BAD_REQUEST assert GithubConfiguration.objects.filter(installation_id=1234567).exists() assert response.json() == {"error": "Invalid signature"} @@ -547,5 +771,276 @@ def test_github_webhook_bypass_event( ) # Then - assert response.status_code == 200 + assert response.status_code == status.HTTP_200_OK assert GithubConfiguration.objects.filter(installation_id=1234567).exists() + + +@responses.activate +def test_cannot_fetch_pull_requests_when_github_request_call_failed( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + mocker, +) -> None: + + # Given + data = {"repo_owner": "owner", "repo_name": "repo"} + mock_generate_token = mocker.patch( + "integrations.github.client.generate_token", + ) + mock_generate_token.return_value = "mocked_token" + responses.add( + method="GET", + url=f"{GITHUB_API_URL}repos/{data['repo_owner']}/{data['repo_name']}/pulls", + status=404, + ) + + url = reverse("api-v1:organisations:get-github-pulls", args=[organisation.id]) + data = {"repo_owner": "owner", "repo_name": "repo"} + + # When + response = admin_client_new.get(url, data=data) + response_json = response.json() + + # Then + assert response.status_code == status.HTTP_502_BAD_GATEWAY + assert "Failed to retrieve GitHub pull requests." 
in response_json["detail"] + + +@responses.activate +def test_cannot_fetch_pulls_when_the_github_response_was_invalid( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + mocker, +) -> None: + # Given + data = {"repo_owner": "owner", "repo_name": "repo"} + mock_generate_token = mocker.patch( + "integrations.github.client.generate_token", + ) + mock_generate_token.return_value = "mocked_token" + responses.add( + method="GET", + url=f"{GITHUB_API_URL}repos/{data['repo_owner']}/{data['repo_name']}/pulls", + status=200, + json={"details": "invalid"}, + ) + url = reverse("api-v1:organisations:get-github-issues", args=[organisation.id]) + data = {"repo_owner": "owner", "repo_name": "repo"} + # When + response = admin_client_new.get(url, data=data) + + # Then + assert response.status_code == status.HTTP_502_BAD_GATEWAY + + +def test_cannot_fetch_repositories_when_there_is_no_installation_id( + admin_client_new: APIClient, + organisation: Organisation, +) -> None: + # Given + url = reverse( + "api-v1:organisations:get-github-installation-repos", args=[organisation.id] + ) + # When + response = admin_client_new.get(url) + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == {"detail": "Missing installation_id parameter"} + + +@responses.activate +def test_fetch_github_repo_contributors( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + mocker: MockerFixture, +) -> None: + # Given + url = reverse( + viewname="api-v1:organisations:get-github-repo-contributors", + args=[organisation.id], + ) + + mocked_github_response = [ + { + "login": "contributor1", + "avatar_url": "https://example.com/avatar1", + "contributions": 150, + }, + { + "login": "contributor2", + "avatar_url": "https://example.com/avatar2", + "contributions": 110, + }, + { + "login": "contributor3", + "avatar_url": "https://example.com/avatar3", + "contributions": 12, + }, + ] + + expected_response = {"results": mocked_github_response} + + mock_generate_token = mocker.patch( + "integrations.github.client.generate_token", + ) + mock_generate_token.return_value = "mocked_token" + + # Add response for endpoint being tested + responses.add( + method=responses.GET, + url=( + f"{GITHUB_API_URL}repos/{github_repository.repository_owner}/{github_repository.repository_name}/" + "contributors?&per_page=100&page=1" + ), + json=mocked_github_response, + status=200, + ) + + # When + response = admin_client_new.get( + path=url, + data={ + "repo_owner": github_repository.repository_owner, + "repo_name": github_repository.repository_name, + }, + ) + + # Then + assert response.status_code == status.HTTP_200_OK + assert response.json() == expected_response + + +def test_fetch_github_repo_contributors_with_invalid_query_params( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, +) -> None: + # Given + url = reverse( + viewname="api-v1:organisations:get-github-repo-contributors", + args=[organisation.id], + ) + + # When + response = admin_client_new.get( + path=url, + data={ + "repo_owner": github_repository.repository_owner, + }, + ) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == {"error": {"repo_name": ["This field is required."]}} + + +def 
test_github_api_call_error_handler_with_value_error( + mocker: MockerFixture, +) -> None: + # Given + @github_api_call_error_handler() + def test_view(request): + raise ValueError("Invalid parameter") + + # When + response = test_view(None) + + # Then + assert isinstance(response, Response) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.data == { + "detail": "Failed to retrieve requested information from GitHub API. Error: Invalid parameter" + } + + +@pytest.mark.parametrize( + "page, page_size, error_detail", + [ + ( + 1, + 103, + "Failed to retrieve GitHub repositories. Error: Page size must be an integer between 1 and 100", + ), + ( + 0, + 100, + "Failed to retrieve GitHub repositories. Error: Page must be greater or equal than 1", + ), + ], +) +def test_send_the_invalid_number_page_or_page_size_param_returns_400( + admin_client: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + page: int, + page_size: int, + error_detail: str, +) -> None: + # Given + data: dict[str, str | int] = { + "installation_id": github_configuration.installation_id, + "page": page, + "page_size": page_size, + } + + url = reverse( + "api-v1:organisations:get-github-installation-repos", args=[organisation.id] + ) + # When + response = admin_client.get(url, data) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_json = response.json() + assert response_json == {"detail": error_detail} + + +@pytest.mark.parametrize( + "page, page_size, error_response", + [ + ( + 1, + "string", + {"error": {"page_size": ["A valid integer is required."]}}, + ), + ( + "string", + 100, + {"error": {"page": ["A valid integer is required."]}}, + ), + ], +) +def test_send_the_invalid_type_page_or_page_size_param_returns_400( + admin_client: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + page: int, + page_size: int, + error_response: dict[str, Any], +) -> None: + # Given + data: dict[str, str | int] = { + "installation_id": github_configuration.installation_id, + "page": page, + "page_size": page_size, + } + + url = reverse( + "api-v1:organisations:get-github-installation-repos", args=[organisation.id] + ) + # When + response = admin_client.get(url, data) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_json = response.json() + assert response_json == error_response diff --git a/api/tests/unit/organisations/chargebee/test_unit_chargebee_chargebee.py b/api/tests/unit/organisations/chargebee/test_unit_chargebee_chargebee.py index e3dae9715336..0e4f443844e9 100644 --- a/api/tests/unit/organisations/chargebee/test_unit_chargebee_chargebee.py +++ b/api/tests/unit/organisations/chargebee/test_unit_chargebee_chargebee.py @@ -9,7 +9,7 @@ from pytz import UTC from organisations.chargebee import ( - add_1000_api_calls, + add_100k_api_calls, add_single_seat, extract_subscription_metadata, get_customer_id_from_subscription_id, @@ -605,12 +605,12 @@ def test_add_single_seat_throws_upgrade_seats_error_error_if_api_error( ) -def test_add_1000_api_calls_when_count_is_empty(mocker: MockerFixture) -> None: +def test_add_100k_api_calls_when_count_is_empty(mocker: MockerFixture) -> None: # Given subscription_mock = mocker.patch("chargebee.Subscription.update") # When - result = add_1000_api_calls( + result = add_100k_api_calls( addon_id=ADDITIONAL_API_SCALE_UP_ADDON_ID, subscription_id="subscription23", 
count=0, @@ -622,7 +622,7 @@ def test_add_1000_api_calls_when_count_is_empty(mocker: MockerFixture) -> None: subscription_mock.assert_not_called() -def test_add_1000_api_calls_when_chargebee_api_error_has_error_code( +def test_add_100k_api_calls_when_chargebee_api_error_has_error_code( mocker: MockerFixture, ) -> None: # Given @@ -641,7 +641,7 @@ def test_add_1000_api_calls_when_chargebee_api_error_has_error_code( # When / Then with pytest.raises(UpgradeAPIUsagePaymentFailure): - add_1000_api_calls( + add_100k_api_calls( addon_id=ADDITIONAL_API_SCALE_UP_ADDON_ID, subscription_id="subscription23", count=1, @@ -649,7 +649,7 @@ def test_add_1000_api_calls_when_chargebee_api_error_has_error_code( ) -def test_add_1000_api_calls_when_chargebee_api_error_has_no_error_code( +def test_add_100k_api_calls_when_chargebee_api_error_has_no_error_code( mocker: MockerFixture, ) -> None: # Given @@ -668,7 +668,7 @@ def test_add_1000_api_calls_when_chargebee_api_error_has_no_error_code( # When / Then with pytest.raises(UpgradeAPIUsageError): - add_1000_api_calls( + add_100k_api_calls( addon_id=ADDITIONAL_API_SCALE_UP_ADDON_ID, subscription_id="subscription23", count=1, diff --git a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py index 6c3edf1d8b7f..2cbe1daee0d8 100644 --- a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py +++ b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py @@ -15,7 +15,7 @@ def test_update_caches(mocker, organisation, chargebee_subscription, settings): "organisations.subscription_info_cache.get_top_organisations" ) mocked_get_top_organisations.side_effect = lambda t, _: { - organisation.id: organisation_usage.get(t) + organisation.id: organisation_usage.get(f"{t[1:]}") } chargebee_metadata = ChargebeeObjMetadata(seats=15, api_calls=1000000) @@ -59,7 +59,7 @@ def test_update_caches(mocker, organisation, chargebee_subscription, settings): assert mocked_get_top_organisations.call_count == 3 assert [call[0] for call in mocked_get_top_organisations.call_args_list] == [ - ("30d", ""), - ("7d", ""), - ("24h", "100"), + ("-30d", ""), + ("-7d", ""), + ("-24h", "100"), ] diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 8082698ef5c7..cf6444216e94 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -312,7 +312,7 @@ def test_handle_api_usage_notifications_below_100( handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, "14d") + mock_api_usage.assert_called_once_with(organisation.id, "-14d") assert len(mailoutbox) == 1 email = mailoutbox[0] @@ -405,7 +405,7 @@ def test_handle_api_usage_notifications_above_100( handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, "14d") + mock_api_usage.assert_called_once_with(organisation.id, "-14d") assert len(mailoutbox) == 1 email = mailoutbox[0] @@ -476,7 +476,7 @@ def test_charge_for_api_call_count_overages_scale_up( organisation=organisation, allowed_seats=10, allowed_projects=3, - allowed_30d_api_calls=10_000, + allowed_30d_api_calls=100_000, chargebee_email="test@example.com", current_billing_term_starts_at=now - timedelta(days=30), current_billing_term_ends_at=now + timedelta(minutes=30), @@ -498,7 +498,7 @@ def 
test_charge_for_api_call_count_overages_scale_up( mock_api_usage = mocker.patch( "organisations.tasks.get_current_api_usage", ) - mock_api_usage.return_value = 12_005 + mock_api_usage.return_value = 212_005 assert OrganisationAPIBilling.objects.count() == 0 # When @@ -511,7 +511,7 @@ def test_charge_for_api_call_count_overages_scale_up( "addons": [ { "id": "additional-api-scale-up-monthly", - "quantity": 2, # Two thousand API requests. + "quantity": 2, # 200k API requests. } ], "prorate": False, @@ -522,11 +522,54 @@ def test_charge_for_api_call_count_overages_scale_up( assert OrganisationAPIBilling.objects.count() == 1 api_billing = OrganisationAPIBilling.objects.first() assert api_billing.organisation == organisation - assert api_billing.api_overage == 2000 + assert api_billing.api_overage == 200_000 assert api_billing.immediate_invoice is False assert api_billing.billed_at == now +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_charge_for_api_call_count_overages_grace_period( + organisation: Organisation, + mocker: MockerFixture, +) -> None: + # Given + now = timezone.now() + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=100_000, + chargebee_email="test@example.com", + current_billing_term_starts_at=now - timedelta(days=30), + current_billing_term_ends_at=now + timedelta(minutes=30), + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = "scale-up-v2" + organisation.subscription.save() + OrganisationAPIUsageNotification.objects.create( + organisation=organisation, + percent_usage=100, + notified_at=now, + ) + + mock_chargebee_update = mocker.patch( + "organisations.chargebee.chargebee.chargebee.Subscription.update" + ) + mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + # Set the return value to something less than 200% of base rate + mock_api_usage.return_value = 115_000 + assert OrganisationAPIBilling.objects.count() == 0 + + # When + charge_for_api_call_count_overages() + + # Then + mock_chargebee_update.assert_not_called() + assert OrganisationAPIBilling.objects.count() == 0 + + @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_charge_for_api_call_count_overages_with_not_covered_plan( organisation: Organisation, @@ -575,7 +618,7 @@ def test_charge_for_api_call_count_overages_with_not_covered_plan( @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") -def test_charge_for_api_call_count_overages_sub_1_api_usage_ratio( +def test_charge_for_api_call_count_overages_under_api_limit( organisation: Organisation, mocker: MockerFixture, ) -> None: @@ -629,7 +672,7 @@ def test_charge_for_api_call_count_overages_start_up( organisation=organisation, allowed_seats=10, allowed_projects=3, - allowed_30d_api_calls=10_000, + allowed_30d_api_calls=100_000, chargebee_email="test@example.com", current_billing_term_starts_at=now - timedelta(days=30), current_billing_term_ends_at=now + timedelta(minutes=30), @@ -651,7 +694,7 @@ def test_charge_for_api_call_count_overages_start_up( mock_api_usage = mocker.patch( "organisations.tasks.get_current_api_usage", ) - mock_api_usage.return_value = 12_005 + mock_api_usage.return_value = 202_005 assert OrganisationAPIBilling.objects.count() == 0 # When @@ -664,7 +707,7 @@ def test_charge_for_api_call_count_overages_start_up( "addons": [ { "id": "additional-api-start-up-monthly", - "quantity": 2, # Two thousand API requests. 
+ "quantity": 2, # 200k API requests. } ], "prorate": False, @@ -675,19 +718,83 @@ def test_charge_for_api_call_count_overages_start_up( assert OrganisationAPIBilling.objects.count() == 1 api_billing = OrganisationAPIBilling.objects.first() assert api_billing.organisation == organisation - assert api_billing.api_overage == 2000 + assert api_billing.api_overage == 200_000 assert api_billing.immediate_invoice is False assert api_billing.billed_at == now # Now attempt to rebill the account should fail calls_mock = mocker.patch( - "organisations.tasks.add_1000_api_calls_start_up", + "organisations.tasks.add_100k_api_calls_start_up", ) charge_for_api_call_count_overages() assert OrganisationAPIBilling.objects.count() == 1 calls_mock.assert_not_called() +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_charge_for_api_call_count_overages_start_up_with_api_billing( + organisation: Organisation, + mocker: MockerFixture, +) -> None: + # Given + now = timezone.now() + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=100_000, + chargebee_email="test@example.com", + current_billing_term_starts_at=now - timedelta(days=30), + current_billing_term_ends_at=now + timedelta(minutes=30), + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = "startup-v2" + organisation.subscription.save() + OrganisationAPIUsageNotification.objects.create( + organisation=organisation, + percent_usage=100, + notified_at=now, + ) + + OrganisationAPIBilling.objects.create( + organisation=organisation, + api_overage=100_000, + immediate_invoice=False, + billed_at=now, + ) + + mocker.patch("organisations.chargebee.chargebee.chargebee.Subscription.retrieve") + mock_chargebee_update = mocker.patch( + "organisations.chargebee.chargebee.chargebee.Subscription.update" + ) + + mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + mock_api_usage.return_value = 202_005 + assert OrganisationAPIBilling.objects.count() == 1 + + # When + charge_for_api_call_count_overages() + + # Then + mock_chargebee_update.assert_called_once_with( + organisation.subscription.subscription_id, + { + "addons": [ + { + "id": "additional-api-start-up-monthly", + "quantity": 1, # 100k API requests. 
+ } + ], + "prorate": False, + "invoice_immediately": False, + }, + ) + + assert OrganisationAPIBilling.objects.count() == 2 + + @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_charge_for_api_call_count_overages_with_yearly_account( organisation: Organisation, diff --git a/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py b/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py index 889e42b9655a..64ea8465bf4a 100644 --- a/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py +++ b/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py @@ -1,5 +1,9 @@ import pytest from django.test import RequestFactory +from django.urls import reverse +from pytest_django.fixtures import SettingsWrapper +from pytest_mock import MockerFixture +from rest_framework.test import APIClient from organisations.models import ( Organisation, @@ -30,3 +34,32 @@ def test_organisation_subscription_get_api_call_overage( result = view.get_queryset().get(pk=organisation.id) assert result.overage == expected_overage + + +def test_get_organisation_info__get_event_list_for_organisation( + organisation: Organisation, + superuser_client: APIClient, + settings: SettingsWrapper, + mocker: MockerFixture, +) -> None: + # Given + settings.INFLUXDB_TOKEN = "AFancyToken" + + url = reverse("sales_dashboard:organisation_info", args=[organisation.id]) + + event_list_mock = mocker.patch( + "sales_dashboard.views.get_event_list_for_organisation" + ) + event_list_mock.return_value = ( + {"traits": [], "identities": [], "flags": [], "environment-document": []}, + ["label1", "label2"], + ) + mocker.patch("sales_dashboard.views.get_events_for_organisation") + + # When + response = superuser_client.get(url) + + # Then + assert "label1" in str(response.content) + assert "label2" in str(response.content) + event_list_mock.assert_called_once_with(organisation.id, "-180d", "now()") diff --git a/api/tests/unit/segments/test_unit_segments_views.py b/api/tests/unit/segments/test_unit_segments_views.py index 74fa7ae22127..95b53c012a2f 100644 --- a/api/tests/unit/segments/test_unit_segments_views.py +++ b/api/tests/unit/segments/test_unit_segments_views.py @@ -3,8 +3,10 @@ import pytest from django.contrib.auth import get_user_model +from django.contrib.contenttypes.models import ContentType from django.urls import reverse from flag_engine.segments.constants import EQUAL +from pytest_django import DjangoAssertNumQueries from pytest_django.fixtures import SettingsWrapper from pytest_lazyfixture import lazy_fixture from rest_framework import status @@ -15,7 +17,7 @@ from audit.related_object_type import RelatedObjectType from environments.models import Environment from features.models import Feature -from metadata.models import MetadataModelField +from metadata.models import Metadata, MetadataModelField from projects.models import Project from projects.permissions import MANAGE_SEGMENTS, VIEW_PROJECT from segments.models import Condition, Segment, SegmentRule, WhitelistedSegment @@ -181,6 +183,29 @@ def test_audit_log_created_when_segment_updated(project, segment, client): ) +@pytest.mark.parametrize( + "client", + [lazy_fixture("admin_master_api_key_client"), lazy_fixture("admin_client")], +) +def test_can_patch_segment(project, segment, client): + # Given + segment = Segment.objects.create(name="Test segment", project=project) + url = reverse( + "api-v1:projects:project-segments-detail", + args=[project.id, segment.id], + ) + data = { + "name": "New segment name", + "rules": 
[{"type": "ALL", "rules": [], "conditions": []}], + } + + # When + res = client.patch(url, data=json.dumps(data), content_type="application/json") + + # Then + assert res.status_code == status.HTTP_200_OK + + @pytest.mark.parametrize( "client", [lazy_fixture("admin_master_api_key_client"), lazy_fixture("admin_client")], @@ -337,16 +362,28 @@ def test_get_segment_by_uuid(client, project, segment): @pytest.mark.parametrize( "client, num_queries", [ - (lazy_fixture("admin_master_api_key_client"), 16), - (lazy_fixture("admin_client"), 15), + (lazy_fixture("admin_master_api_key_client"), 12), + (lazy_fixture("admin_client"), 11), ], ) -def test_list_segments(django_assert_num_queries, project, client, num_queries): +def test_list_segments( + django_assert_num_queries: DjangoAssertNumQueries, + project: Project, + client: APIClient, + num_queries: int, + required_a_segment_metadata_field: MetadataModelField, +): # Given num_segments = 5 segments = [] for i in range(num_segments): segment = Segment.objects.create(project=project, name=f"segment {i}") + Metadata.objects.create( + object_id=segment.id, + content_type=ContentType.objects.get_for_model(segment), + model_field=required_a_segment_metadata_field, + field_value="test", + ) all_rule = SegmentRule.objects.create( segment=segment, type=SegmentRule.ALL_RULE ) diff --git a/api/tests/unit/util/test_logging.py b/api/tests/unit/util/test_logging.py index 2e85a7eb2f10..d8c31a19f9f3 100644 --- a/api/tests/unit/util/test_logging.py +++ b/api/tests/unit/util/test_logging.py @@ -1,28 +1,74 @@ import json import logging +import os + +import pytest from util.logging import JsonFormatter -def test_json_formatter__outputs_expected(): +@pytest.fixture +def inspecting_handler() -> logging.Handler: + class InspectingHandler(logging.Handler): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.messages = [] + + def handle(self, record): + self.messages.append(self.format(record)) + + return InspectingHandler() + + +@pytest.mark.freeze_time("2023-12-08T06:05:47.320000+00:00") +def test_json_formatter__outputs_expected( + inspecting_handler: logging.Handler, + request: pytest.FixtureRequest, +) -> None: + # Given json_formatter = JsonFormatter() - log_record = logging.LogRecord( - name="test_logger", - level=logging.INFO, - pathname="test.py", - lineno=42, - msg="This is a test message with args: %s and %s", - args=("arg1", "arg2"), - exc_info=None, + inspecting_handler.setFormatter(json_formatter) + logger = logging.getLogger("test_json_formatter__outputs_expected") + logger.addHandler(inspecting_handler) + logger.setLevel(logging.INFO) + + expected_pid = os.getpid() + expected_module_path = os.path.abspath(request.path) + expected_tb_string = ( + "Traceback (most recent call last):\n" + f' File "{expected_module_path}",' + " line 47, in _log_traceback\n" + " raise Exception()\nException" ) - formatted_message = json_formatter.format(log_record) - json_message = json.loads(formatted_message) - - assert "levelname" in json_message - assert "message" in json_message - assert "timestamp" in json_message - assert "logger_name" in json_message - assert "process_id" in json_message - assert "thread_name" in json_message - assert json_message["message"] == "This is a test message with args: arg1 and arg2" + + def _log_traceback() -> None: + try: + raise Exception() + except Exception as exc: + logger.error("this is an error", exc_info=exc) + + # When + logger.info("hello %s, %d", "arg1", 22.22) + _log_traceback() + + # Then + assert 
[json.loads(message) for message in inspecting_handler.messages] == [ + { + "levelname": "INFO", + "message": "hello arg1, 22", + "timestamp": "2023-12-08 06:05:47,319", + "logger_name": "test_json_formatter__outputs_expected", + "process_id": expected_pid, + "thread_name": "MainThread", + }, + { + "levelname": "ERROR", + "message": "this is an error", + "timestamp": "2023-12-08 06:05:47,319", + "logger_name": "test_json_formatter__outputs_expected", + "process_id": expected_pid, + "thread_name": "MainThread", + "exc_info": expected_tb_string, + }, + ] diff --git a/api/util/logging.py b/api/util/logging.py index 32a81d08b2e1..27aa7117abf9 100644 --- a/api/util/logging.py +++ b/api/util/logging.py @@ -14,7 +14,7 @@ class JsonFormatter(logging.Formatter): def get_json_record(self, record: logging.LogRecord) -> dict[str, Any]: formatted_message = record.getMessage() - return { + json_record = { "levelname": record.levelname, "message": formatted_message, "timestamp": self.formatTime(record, self.datefmt), @@ -22,6 +22,9 @@ def get_json_record(self, record: logging.LogRecord) -> dict[str, Any]: "process_id": record.process, "thread_name": record.threadName, } + if record.exc_info: + json_record["exc_info"] = self.formatException(record.exc_info) + return json_record def format(self, record: logging.LogRecord) -> str: try: diff --git a/api/util/mappers/engine.py b/api/util/mappers/engine.py index e5e8c733ceef..90150206ed17 100644 --- a/api/util/mappers/engine.py +++ b/api/util/mappers/engine.py @@ -205,8 +205,8 @@ def map_environment_to_engine( latest_environment_feature_version_uuids=( { efv.uuid - for efv in EnvironmentFeatureVersion.objects.get_latest_versions( - environment + for efv in EnvironmentFeatureVersion.objects.get_latest_versions_by_environment_id( + environment.id ) } if environment.use_v2_feature_versioning diff --git a/api/webhooks/webhooks.py b/api/webhooks/webhooks.py index 3b12200a1872..73e7a082d76d 100644 --- a/api/webhooks/webhooks.py +++ b/api/webhooks/webhooks.py @@ -41,6 +41,7 @@ class WebhookEventType(enum.Enum): NEW_VERSION_PUBLISHED = "NEW_VERSION_PUBLISHED" FEATURE_EXTERNAL_RESOURCE_ADDED = "FEATURE_EXTERNAL_RESOURCE_ADDED" FEATURE_EXTERNAL_RESOURCE_REMOVED = "FEATURE_EXTERNAL_RESOURCE_REMOVED" + SEGMENT_OVERRIDE_DELETED = "SEGMENT_OVERRIDE_DELETED" class WebhookType(enum.Enum): diff --git a/docs/docs/advanced-use/change-requests.md b/docs/docs/advanced-use/change-requests.md index fe82c945884e..405041b55686 100644 --- a/docs/docs/advanced-use/change-requests.md +++ b/docs/docs/advanced-use/change-requests.md @@ -17,8 +17,7 @@ team member. They work in a similar way to Pull Requests in git. ## Setting up Change Requests Change Requests are configured at the Environment level. To enable Change Requests, go to the Environment Settings Page, -Enable 4 Eyes Change Request Approvals, and select how many approvals you would like for each Change Request to be -applied. +Enable the Change Request setting, and select how many approvals you would like for each Change Request to be applied. 
## Creating a Change Request diff --git a/docs/docs/clients/client-side/ios.md b/docs/docs/clients/client-side/ios.md index 3f97761f7c38..5a171ec772d0 100644 --- a/docs/docs/clients/client-side/ios.md +++ b/docs/docs/clients/client-side/ios.md @@ -118,6 +118,36 @@ Flagsmith.shared.getTraits(forIdentity: "test_user@test.com") {(result) in } ``` +To retrieve the flags for a particular identity: + +```swift +Flagsmith.shared.getFeatureFlags(forIdentity: "test_user@test.com") {(result) in + switch result { + case .success(let flags): + for flag in flags { + let name = flag.feature.name + let value = flag.value?.stringValue + let enabled = flag.enabled + print(name, "= enabled:", enabled, "value:", value ?? "nil") + } + case .failure(let error): + print(error) + } +} +``` + +If you would prefer to do this using async/await, you can do the following: + +```swift +let flags = try await Flagsmith.shared.getFeatureFlags(forIdentity: "test_user@test.com") +for flag in flags { + let name = flag.feature.name + let value = flag.value?.stringValue + let enabled = flag.enabled + print(name, "= enabled:", enabled, "value:", value ?? "nil") +} +``` + ## Override Default Configuration In `AppDelegate.swift`: diff --git a/docs/docs/clients/overview.md b/docs/docs/clients/overview.md index ae063fd7bfa8..4ad84ca4e3f6 100644 --- a/docs/docs/clients/overview.md +++ b/docs/docs/clients/overview.md @@ -346,7 +346,10 @@ are all computed locally. - Identities and their Traits are **not** read from or written to the Flagsmith API, and so are not persisted in the datastore. This means that you have to provide the full complement of Traits when requesting the Flags for a particular Identity. Our SDKs all provide relevant methods to achieve this. -- [Identity overrides](../basic-features/managing-identities#identity-overrides) do not operate at all. +- [Identity overrides](../basic-features/managing-identities#identity-overrides) work with self-hosted Flagsmith + instances. We're rolling them out gradually for the SaaS version. If you are a SaaS customer, + contact + us to try them out! - [Analytics-based Integrations](/integrations/overview#analytics-platforms) do not run. [Flag Analytics](/advanced-use/flag-analytics) do still work, if enabled within the [SDK setup](/clients/server-side#configuring-the-sdk). diff --git a/docs/docs/deployment/overview.md b/docs/docs/deployment/overview.md index d4406e545ecc..db88edc2840f 100644 --- a/docs/docs/deployment/overview.md +++ b/docs/docs/deployment/overview.md @@ -475,7 +475,6 @@ The list of the flags and remote config we're currently using in production is b { "value": "IN", "label": "In", - "warning": "Check your SDK version supports the IN operator. See SDK compatibility docs.", "valuePlaceholder": "Value1,Value2" }, { diff --git a/docs/docs/integrations/project-management/github.md b/docs/docs/integrations/project-management/github.md index d18837208413..075e22e8acbf 100644 --- a/docs/docs/integrations/project-management/github.md +++ b/docs/docs/integrations/project-management/github.md @@ -7,22 +7,24 @@ hide_title: true GitHub Logo -View your Flagsmith Flags inside your GitHub Issues and Pull Request +View your Flagsmith Flags inside GitHub Issues and Pull Requests. :::tip -- The GitHub integration is currently only supported with our hosted Flagsmith SaaS service. +The GitHub integration is currently only supported with our hosted Flagsmith SaaS service. ::: ## Integration Setup +You can either set up the integration from the Flagsmith side or from the GitHub side.
+ ### From Flagsmith -1. In the Integrations Option in the side bar, find the GitHub integration and click on 'Add Integration'. -2. A window will open asking you to select an organization you belong to. +1. In the Integrations Option in the top navigation bar, find the GitHub integration and click on 'Add Integration'. +2. A window will open asking you to select the organization you belong to. 3. Select the repositories and save. -4. In the Flagsmith application, the button will now say "Manage Integration", click on it. +4. Back in the Flagsmith application, click on the 'Manage Integration' button. 5. Finally, select the repository you wish to link. ### From GitHub @@ -38,13 +40,11 @@ View your Flagsmith Flags inside your GitHub Issues and Pull Request ## Adding a Flagsmith Flag to a GitHub issue or pull request 1. Create or select a Feature Flag. -2. Go to settings section. -3. Select your integration. -4. Select GitHub Issue or GitHub PR. -5. Select your external resource and save. +2. Go to the 'Link' Tab inside the Feature modal. +3. Select your GitHub integration. +4. Select GitHub Issue or GitHub PR and Save. -## Delete GitHub Integration +## Removing the GitHub Integration -1. In the Integrations Option in the side bar, find the GitHub integration and click on 'Manage Integration'. -2. Click on 'Delete Integracion' button, and confirm. -3. In your GitHub organisation, uninstall the Flagsmith GitHub App. +1. From Flagsmith, click 'Integrations', find the GitHub integration and click on 'Manage Integration'. +2. Click on the 'Delete Integration' button and confirm. diff --git a/docs/docs/platform/contributing.md b/docs/docs/platform/contributing.md index 907f983e8111..111797e8a746 100644 --- a/docs/docs/platform/contributing.md +++ b/docs/docs/platform/contributing.md @@ -16,6 +16,22 @@ suggesting the next steps. - If your PR involves a lot of commits, squash them using `git rebase -i` as this makes it easier for us to review. - Keep lines under 80 characters. +### Conventional Commits + +Please prefix the title of your PRs with one of the following +[Conventional Commit](https://www.conventionalcommits.org/en/v1.0.0/#summary) labels. E.g. `feat: My great feature`: + +- **feat**: A new feature or improvement to an existing feature +- **fix**: A bug fix +- **infra**: For Infrastructure-as-code type work +- **ci**: Changes to our CI/CD or build setup +- **docs**: Documentation only changes +- **deps**: Updating library dependencies +- **perf**: A code change that improves performance +- **refactor**: A code change that neither fixes a bug nor adds a feature +- **test**: Adding missing tests or correcting existing tests +- **chore**: Changes that don’t really live anywhere else (_please try NOT to use this label if possible_) + ## Pre-commit The application uses pre-commit configuration ( `.pre-commit-config.yaml` ) to run `black`, `flake8` and `isort` diff --git a/docs/docs/system-administration/authentication/02-Okta.md b/docs/docs/system-administration/authentication/02-Okta.md index 6561e09e2080..f282d8d9c2fc 100644 --- a/docs/docs/system-administration/authentication/02-Okta.md +++ b/docs/docs/system-administration/authentication/02-Okta.md @@ -3,11 +3,12 @@ title: Okta --- Flagsmith can integrate with your Okta single sign-on (SSO) by using [SAML](/system-administration/authentication/SAML). +We provide a [first-party Okta integration](https://www.okta.com/integrations/flagsmith/) to simplify the setup. 
## Prerequisites (SaaS) -Get in touch with Flagsmith support to obtain the single sign-on URL and audience URI to use when creating your Okta -SAML application. +Get in touch with Flagsmith support to obtain the single sign-on URL and audience URI to use when configuring your Okta +application. ## Prerequisites (self-hosted) @@ -15,16 +16,33 @@ Create a SAML configuration by following the [instructions to set up SAML](/system-administration/authentication/01-SAML/index.md#setup-self-hosted). Leave the identity provider metadata blank for now. -## Setup +## Procedure -[Create an Okta SAML application](https://help.okta.com/oag/en-us/content/topics/access-gateway/add-app-saml-pass-thru-add-okta.htm) -from the Okta management to represent your Flagsmith organisation with the following settings: +Add the [first-party Flagsmith integration](https://www.okta.com/integrations/flagsmith/) to your Okta account. Then, +open it in the Okta dashboard and: -- **Single sign-on URL**: Obtain this URL from Flagsmith support, or from your - [SAML configuration if self-hosting](/system-administration/authentication/SAML/#assertion-consumer-service-url) -- **Audience URI (SP Entity ID)**: Obtain this from Flagsmith support, or use your - [SAML configuration name](/system-administration/authentication/SAML/#setup-self-hosted) if self-hosting +- Select the "Sign On" tab and click "Edit" +- Scroll down to "Advanced Sign-on Settings", fill out the two fields and then click Save: + - **API Base URL** should be `https://api.flagsmith.com` on SaaS, or your API root URL otherwise + - **SAML Organisation** will be provided by Flagsmith support on SaaS. Otherwise, this refers to the "Organisation + name" field [when creating a SAML Configuration](/system-administration/authentication/SAML/#setup-self-hosted) +- Staying on the "Sign On" tab, find the "Metadata URL" in the "Sign on methods" section. Save this metadata to a file + and send it to [Flagsmith support](mailto:support@flagsmith.com), or add it to the "IdP Metadata XML" field of your + Flagsmith SAML Configuration if self-hosting -Once your Okta application is created, you can -[download its corresponding identity provider metadata](https://support.okta.com/help/s/article/Location-to-download-Okta-IDP-XML-metadata-for-a-SAML-app-in-the-new-Admin-User-Interface?language=en_US) -and send it to Flagsmith support, or add it to your SAML configuration if self-hosting. +Once Flagsmith support have confirmed that the metadata has been uploaded, your users will be able to sign in via the +Okta applications dashboard and the Flagsmith dashboard by entering the organisation name given to you by Flagsmith +support, or the SAML configuration name if self-hosting. + +## User attributes + +By default, Flagsmith's Okta integration will map your users' email address, given name and surname so that they are +visible within Flagsmith. If you need to map different attributes, please +[contact support](mailto:support@flagsmith.com) or refer to the +[documentation on SAML attribute mappings](/system-administration/authentication/SAML/#attribute-mapping). + +## Troubleshooting + +If your users are unable to sign in to the Flagsmith application via Okta, it’s important to check whether they already have +a user account in Flagsmith with their Okta email address. If they do, make sure that they are not a member of any +organisation other than the one set up in the Okta integration. 
diff --git a/docs/docs/system-administration/importing-and-exporting/launchdarkly.md b/docs/docs/system-administration/importing-and-exporting/launchdarkly.md index a713a96a2cb3..8b6c9d585c2a 100644 --- a/docs/docs/system-administration/importing-and-exporting/launchdarkly.md +++ b/docs/docs/system-administration/importing-and-exporting/launchdarkly.md @@ -1,11 +1,11 @@ --- -title: LaunchDarkly Importer -description: Import your LaunchDarkly data into Flagsmith +title: LaunchDarkly Migrator - Migrate from LaunchDarkly +description: Migrate your flags and project from LaunchDarkly into Flagsmith sidebar_label: LaunchDarkly sidebar_position: 10 --- -# LaunchDarkly Importer +# LaunchDarkly Migrator - Migrate from LaunchDarkly You can import your Flags and Segments from LaunchDarkly into Flagsmith. diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 357de5999a9e..6f2a0b48ea5b 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -219,6 +219,10 @@ const config = { }), ], ], + + scripts: ['/js/crisp-chat.js'], + + clientModules: [require.resolve('./plugins/crisp-chat-links.js')], }; export default config; diff --git a/docs/plugins/crisp-chat-links.js b/docs/plugins/crisp-chat-links.js new file mode 100644 index 000000000000..940db0769287 --- /dev/null +++ b/docs/plugins/crisp-chat-links.js @@ -0,0 +1,32 @@ +// Plugin code based on https://stackoverflow.com/a/74736980 + +import ExecutionEnvironment from '@docusaurus/ExecutionEnvironment'; + +const enableCrispLinks = () => { + document.querySelectorAll('.open-chat').forEach((oc) => { + oc.onclick = ({ target }) => { + if (typeof $crisp !== 'undefined') { + $crisp.push(['do', 'chat:open']); + if (target.dataset.crispChatMessage) { + $crisp.push(['set', 'message:text', [target.dataset.crispChatMessage]]); + } + } + }; + }); +}; + +export function onRouteDidUpdate({ location, previousLocation }) { + // Don't execute if we are still on the same page; the lifecycle may be fired + // because the hash changes (e.g. when navigating between headings) + if (location.pathname === previousLocation?.pathname) return; + enableCrispLinks(); +} + +if (ExecutionEnvironment.canUseDOM) { + // We also need to call enableCrispLinks when the page first loads; otherwise, + // after reloading the page, these click handlers will not be set until the user + // navigates somewhere. 
+ window.addEventListener('load', () => { + setTimeout(enableCrispLinks, 1000); + }); +} diff --git a/docs/static/js/crisp-chat.js b/docs/static/js/crisp-chat.js new file mode 100644 index 000000000000..d65ea10869f5 --- /dev/null +++ b/docs/static/js/crisp-chat.js @@ -0,0 +1,9 @@ +window.$crisp = []; +window.CRISP_WEBSITE_ID = '8857f89e-0eb5-4263-ab49-a293872b6c19'; +(function () { + d = document; + s = d.createElement('script'); + s.src = 'https://client.crisp.chat/l.js'; + s.async = 1; + d.getElementsByTagName('head')[0].appendChild(s); +})(); diff --git a/frontend/Makefile b/frontend/Makefile index 3b8cb33f715d..446029723865 100644 --- a/frontend/Makefile +++ b/frontend/Makefile @@ -2,6 +2,8 @@ DOTENV_OVERRIDE_FILE ?= .env +E2E_CONCURRENCY ?= 3 + -include .env-local -include $(DOTENV_OVERRIDE_FILE) @@ -20,3 +22,8 @@ build: .PHONY: serve serve: npm run dev + +.PHONY: test +test: + docker compose -f docker-compose-e2e-tests.yml run frontend \ + npx cross-env E2E_CONCURRENCY=${E2E_CONCURRENCY} npm run test -- $(opts) diff --git a/frontend/common/constants.ts b/frontend/common/constants.ts index 6e3294610f63..12a7c62c0222 100644 --- a/frontend/common/constants.ts +++ b/frontend/common/constants.ts @@ -477,8 +477,18 @@ export default { projectPermissions: (perm: string) => `To use this feature you need the ${perm} permission for this project.
Please contact a member of this project who has administrator privileges.`, resourceTypes: { - GITHUB_ISSUE: { id: 1, label: 'GitHub Issue', type: 'GITHUB' }, - GITHUB_PR: { id: 2, label: 'GitHub PR', type: 'GITHUB' }, + GITHUB_ISSUE: { + id: 1, + label: 'Issue', + resourceType: 'issues', + type: 'GITHUB', + }, + GITHUB_PR: { + id: 2, + label: 'Pull Request', + resourceType: 'pulls', + type: 'GITHUB', + }, }, roles: { 'ADMIN': 'Organisation Administrator', diff --git a/frontend/common/providers/ProjectProvider.js b/frontend/common/providers/ProjectProvider.js index a25a3c24f211..cf91d54a0c1a 100644 --- a/frontend/common/providers/ProjectProvider.js +++ b/frontend/common/providers/ProjectProvider.js @@ -24,6 +24,7 @@ const ProjectProvider = class extends React.Component { this.setState( Object.assign( { + error: ProjectStore.error, isLoading: ProjectStore.isLoading, isSaving: ProjectStore.isSaving, }, diff --git a/frontend/common/services/useFeatureVersion.ts b/frontend/common/services/useFeatureVersion.ts index 2950ee378ee5..91fa9449d740 100644 --- a/frontend/common/services/useFeatureVersion.ts +++ b/frontend/common/services/useFeatureVersion.ts @@ -27,6 +27,7 @@ export const featureVersionService = service await createFeatureVersion(getStore(), { environmentId: query.environmentId, featureId: query.featureId, + liveFrom: query.liveFrom, }) // Step 2: Get the feature states for the live version @@ -38,81 +39,83 @@ export const featureVersionService = service }) // Step 3: update, create or delete feature states from the new version - const res: { data: FeatureState }[] = await Promise.all( - query.featureStates.map((featureState) => { - const matchingVersionState = currentFeatureStates.data.find( - (feature) => { - return ( - feature.feature_segment?.segment === - featureState.feature_segment?.segment - ) - }, - ) - // Matching feature state exists, meaning we need to either modify or delete it - if (matchingVersionState) { - //Feature state is marked as to remove, delete it from the current version - if ( - featureState.toRemove && - matchingVersionState.feature_segment - ) { - return deleteFeatureSegment(getStore(), { - id: matchingVersionState.feature_segment.id, - }) - } - //Feature state is not marked as remove, so we update it - const multivariate_feature_state_values = - featureState.multivariate_feature_state_values - ? featureState.multivariate_feature_state_values?.map( - (featureStateValue) => { - const newId = - matchingVersionState?.multivariate_feature_state_values?.find( - (v) => { - return ( - v.multivariate_feature_option === - featureStateValue.multivariate_feature_option - ) - }, - ) + const res: { data: FeatureState }[] = ( + await Promise.all( + query.featureStates.map((featureState) => { + const matchingVersionState = currentFeatureStates.data.find( + (feature) => { + return ( + feature.feature_segment?.segment === + featureState.feature_segment?.segment + ) + }, + ) + // Matching feature state exists, meaning we need to either modify or delete it + if (matchingVersionState) { + //Feature state is marked as to remove, delete it from the current version + if ( + featureState.toRemove && + matchingVersionState.feature_segment + ) { + return deleteFeatureSegment(getStore(), { + id: matchingVersionState.feature_segment.id, + }) + } + //Feature state is not marked as remove, so we update it + const multivariate_feature_state_values = + featureState.multivariate_feature_state_values + ? 
featureState.multivariate_feature_state_values?.map( + (featureStateValue) => { + const newId = + matchingVersionState?.multivariate_feature_state_values?.find( + (v) => { + return ( + v.multivariate_feature_option === + featureStateValue.multivariate_feature_option + ) + }, + ) - return { - ...featureStateValue, - id: newId!.id, - } - }, - ) - : [] + return { + ...featureStateValue, + id: newId!.id, + } + }, + ) + : [] - return updateVersionFeatureState(getStore(), { - environmentId: query.environmentId, - featureId: matchingVersionState.feature, - featureState: { - ...featureState, - feature_segment: matchingVersionState?.feature_segment - ? { - ...(matchingVersionState.feature_segment as any), - priority: featureState.feature_segment!.priority, - } - : undefined, + return updateVersionFeatureState(getStore(), { + environmentId: query.environmentId, + featureId: matchingVersionState.feature, + featureState: { + ...featureState, + feature_segment: matchingVersionState?.feature_segment + ? { + ...(matchingVersionState.feature_segment as any), + priority: featureState.feature_segment!.priority, + } + : undefined, + id: matchingVersionState.id, + multivariate_feature_state_values, + uuid: matchingVersionState.uuid, + }, id: matchingVersionState.id, - multivariate_feature_state_values, + sha: versionRes.data.uuid, uuid: matchingVersionState.uuid, - }, - id: matchingVersionState.id, - sha: versionRes.data.uuid, - uuid: matchingVersionState.uuid, - }) - } - // Matching feature state does not exist, meaning we need to create it - else { - return createVersionFeatureState(getStore(), { - environmentId: query.environmentId, - featureId: query.featureId, - featureState, - sha: versionRes.data.uuid, - }) - } - }), - ) + }) + } + // Matching feature state does not exist, meaning we need to create it + else { + return createVersionFeatureState(getStore(), { + environmentId: query.environmentId, + featureId: query.featureId, + featureState, + sha: versionRes.data.uuid, + }) + } + }), + ) + ).filter((v) => !!v?.data) //Step 4: Update feature segment priorities before saving feature states const prioritiesToUpdate = query.featureStates @@ -133,11 +136,12 @@ export const featureVersionService = service } const ret = { - data: res.map((item) => ({ + error: res.find((v) => !!v.error)?.error, + feature_states: res.map((item) => ({ ...item, version_sha: versionRes.data.uuid, })), - error: res.find((v) => !!v.error)?.error, + version_sha: versionRes.data.uuid, } // Step 5: Publish the feature version @@ -149,7 +153,7 @@ export const featureVersionService = service }) } - return ret as any + return { data: ret } as any }, }), createFeatureVersion: builder.mutation< @@ -158,7 +162,7 @@ export const featureVersionService = service >({ invalidatesTags: [{ id: 'LIST', type: 'FeatureVersion' }], query: (query: Req['createFeatureVersion']) => ({ - body: {}, + body: { live_from: query.liveFrom }, method: 'POST', url: `environments/${query.environmentId}/features/${query.featureId}/versions/`, }), @@ -169,7 +173,7 @@ export const featureVersionService = service >({ providesTags: (res) => [{ id: res?.uuid, type: 'FeatureVersion' }], query: (query: Req['getFeatureVersion']) => ({ - url: `environments/${query.environmentId}/features/${query.featureId}/versions/${query.uuid}`, + url: `environment-feature-versions/${query.uuid}/`, }), }), getFeatureVersions: builder.query< diff --git a/frontend/common/services/useGithub.ts b/frontend/common/services/useGithub.ts index 600b01bca300..47fe41a27bc9 100644 --- 
a/frontend/common/services/useGithub.ts +++ b/frontend/common/services/useGithub.ts @@ -7,21 +7,6 @@ export const githubService = service .enhanceEndpoints({ addTagTypes: ['Github'] }) .injectEndpoints({ endpoints: (builder) => ({ - getGithubIssues: builder.query< - Res['githubIssues'], - Req['getGithubIssues'] - >({ - providesTags: [{ id: 'LIST', type: 'Github' }], - query: (query: Req['getGithubIssues']) => ({ - url: `organisations/${query.organisation_id}/github/issues/?repo_name=${query.repo_name}&repo_owner=${query.repo_owner}`, - }), - }), - getGithubPulls: builder.query({ - providesTags: [{ id: 'LIST', type: 'Github' }], - query: (query: Req['getGithubPulls']) => ({ - url: `organisations/${query.organisation_id}/github/pulls/?repo_name=${query.repo_name}&repo_owner=${query.repo_owner}`, - }), - }), getGithubRepos: builder.query({ providesTags: [{ id: 'LIST', type: 'Github' }], query: (query: Req['getGithubRepos']) => ({ @@ -32,30 +17,30 @@ export const githubService = service })}`, }), }), + getGithubResources: builder.query< + Res['githubResources'], + Req['getGithubResources'] + >({ + providesTags: [{ id: 'LIST', type: 'Github' }], + query: (query: Req['getGithubResources']) => ({ + url: + `organisations/${query.organisation_id}/github/${query.github_resource}/` + + `?repo_name=${query.repo_name}&repo_owner=${query.repo_owner}&page_size=${query.page_size}&page=${query.page}&search_text=${query.q}`, + }), + }), // END OF ENDPOINTS }), }) -export async function getGithubIssues( - store: any, - data: Req['getGithubIssues'], - options?: Parameters< - typeof githubService.endpoints.getGithubIssues.initiate - >[1], -) { - return store.dispatch( - githubService.endpoints.getGithubIssues.initiate(data, options), - ) -} -export async function getGithubPulls( +export async function getGithubResources( store: any, - data: Req['getGithubPulls'], + data: Req['getGithubResources'], options?: Parameters< - typeof githubService.endpoints.getGithubPulls.initiate + typeof githubService.endpoints.getGithubResources.initiate >[1], ) { return store.dispatch( - githubService.endpoints.getGithubPulls.initiate(data, options), + githubService.endpoints.getGithubResources.initiate(data, options), ) } export async function getGithubRepos( @@ -72,14 +57,13 @@ export async function getGithubRepos( // END OF FUNCTION_EXPORTS export const { - useGetGithubIssuesQuery, - useGetGithubPullsQuery, useGetGithubReposQuery, + useGetGithubResourcesQuery, // END OF EXPORTS } = githubService /* Usage examples: -const { data, isLoading } = useGetGithubIssuesQuery({ id: 2 }, {}) //get hook +const { data, isLoading } = useGetGithubResourcesQuery({ id: 2 }, {}) //get hook const [createGithub, { isLoading, data, isSuccess }] = useCreateGithubMutation() //create hook githubService.endpoints.getGithub.select({id: 2})(store.getState()) //access data from any function */ diff --git a/frontend/common/stores/feature-list-store.ts b/frontend/common/stores/feature-list-store.ts index 79d36b7b99de..22325753c37d 100644 --- a/frontend/common/stores/feature-list-store.ts +++ b/frontend/common/stores/feature-list-store.ts @@ -27,8 +27,9 @@ import flagsmith from 'flagsmith' import API from 'project/api' import segmentOverrides from 'components/SegmentOverrides' import { Req } from 'common/types/requests' +import { getVersionFeatureState } from 'common/services/useVersionFeatureState' let createdFirstFeature = false -const PAGE_SIZE = 200 +const PAGE_SIZE = 50 function recursivePageGet(url, parentRes) { return data.get(url).then((res) => { 
let response @@ -357,7 +358,11 @@ const controller = { `${Project.api}environments/${environmentId}/featurestates/${environmentFlag.id}/`, Object.assign({}, environmentFlag, { enabled: flag.default_enabled, - feature_state_value: flag.initial_value, + feature_state_value: Utils.getTypedValue( + flag.initial_value, + undefined, + true, + ), }), ) }) @@ -472,13 +477,14 @@ const controller = { ] } - const version = await createAndSetFeatureVersion(getStore(), { + const { data: version } = await createAndSetFeatureVersion(getStore(), { environmentId: env.id, featureId: projectFlag.id, featureStates, + liveFrom: changeRequest.live_from, skipPublish: true, }) - environment_feature_versions = version.data.map((v) => v.version_sha) + environment_feature_versions = [version.version_sha] } const prom = data .get( @@ -607,10 +613,24 @@ const controller = { environmentId: res, featureId: projectFlag.id, featureStates, - }).then((res) => { - if (res.error) { - throw res.error + }).then((version) => { + if (version.error) { + throw version.error } + // Fetch and update the latest environment feature state + return getVersionFeatureState(getStore(), { + environmentId: ProjectStore.getEnvironmentIdFromKey(environmentId), + featureId: projectFlag.id, + sha: version.data.version_sha, + }).then((res) => { + const environmentFeatureState = res.data.find( + (v) => !v.feature_segment, + ) + store.model.keyedEnvironmentFeatures[projectFlag.id] = { + ...store.model.keyedEnvironmentFeatures[projectFlag.id], + ...environmentFeatureState, + } + }) }) }) } else if (environmentFlag) { @@ -650,11 +670,11 @@ const controller = { environmentId: res, featureId: projectFlag.id, featureStates: [data], - }).then((res) => { - if (res.error) { - throw res.error + }).then((version) => { + if (version.error) { + throw version.error } - const featureState = res.data[0].data + const featureState = version.data.feature_states[0].data store.model.keyedEnvironmentFeatures[projectFlag.id] = { ...featureState, feature_state_value: Utils.featureStateToValue( diff --git a/frontend/common/stores/identity-store.js b/frontend/common/stores/identity-store.js index 590bc776e8cf..5efe7dd54ae9 100644 --- a/frontend/common/stores/identity-store.js +++ b/frontend/common/stores/identity-store.js @@ -1,4 +1,5 @@ import Constants from 'common/constants' +import Utils from 'common/utils/utils' const Dispatcher = require('../dispatcher/dispatcher') const BaseStore = require('./base/_store') @@ -95,7 +96,11 @@ const controller = { { enabled: identityFlag.enabled, feature: projectFlag.id, - feature_state_value: identityFlag.feature_state_value, + feature_state_value: Utils.getTypedValue( + identityFlag.feature_state_value, + undefined, + true, + ), id: identityFlag.id || identityFlag.featurestate_uuid, multivariate_feature_state_values: identityFlag.multivariate_options, @@ -109,7 +114,11 @@ const controller = { { enabled: identityFlag.enabled, feature: projectFlag.id, - feature_state_value: identityFlag.feature_state_value, + feature_state_value: Utils.getTypedValue( + identityFlag.feature_state_value, + undefined, + true, + ), multivariate_feature_state_values: identityFlag.multivariate_options, }, diff --git a/frontend/common/stores/project-store.js b/frontend/common/stores/project-store.js index 0783baf8e49a..a174fd7a77eb 100644 --- a/frontend/common/stores/project-store.js +++ b/frontend/common/stores/project-store.js @@ -15,6 +15,7 @@ const data = require('../data/base/_data') const controller = { createEnv: (name, projectId, cloneId, description, 
metadata) => { API.trackEvent(Constants.events.CREATE_ENVIRONMENT) + store.saving() const req = cloneId ? data.post(`${Project.api}environments/${cloneId}/clone/`, { description, @@ -25,41 +26,44 @@ const controller = { name, project: projectId, }) - - req.then((res) => - data - .put(`${Project.api}environments/${res.api_key}/`, { - description, - metadata: metadata || [], - name, - project: projectId, - }) - .then((res) => - data - .post( - `${Project.api}environments/${ - res.api_key - }/${Utils.getIdentitiesEndpoint()}/`, - { - environment: res.api_key, - identifier: `${name.toLowerCase()}_user_123456`, - }, - ) - .then(() => { - store.savedEnv = res - if (store.model && store.model.environments) { - store.model.environments = store.model.environments.concat([ - res, - ]) - } - store.saved() - getStore().dispatch( - environmentService.util.invalidateTags(['Environment']), + req + .then((res) => + data + .put(`${Project.api}environments/${res.api_key}/`, { + description, + metadata: metadata || [], + name, + project: projectId, + }) + .then((res) => + data + .post( + `${Project.api}environments/${ + res.api_key + }/${Utils.getIdentitiesEndpoint()}/`, + { + environment: res.api_key, + identifier: `${name.toLowerCase()}_user_123456`, + }, ) - AppActions.refreshOrganisation() - }), - ), - ) + .then(() => { + store.savedEnv = res + if (store.model && store.model.environments) { + store.model.environments = store.model.environments.concat([ + res, + ]) + } + store.saved() + getStore().dispatch( + environmentService.util.invalidateTags(['Environment']), + ) + AppActions.refreshOrganisation() + }), + ), + ) + .catch((e) => { + API.ajaxHandler(store, e) + }) }, deleteEnv: (env) => { @@ -69,6 +73,9 @@ const controller = { store.model.environments, (e) => e.id !== env.id, ) + getStore().dispatch( + environmentService.util.invalidateTags(['Environment']), + ) store.trigger('removed') store.saved() AppActions.refreshOrganisation() diff --git a/frontend/common/types/requests.ts b/frontend/common/types/requests.ts index 7214ef8c037a..612d468b6c94 100644 --- a/frontend/common/types/requests.ts +++ b/frontend/common/types/requests.ts @@ -311,10 +311,12 @@ export type Req = { | 'toRemove' | 'multivariate_feature_state_values' >[] + liveFrom?: string } createFeatureVersion: { environmentId: number featureId: number + liveFrom?: string } publishFeatureVersion: { sha: string @@ -341,8 +343,6 @@ export type Req = { }> getUsers: { organisationId: number } getFeatureVersion: { - environmentId: string - featureId: string uuid: string } enableFeatureVersioning: { @@ -412,16 +412,12 @@ export type Req = { repository_owner: string } } - getGithubIssues: { - organisation_id: string - repo_name: string - repo_owner: string - } - getGithubPulls: { + getGithubResources: PagedRequest<{ organisation_id: string repo_name: string repo_owner: string - } + github_resource: string + }> getGithubRepos: { installation_id: string; organisation_id: string } getServersideEnvironmentKeys: { environmentId: string } deleteServersideEnvironmentKeys: { environmentId: string; id: string } diff --git a/frontend/common/types/responses.ts b/frontend/common/types/responses.ts index f5e52816c3cd..d524670f19af 100644 --- a/frontend/common/types/responses.ts +++ b/frontend/common/types/responses.ts @@ -17,6 +17,9 @@ export type PagedResponse = { previous?: string results: T[] } +export interface GitHubPagedResponse extends PagedResponse { + incomplete_results: boolean +} export type FlagsmithValue = string | number | boolean | null export 
type FeatureVersionState = { @@ -112,7 +115,7 @@ export type ExternalResource = { url: string type: string project?: number - metadata: null | { status: string } + metadata?: { state?: string; title?: string } feature: number } @@ -157,155 +160,25 @@ export type LaunchDarklyProjectImport = { project: number } -export type Issue = { - url: string - repository_url: string - labels_url: string - comments_url: string - events_url: string +export type GithubResources = { html_url: string id: number number: number title: string state: string - created_at: string - updated_at: string - closed_at: null | string - body: string - timeline_url: string -} - -export type PullRequest = { - url: string - id: number - html_url: string - issue_url: string - number: number - state: string - locked: boolean - title: string - body: string | null - created_at: string - updated_at: string - closed_at: string | null - merged_at: string | null - draft: boolean - comments_url: string - statuses_url: string } export type GithubPaginatedRepos = { total_count: number repository_selection: string - repositories: T[] + results: T[] } export type Repository = { id: number - node_id: string name: string full_name: string - private: boolean - owner: { - login: string - id: number - node_id: string - avatar_url: string - gravatar_id: string - url: string - html_url: string - followers_url: string - following_url: string - gists_url: string - starred_url: string - subscriptions_url: string - organizations_url: string - repos_url: string - events_url: string - received_events_url: string - type: string - site_admin: boolean - } - html_url: string - description: string | null - fork: boolean - url: string - forks_url: string - keys_url: string - collaborators_url: string - teams_url: string - hooks_url: string - issue_events_url: string - events_url: string - assignees_url: string - branches_url: string - tags_url: string - blobs_url: string - git_tags_url: string - git_refs_url: string - trees_url: string - statuses_url: string - languages_url: string - stargazers_url: string - contributors_url: string - subscribers_url: string - subscription_url: string - commits_url: string - git_commits_url: string - comments_url: string - issue_comment_url: string - contents_url: string - compare_url: string - merges_url: string - archive_url: string - downloads_url: string - issues_url: string - pulls_url: string - milestones_url: string - notifications_url: string - labels_url: string - releases_url: string - deployments_url: string - created_at: string - updated_at: string - pushed_at: string - git_url: string - ssh_url: string - clone_url: string - svn_url: string - homepage: string | null - size: number - stargazers_count: number - watchers_count: number - language: string - has_issues: boolean - has_projects: boolean - has_downloads: boolean - has_wiki: boolean - has_pages: boolean - has_discussions: boolean - forks_count: number - mirror_url: string | null - archived: boolean - disabled: boolean - open_issues_count: number - license: string | null - allow_forking: boolean - is_template: boolean - web_commit_signoff_required: boolean - topics: string[] - visibility: string - forks: number - open_issues: number - watchers: number - default_branch: string - permissions: { - admin: boolean - maintain: boolean - push: boolean - triage: boolean - pull: boolean - } + owner: { login: string } } export type GithubRepository = { @@ -366,13 +239,15 @@ export type AuditLogItem = { author?: User environment?: Environment project: 
ProjectSummary - related_object_id: number + related_object_uuid?: number + related_feature_id?: number related_object_type: | 'FEATURE' | 'FEATURE_STATE' | 'ENVIRONMENT' | 'CHANGE_REQUEST' | 'SEGMENT' + | 'EF_VERSION' | 'EDGE_IDENTITY' is_system_event: boolean } @@ -635,6 +510,8 @@ export type ChangeRequest = { export type FeatureVersion = { created_at: string updated_at: string + feature?: number + previous_version_uuid?: string published: boolean live_from: string uuid: string @@ -770,8 +647,7 @@ export type Res = { externalResource: PagedResponse githubIntegrations: PagedResponse githubRepository: PagedResponse - githubIssues: Issue[] - githubPulls: PullRequest[] + githubResources: GitHubPagedResponse githubRepos: GithubPaginatedRepos segmentPriorities: {} featureSegment: FeatureState['feature_segment'] diff --git a/frontend/common/useInfiniteScroll.ts b/frontend/common/useInfiniteScroll.ts index 35b0cc813704..1dc1e3d975df 100644 --- a/frontend/common/useInfiniteScroll.ts +++ b/frontend/common/useInfiniteScroll.ts @@ -15,6 +15,7 @@ const useInfiniteScroll = < ) => { const [localPage, setLocalPage] = useState(1) const [combinedData, setCombinedData] = useState(null) + const [loadingCombinedData, setLoadingCombinedData] = useState(false) const [q, setQ] = useState('') const queryResponse = useGetDataListQuery({ @@ -39,12 +40,16 @@ const useInfiniteScroll = < } as RES }) } + setLoadingCombinedData(false) } }, //eslint-disable-next-line [queryResponse?.data] ) const searchItems = useThrottle((search: string) => { + if (q !== search) { + setLoadingCombinedData(true) + } setQ(search) setLocalPage(1) }, throttle) @@ -61,8 +66,10 @@ const useInfiniteScroll = < return { data: combinedData, + isFetching: queryResponse.isFetching, isLoading: queryResponse.isLoading, loadMore, + loadingCombinedData: loadingCombinedData && queryResponse.isFetching, refresh, response: queryResponse, searchItems, diff --git a/frontend/common/utils/utils.tsx b/frontend/common/utils/utils.tsx index c591bc043834..603e10e09199 100644 --- a/frontend/common/utils/utils.tsx +++ b/frontend/common/utils/utils.tsx @@ -462,7 +462,11 @@ const Utils = Object.assign({}, require('./base/_utils'), { return id ? 'put' : 'post' }, - getTypedValue(str: FlagsmithValue, boolToString?: boolean) { + getTypedValue( + str: FlagsmithValue, + boolToString?: boolean, + testWithTrim?: boolean, + ) { if (typeof str === 'undefined') { return '' } @@ -470,26 +474,27 @@ const Utils = Object.assign({}, require('./base/_utils'), { return str } - const isNum = /^\d+$/.test(str) + const typedValue = testWithTrim ? 
str.trim() : str + const isNum = /^\d+$/.test(typedValue) - if (isNum && parseInt(str) > Number.MAX_SAFE_INTEGER) { + if (isNum && parseInt(typedValue) > Number.MAX_SAFE_INTEGER) { return `${str}` } - if (str === 'true') { + if (typedValue === 'true') { if (boolToString) return 'true' return true } - if (str === 'false') { + if (typedValue === 'false') { if (boolToString) return 'false' return false } if (isNum) { if (str.indexOf('.') !== -1) { - return parseFloat(str) + return parseFloat(typedValue) } - return parseInt(str) + return parseInt(typedValue) } return str @@ -667,8 +672,8 @@ const Utils = Object.assign({}, require('./base/_utils'), { ) } }, - valueToFeatureState(value: FlagsmithValue) { - const val = Utils.getTypedValue(value) + valueToFeatureState(value: FlagsmithValue, trimSpaces = true) { + const val = Utils.getTypedValue(value, undefined, trimSpaces) if (typeof val === 'boolean') { return { diff --git a/frontend/docker-compose-e2e-tests.yml b/frontend/docker-compose-e2e-tests.yml index cda712a80ed7..4755ea1c8857 100644 --- a/frontend/docker-compose-e2e-tests.yml +++ b/frontend/docker-compose-e2e-tests.yml @@ -13,6 +13,7 @@ services: container_name: flagsmith_postgres flagsmith-api: + image: ${API_IMAGE:-ghcr.io/flagsmith/flagsmith-api:dev} build: context: ../ dockerfile: Dockerfile @@ -38,6 +39,7 @@ services: retries: 30 frontend: + image: ${E2E_IMAGE:-ghcr.io/flagsmith/flagsmith-e2e:dev} build: context: ../ dockerfile: frontend/Dockerfile.e2e diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 255e71826f03..c8713bebf74c 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -98,6 +98,8 @@ "react-tooltip": "4.5.1", "react-virtualized": "^9.22.5", "react-virtualized-auto-sizer": "1.0.2", + "react-window": "^1.8.10", + "react-window-infinite-loader": "^1.0.9", "reactstrap": "9.0.1", "recharts": "^2.1.14", "redux-persist": "^6.0.0", @@ -129,6 +131,7 @@ "@types/react-router": "^4.4.5", "@types/react-router-dom": "^4.3.1", "@types/react-select": "^2.0.3", + "@types/react-window-infinite-loader": "^1.0.9", "@typescript-eslint/eslint-plugin": "5.4.0", "@typescript-eslint/parser": "5.4.0", "eslint": "^7.6.0", @@ -4400,6 +4403,25 @@ "@types/react": "*" } }, + "node_modules/@types/react-window": { + "version": "1.8.8", + "resolved": "https://registry.npmjs.org/@types/react-window/-/react-window-1.8.8.tgz", + "integrity": "sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/react-window-infinite-loader": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@types/react-window-infinite-loader/-/react-window-infinite-loader-1.0.9.tgz", + "integrity": "sha512-gEInTjQwURCnDOFyIEK2+fWB5gTjqwx30O62QfxA9stE5aiB6EWkGj4UMhc0axq7/FV++Gs/TGW8FtgEx0S6Tw==", + "dev": true, + "dependencies": { + "@types/react": "*", + "@types/react-window": "*" + } + }, "node_modules/@types/react/node_modules/csstype": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz", @@ -15531,6 +15553,34 @@ "react-is": "^16.13.1" } }, + "node_modules/react-window": { + "version": "1.8.10", + "resolved": "https://registry.npmjs.org/react-window/-/react-window-1.8.10.tgz", + "integrity": "sha512-Y0Cx+dnU6NLa5/EvoHukUD0BklJ8qITCtVEPY1C/nL8wwoZ0b5aEw8Ff1dOVHw7fCzMt55XfJDd8S8W8LCaUCg==", + "dependencies": { + "@babel/runtime": "^7.0.0", + "memoize-one": ">=3.1.1 <6" + }, + "engines": { + "node": ">8.0.0" 
+ }, + "peerDependencies": { + "react": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/react-window-infinite-loader": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.9.tgz", + "integrity": "sha512-5Hg89IdU4Vrp0RT8kZYKeTIxWZYhNkVXeI1HbKo01Vm/Z7qztDvXljwx16sMzsa9yapRJQW3ODZfMUw38SOWHw==", + "engines": { + "node": ">8.0.0" + }, + "peerDependencies": { + "react": "^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0", + "react-dom": "^15.3.0 || ^16.0.0-alpha || ^17.0.0 || ^18.0.0" + } + }, "node_modules/reactstrap": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/reactstrap/-/reactstrap-9.0.1.tgz", @@ -23052,6 +23102,25 @@ "@types/react": "*" } }, + "@types/react-window": { + "version": "1.8.8", + "resolved": "https://registry.npmjs.org/@types/react-window/-/react-window-1.8.8.tgz", + "integrity": "sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==", + "dev": true, + "requires": { + "@types/react": "*" + } + }, + "@types/react-window-infinite-loader": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@types/react-window-infinite-loader/-/react-window-infinite-loader-1.0.9.tgz", + "integrity": "sha512-gEInTjQwURCnDOFyIEK2+fWB5gTjqwx30O62QfxA9stE5aiB6EWkGj4UMhc0axq7/FV++Gs/TGW8FtgEx0S6Tw==", + "dev": true, + "requires": { + "@types/react": "*", + "@types/react-window": "*" + } + }, "@types/retry": { "version": "0.12.1", "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.1.tgz", @@ -31164,6 +31233,20 @@ "resolved": "https://registry.npmjs.org/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.2.tgz", "integrity": "sha512-MYXhTY1BZpdJFjUovvYHVBmkq79szK/k7V3MO+36gJkWGkrXKtyr4vCPtpphaTLRAdDNoYEYFZWE8LjN+PIHNg==" }, + "react-window": { + "version": "1.8.10", + "resolved": "https://registry.npmjs.org/react-window/-/react-window-1.8.10.tgz", + "integrity": "sha512-Y0Cx+dnU6NLa5/EvoHukUD0BklJ8qITCtVEPY1C/nL8wwoZ0b5aEw8Ff1dOVHw7fCzMt55XfJDd8S8W8LCaUCg==", + "requires": { + "@babel/runtime": "^7.0.0", + "memoize-one": ">=3.1.1 <6" + } + }, + "react-window-infinite-loader": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/react-window-infinite-loader/-/react-window-infinite-loader-1.0.9.tgz", + "integrity": "sha512-5Hg89IdU4Vrp0RT8kZYKeTIxWZYhNkVXeI1HbKo01Vm/Z7qztDvXljwx16sMzsa9yapRJQW3ODZfMUw38SOWHw==" + }, "reactstrap": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/reactstrap/-/reactstrap-9.0.1.tgz", diff --git a/frontend/package.json b/frontend/package.json index 63cd116a51fa..96f80db66875 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -116,6 +116,8 @@ "react-tooltip": "4.5.1", "react-virtualized": "^9.22.5", "react-virtualized-auto-sizer": "1.0.2", + "react-window": "^1.8.10", + "react-window-infinite-loader": "^1.0.9", "reactstrap": "9.0.1", "recharts": "^2.1.14", "redux-persist": "^6.0.0", @@ -147,6 +149,7 @@ "@types/react-router": "^4.4.5", "@types/react-router-dom": "^4.3.1", "@types/react-select": "^2.0.3", + "@types/react-window-infinite-loader": "^1.0.9", "@typescript-eslint/eslint-plugin": "5.4.0", "@typescript-eslint/parser": "5.4.0", "eslint": "^7.6.0", diff --git a/frontend/web/components/AdminAPIKeys.js b/frontend/web/components/AdminAPIKeys.js index 22589592733a..f2451b34111c 100644 --- a/frontend/web/components/AdminAPIKeys.js +++ b/frontend/web/components/AdminAPIKeys.js @@ -18,6 
+18,8 @@ import { getRolesMasterAPIKeyWithMasterAPIKeyRoles, updateMasterAPIKeyWithMasterAPIKeyRoles, } from 'common/services/useMasterAPIKeyWithMasterAPIKeyRole' +import { setInterceptClose, setModalTitle } from './modals/base/ModalDefault' +import SuccessMessage from './SuccessMessage' export class CreateAPIKey extends PureComponent { state = { @@ -31,6 +33,28 @@ export class CreateAPIKey extends PureComponent { componentDidMount() { this.props.isEdit && this.getApiKeyByPrefix(this.props.prefix) + if (!this.props.isEdit) { + setInterceptClose(() => { + if (!this.state.key) { + return Promise.resolve(true) + } else { + return new Promise((resolve) => { + openConfirm({ + body: 'Please confirm you have a copy of the API key prior to closing.', + noText: 'Cancel', + onNo: () => resolve(false), + onYes: () => resolve(true), + title: 'Confirm saved API key', + yesText: 'Confirm', + }) + }) + } + }) + } + } + + componentWillUnmount() { + setInterceptClose(null) } submit = () => { @@ -50,6 +74,7 @@ export class CreateAPIKey extends PureComponent { isSaving: false, key: res.key, }) + setModalTitle('Save your new API key') Promise.all( this.state.roles.map((role) => createRoleMasterApiKey(getStore(), { @@ -163,6 +188,11 @@ export class CreateAPIKey extends PureComponent { { + setTimeout(() => { + v.focus() + }, 500) + }} value={this.state.name} onChange={(e) => this.setState({ name: Utils.safeParseEventValue(e) }) @@ -257,11 +287,11 @@ export class CreateAPIKey extends PureComponent { )} {this.state.key && ( -
- - Please keep a note of your API key once it's created, we do not - store it. - +
+ + Your key has been created. Please keep a note of your API key, + as we do not store it. +
diff --git a/frontend/web/components/App.js b/frontend/web/components/App.js index 6dd07ff53e6b..c35417700218 100644 --- a/frontend/web/components/App.js +++ b/frontend/web/components/App.js @@ -381,22 +381,23 @@ const App = class extends Component { /> )} {user && showBanner && ( - - - this.closeAnnouncement(announcementValue.id) - } - buttonText={announcementValue.buttonText} - url={announcementValue.url} - > -
-
{announcementValue.description}
-
-
-
+
+
+ + this.closeAnnouncement(announcementValue.id) + } + buttonText={announcementValue.buttonText} + url={announcementValue.url} + > +
+
{announcementValue.description}
+
+
+
+
)} {this.props.children} diff --git a/frontend/web/components/AuditLog.tsx b/frontend/web/components/AuditLog.tsx index 4a04a4a92d84..8242acff616b 100644 --- a/frontend/web/components/AuditLog.tsx +++ b/frontend/web/components/AuditLog.tsx @@ -1,14 +1,15 @@ import React, { FC, ReactNode, useEffect, useRef, useState } from 'react' // we need this to make JSX compile -import moment from 'moment' import Utils from 'common/utils/utils' import { AuditLogItem, Environment } from 'common/types/responses' import { useGetAuditLogsQuery } from 'common/services/useAuditLog' import useSearchThrottle from 'common/useSearchThrottle' -import JSONReference from './JSONReference' import { Link, withRouter } from 'react-router-dom' -import PanelSearch from './PanelSearch' import ProjectStore from 'common/stores/project-store' +import Button from './base/forms/Button' import Tag from './tags/Tag' +import PanelSearch from './PanelSearch' +import JSONReference from './JSONReference' +import moment from 'moment' type AuditLogType = { environmentId: string @@ -25,7 +26,7 @@ type AuditLogType = { } } -const widths = [210, 210, 210] +const widths = [210, 210, 130] const AuditLog: FC = (props) => { const [page, setPage] = useState(1) const { search, searchInput, setSearchInput } = useSearchThrottle( @@ -55,13 +56,18 @@ const AuditLog: FC = (props) => { data: projectAuditLog, isError, isFetching, - } = useGetAuditLogsQuery({ - environments, - page, - page_size: props.pageSize, - project: props.projectId, - search, - }) + } = useGetAuditLogsQuery( + { + environments, + page, + page_size: props.pageSize, + project: props.projectId, + search, + }, + { + refetchOnMountOrArgChange: true, + }, + ) useEffect(() => { props.onErrorChange?.(isError) @@ -78,12 +84,30 @@ const AuditLog: FC = (props) => { environment, id, log, + project, + related_feature_id, + related_object_type, + related_object_uuid, }: AuditLogItem) => { const environments = ProjectStore.getEnvs() as Environment[] | null const index = environments?.findIndex((v) => { return v.id === environment?.id }) const colour = index === -1 ? 0 : index + let link: ReactNode = null + if ( + related_object_uuid && + related_object_type === 'EF_VERSION' && + environment + ) { + link = ( + + + + ) + } const inner = (
= (props) => { style={{ width: widths[2] }} to={`/project/${props.projectId}/environment/${environment?.api_key}/features/`} > - - - + ) : (
)} - {log} + +
+ {log} + {link} +
+
) return ( {inner} @@ -203,4 +230,4 @@ const AuditLog: FC = (props) => { ) } -export default withRouter(AuditLog) +export default withRouter(AuditLog as any) diff --git a/frontend/web/components/DeleteGithubIntegration.tsx b/frontend/web/components/DeleteGithubIntegration.tsx index b740e3e4b84d..59dda6ea80ba 100644 --- a/frontend/web/components/DeleteGithubIntegration.tsx +++ b/frontend/web/components/DeleteGithubIntegration.tsx @@ -23,10 +23,6 @@ const DeleteGithubIntegration: FC = ({ 'Delete Github Integration',
Are you sure you want to remove your GitHub integration?
-
- If you proceed, you will need to uninstall the application from - your GitHub organization in order to integrate it again. -
)} {this.props.isClosable && ( - + diff --git a/frontend/web/components/IntegrationList.js b/frontend/web/components/IntegrationList.js index 7dbe55c5a74c..aafabd35c92d 100644 --- a/frontend/web/components/IntegrationList.js +++ b/frontend/web/components/IntegrationList.js @@ -231,9 +231,13 @@ class IntegrationList extends Component { } fetchGithubIntegration = () => { - getGithubIntegration(getStore(), { - organisation_id: AccountStore.getOrganisation().id, - }).then((res) => { + getGithubIntegration( + getStore(), + { + organisation_id: AccountStore.getOrganisation().id, + }, + { forceRefetch: true }, + ).then((res) => { this.setState({ githubId: res?.data?.results[0]?.id, hasIntegrationWithGithub: !!res?.data?.results?.length, @@ -286,7 +290,6 @@ class IntegrationList extends Component { } }), ).then((res) => { - console.log(res) this.setState({ activeIntegrations: _.map(res, (item) => !!item && item.length ? item : [], @@ -371,7 +374,7 @@ class IntegrationList extends Component { } githubMeta={{ githubId: githubId, installationId: installationId }} projectId={this.props.projectId} - onComplete={githubId ? this.fetch : this.fetchGithubIntegration} + onComplete={githubId ? this.fetchGithubIntegration : this.fetch} />, 'side-modal', ) diff --git a/frontend/web/components/IssueSelect.tsx b/frontend/web/components/IssueSelect.tsx deleted file mode 100644 index bf88de1fd7bf..000000000000 --- a/frontend/web/components/IssueSelect.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, { FC } from 'react' -import { Issue } from 'common/types/responses' - -export type IssueSelectType = { - disabled?: boolean - issues: Issue[] | undefined - onChange: (value: string) => void -} - -type IssueValueType = { - value: string -} - -const IssueSelect: FC = ({ disabled, issues, onChange }) => { - return ( -
- onChange(v?.value)} - disabled={disabled} - options={pullRequest?.map((p: PullRequest) => { - return { - label: `${p.title} #${p.number}`, - value: p.html_url, - } - })} - /> -
- ) -} - -export default PullRequestsSelect diff --git a/frontend/web/components/SegmentOverrides.js b/frontend/web/components/SegmentOverrides.js index 4b23f530a320..886e0eb28fdf 100644 --- a/frontend/web/components/SegmentOverrides.js +++ b/frontend/web/components/SegmentOverrides.js @@ -245,7 +245,7 @@ const SegmentOverrideInner = class Override extends React.Component {
{showValue ? ( <> - + - { - 'Are you sure you want to delete this segment override? This will be applied when you click Update Segment Overrides and cannot be undone.' - } + {`Are you sure you want to delete this segment override?${ + this.props.is4Eyes + ? '' + : ' This will be applied when you click Update Segment Overrides and cannot be undone.' + }`}
), destructive: true, diff --git a/frontend/web/components/ValueEditor.js b/frontend/web/components/ValueEditor.js index 07129e434f49..5eb00f03b286 100644 --- a/frontend/web/components/ValueEditor.js +++ b/frontend/web/components/ValueEditor.js @@ -4,6 +4,8 @@ import Highlight from './Highlight' import ConfigProvider from 'common/providers/ConfigProvider' import { Clipboard } from 'polyfill-react-native' import Icon from './Icon' +import { IonIcon } from '@ionic/react' +import { checkmarkCircle, warning } from 'ionicons/icons' const toml = require('toml') const yaml = require('yaml') @@ -91,24 +93,25 @@ class Validation extends Component { render() { const displayLanguage = this.props.language === 'ini' ? 'toml' : this.props.language - return ( + return this.state.error ? ( - ) : ( - - ) + } > - {!this.state.error - ? `${displayLanguage} validation passed` - : `${displayLanguage} validation error, please check your value.
Error: ${this.state.error}`} + {`${displayLanguage} validation error, please check your value.
Error: ${this.state.error}`}
+ ) : ( + ) } } @@ -127,7 +130,11 @@ class ValueEditor extends Component { } renderValidation = () => ( - + ) render() { @@ -217,6 +224,7 @@ class ValueEditor extends Component { data-test={rest['data-test']} disabled={rest.disabled} onChange={rest.disabled ? null : rest.onChange} + onBlur={rest.disabled ? null : rest.onBlur} className={this.state.language} > {typeof rest.value !== 'undefined' && rest.value != null diff --git a/frontend/web/components/base/forms/Input.js b/frontend/web/components/base/forms/Input.js index 5751bee4c02d..2678e109bd8d 100644 --- a/frontend/web/components/base/forms/Input.js +++ b/frontend/web/components/base/forms/Input.js @@ -153,6 +153,9 @@ const Input = class extends React.Component { value={this.props.value} className={innerClassName} disabled={disabled} + autoComplete={ + this.props.enableAutoComplete ? undefined : 'one-time-code' + } /> )} {this.props.type === 'password' && ( diff --git a/frontend/web/components/base/forms/TabItem.tsx b/frontend/web/components/base/forms/TabItem.tsx index 373c293c4c67..89a0028eccac 100644 --- a/frontend/web/components/base/forms/TabItem.tsx +++ b/frontend/web/components/base/forms/TabItem.tsx @@ -1,6 +1,7 @@ import { FC, ReactNode } from 'react' // we need this to make JSX compile type TabItemType = { + tabLabelString?: string tabLabel: ReactNode children: ReactNode } diff --git a/frontend/web/components/modals/AssociatedSegmentOverrides.js b/frontend/web/components/modals/AssociatedSegmentOverrides.js index 002393a97c72..4af7279608a2 100644 --- a/frontend/web/components/modals/AssociatedSegmentOverrides.js +++ b/frontend/web/components/modals/AssociatedSegmentOverrides.js @@ -12,6 +12,7 @@ import EnvironmentSelect from 'components/EnvironmentSelect' import SegmentOverrideLimit from 'components/SegmentOverrideLimit' import { getStore } from 'common/store' import { getEnvironment } from 'common/services/useEnvironment' +import { saveFeatureWithValidation } from 'components/saveFeatureWithValidation' class TheComponent extends Component { state = { @@ -306,7 +307,7 @@ export default class SegmentOverridesInner extends Component { return ( {({}, { editFeatureSegments, isSaving }) => { - const save = () => { + const save = saveFeatureWithValidation(() => { FeatureListStore.isSaving = true FeatureListStore.trigger('change') !isSaving && @@ -324,7 +325,7 @@ export default class SegmentOverridesInner extends Component { }, ) this.setState({ isSaving: true }) - } + }) const segmentOverride = segmentOverrides && segmentOverrides.filter((v) => v.segment === id) if (!segmentOverrides) return null diff --git a/frontend/web/components/modals/CreateEditIntegrationModal.js b/frontend/web/components/modals/CreateEditIntegrationModal.js index 008c320bb544..9872dd64b033 100644 --- a/frontend/web/components/modals/CreateEditIntegrationModal.js +++ b/frontend/web/components/modals/CreateEditIntegrationModal.js @@ -69,7 +69,7 @@ const CreateEditIntegration = class extends Component { const isEdit = this.props.data && this.props.data.id Utils.preventDefault(e) if (this.props.integration.isExternalInstallation) { - closeModal() + this.onComplete() } if (this.state.isLoading) { return diff --git a/frontend/web/components/modals/CreateFlag.js b/frontend/web/components/modals/CreateFlag.js index 7a7ced10e4cd..171cb969cc4c 100644 --- a/frontend/web/components/modals/CreateFlag.js +++ b/frontend/web/components/modals/CreateFlag.js @@ -41,6 +41,7 @@ import { getGithubIntegration } from 'common/services/useGithubIntegration' import { 
removeUserOverride } from 'components/RemoveUserOverride' import ExternalResourcesLinkTab from 'components/ExternalResourcesLinkTab' import MetadataTitle from 'components/metadata/MetadataTitle' +import { saveFeatureWithValidation } from 'components/saveFeatureWithValidation' const CreateFlag = class extends Component { static displayName = 'CreateFlag' @@ -75,7 +76,6 @@ const CreateFlag = class extends Component { description, enabledIndentity: false, enabledSegment: false, - environmentFlag: this.props.environmentFlag, externalResource: {}, externalResources: [], featureContentType: {}, @@ -179,7 +179,7 @@ const CreateFlag = class extends Component { if ( !Project.disableAnalytics && this.props.projectFlag && - this.state.environmentFlag + this.props.environmentFlag ) { this.getFeatureUsage() } @@ -262,10 +262,10 @@ const CreateFlag = class extends Component { } getFeatureUsage = () => { - if (this.state.environmentFlag) { + if (this.props.environmentFlag) { AppActions.getFeatureUsage( this.props.projectId, - this.state.environmentFlag.environment, + this.props.environmentFlag.environment, this.props.projectFlag.id, this.state.period, ) @@ -273,6 +273,7 @@ const CreateFlag = class extends Component { } save = (func, isSaving) => { const { + environmentFlag, environmentId, identity, identityFlag, @@ -282,7 +283,6 @@ const CreateFlag = class extends Component { const { default_enabled, description, - environmentFlag, initial_value, is_archived, is_server_key_only, @@ -293,9 +293,9 @@ const CreateFlag = class extends Component { ..._projectFlag, } const hasMultivariate = - this.state.environmentFlag && - this.state.environmentFlag.multivariate_feature_state_values && - this.state.environmentFlag.multivariate_feature_state_values.length + this.props.environmentFlag && + this.props.environmentFlag.multivariate_feature_state_values && + this.props.environmentFlag.multivariate_feature_state_values.length if (identity) { !isSaving && name && @@ -306,7 +306,7 @@ const CreateFlag = class extends Component { identityFlag: Object.assign({}, identityFlag || {}, { enabled: default_enabled, feature_state_value: hasMultivariate - ? this.state.environmentFlag.feature_state_value + ? this.props.environmentFlag.feature_state_value : initial_value, multivariate_options: this.state.identityVariations, }), @@ -565,7 +565,7 @@ const CreateFlag = class extends Component { {!identity && this.state.tags && ( @@ -787,7 +787,7 @@ const CreateFlag = class extends Component { this.setState({ description: Utils.safeParseEventValue(e) }) } type='text' - title={identity ? 'Description' : 'Description (optional)'} + title={identity ? 
'Description' : 'Description'} placeholder='No description' /> @@ -815,7 +815,7 @@ const CreateFlag = class extends Component { onChangeIdentityVariations={(identityVariations) => { this.setState({ identityVariations, valueChanged: true }) }} - environmentFlag={this.state.environmentFlag} + environmentFlag={this.props.environmentFlag} projectFlag={projectFlag} onValueChange={(e) => { const initial_value = Utils.getTypedValue( @@ -858,7 +858,7 @@ const CreateFlag = class extends Component { editFeatureValue, }, ) => { - const saveFeatureValue = (schedule) => { + const saveFeatureValue = saveFeatureWithValidation((schedule) => { this.setState({ valueChanged: false }) if ((is4Eyes || schedule) && !identity) { openModal2( @@ -929,29 +929,17 @@ const CreateFlag = class extends Component { }} />, ) - } else if ( - document.getElementById('language-validation-error') - ) { - openConfirm({ - body: 'Your remote config value does not pass validation for the language you have selected. Are you sure you wish to save?', - noText: 'Cancel', - onYes: () => { - this.save(editFeatureValue, isSaving) - }, - title: 'Validation error', - yesText: 'Save', - }) } else { this.save(editFeatureValue, isSaving) } - } + }) const saveSettings = () => { this.setState({ settingsChanged: false }) this.save(editFeatureSettings, isSaving) } - const saveFeatureSegments = () => { + const saveFeatureSegments = saveFeatureWithValidation(() => { this.setState({ segmentsChanged: false }) if (is4Eyes && isVersioned && !identity) { @@ -1024,11 +1012,11 @@ const CreateFlag = class extends Component { } else { this.save(editFeatureSegments, isSaving) } - } + }) - const onCreateFeature = () => { + const onCreateFeature = saveFeatureWithValidation(() => { this.save(createFlag, isSaving) - } + }) const featureLimitAlert = Utils.calculateRemainingLimitsPercentage( @@ -1107,7 +1095,7 @@ const CreateFlag = class extends Component { /> )} @@ -1323,6 +1311,7 @@ const CreateFlag = class extends Component { return ( { changeRequest ? 
'Change Request' : 'Feature' }`, ) + const envFlags = FeatureListStore.getEnvironmentFlags() + if (createdFlag) { + //update the create flag modal to edit view const projectFlag = FeatureListStore.getProjectFlags()?.find?.( (flag) => flag.name === createdFlag, ) @@ -1988,7 +1980,6 @@ const FeatureProvider = (WrappedComponent) => { {}, `${document.location.pathname}?feature=${projectFlag.id}`, ) - const envFlags = FeatureListStore.getEnvironmentFlags() const newEnvironmentFlag = envFlags?.[projectFlag.id] || {} setModalTitle(`Edit Feature ${projectFlag.name}`) this.setState({ @@ -1998,6 +1989,20 @@ const FeatureProvider = (WrappedComponent) => { }, projectFlag, }) + } else if (this.props.projectFlag) { + //update the environmentFlag and projectFlag to the new values + const newEnvironmentFlag = + envFlags?.[this.props.projectFlag.id] || {} + const newProjectFlag = FeatureListStore.getProjectFlags()?.find?.( + (flag) => flag.id === this.props.projectFlag.id, + ) + this.setState({ + environmentFlag: { + ...this.state.environmentFlag, + ...(newEnvironmentFlag || {}), + }, + projectFlag: newProjectFlag, + }) } if (changeRequest) { closeModal() diff --git a/frontend/web/components/modals/CreateMetadataField.tsx b/frontend/web/components/modals/CreateMetadataField.tsx index e6cb2981b24a..a46ee9188fae 100644 --- a/frontend/web/components/modals/CreateMetadataField.tsx +++ b/frontend/web/components/modals/CreateMetadataField.tsx @@ -267,7 +267,7 @@ const CreateMetadataField: FC = ({ setDescription(Utils.safeParseEventValue(event)) }} type='text' - title={'Description (optional)'} + title={'Description'} placeholder={"e.g. 'The JIRA Ticket Number associated with this flag'"} /> = ({ const save = (e: FormEvent) => { Utils.preventDefault(e) + setValueChanged(false) const segmentData: Omit = { description, feature: feature, @@ -222,6 +231,28 @@ const CreateSegment: FC = ({ } } + const [valueChanged, setValueChanged] = useState(false) + const onClosing = useCallback(() => { + return new Promise((resolve) => { + if (valueChanged) { + openConfirm({ + body: 'Closing this will discard your unsaved changes.', + noText: 'Cancel', + onNo: () => resolve(false), + onYes: () => resolve(true), + title: 'Discard changes', + yesText: 'Ok', + }) + } else { + resolve(true) + } + }) + return Promise.resolve(true) + }, [valueChanged, isEdit]) + useEffect(() => { + setInterceptClose(onClosing) + return () => setInterceptClose(null) + }, [onClosing]) const isValid = useMemo(() => { if (!rules[0]?.rules?.find((v) => !v.delete)) { return false @@ -316,8 +347,14 @@ const CreateSegment: FC = ({ data-test={`rule-${i}`} rule={rule} operators={operators} - onRemove={() => removeRule(0, i)} - onChange={(v: SegmentRule) => updateRule(0, i, v)} + onRemove={() => { + setValueChanged(true) + removeRule(0, i) + }} + onChange={(v: SegmentRule) => { + setValueChanged(true) + updateRule(0, i, v) + }} />
) @@ -376,13 +413,14 @@ const CreateSegment: FC = ({ id='segmentID' maxLength={SEGMENT_ID_MAXLENGTH} value={name} - onChange={(e: InputEvent) => + onChange={(e: InputEvent) => { + setValueChanged(true) setName( Format.enumeration .set(Utils.safeParseEventValue(e)) .toLowerCase(), ) - } + }} isValid={name && name.length} type='text' placeholder='E.g. power_users' @@ -398,12 +436,13 @@ const CreateSegment: FC = ({ name: 'featureDesc', readOnly: !!identity || readOnly, }} - onChange={(e: InputEvent) => + onChange={(e: InputEvent) => { + setValueChanged(true) setDescription(Utils.safeParseEventValue(e)) - } + }} isValid={name && name.length} type='text' - title='Description (optional)' + title='Description' placeholder="e.g. 'People who have spent over $100' " /> )} @@ -529,7 +568,15 @@ const CreateSegment: FC = ({ <> {isEdit && !condensed ? ( setTab(tab)}> - + + Rules{' '} + {valueChanged &&
{'*'}
} + + } + >
{Tab1}
@@ -543,6 +590,9 @@ const CreateSegment: FC = ({ const isReadOnly = !manageSegmentOverrides return ( { + setValueChanged(true) + }} feature={segment.feature} projectId={projectId} id={segment.id} diff --git a/frontend/web/components/mv/VariationValue.js b/frontend/web/components/mv/VariationValue.js index b1668baf7079..08278bf6329c 100644 --- a/frontend/web/components/mv/VariationValue.js +++ b/frontend/web/components/mv/VariationValue.js @@ -2,6 +2,7 @@ import React from 'react' import ValueEditor from 'components/ValueEditor' // we need this to make JSX compile import Constants from 'common/constants' import Icon from 'components/Icon' +import shallowEqual from 'fbjs/lib/shallowEqual' const VariationValue = ({ disabled, @@ -22,10 +23,24 @@ const VariationValue = ({ className='full-width code-medium' value={Utils.getTypedValue(Utils.featureStateToValue(value))} disabled={disabled || readOnlyValue} + onBlur={() => { + const newValue = { + ...value, + // Trim spaces and do conversion on blur + ...Utils.valueToFeatureState(Utils.featureStateToValue(value)), + } + if (!shallowEqual(newValue, value)) { + //occurs if we converted a trimmed value + onChange(newValue) + } + }} onChange={(e) => { onChange({ ...value, - ...Utils.valueToFeatureState(Utils.safeParseEventValue(e)), + ...Utils.valueToFeatureState( + Utils.safeParseEventValue(e), + false, + ), }) }} placeholder="e.g. 'big' " diff --git a/frontend/web/components/pages/AuditLogItemPage.tsx b/frontend/web/components/pages/AuditLogItemPage.tsx index 29551fe49236..9d9334eb3cf8 100644 --- a/frontend/web/components/pages/AuditLogItemPage.tsx +++ b/frontend/web/components/pages/AuditLogItemPage.tsx @@ -12,6 +12,8 @@ import DiffString from 'components/diff/DiffString' import DiffEnabled from 'components/diff/DiffEnabled' import Format from 'common/utils/format' import { Environment } from 'common/types/responses' +import { Link } from 'react-router-dom' +import Button from 'components/base/forms/Button' type AuditLogItemPageType = { match: { params: { @@ -41,7 +43,7 @@ const AuditLogItemPage: FC = ({ match }) => { items={[ { title: 'Audit Log', - url: `/project/${match.params.projectId}/environment/${match.params.environmentId}/audit-log`, + url: `/project/${match.params.projectId}/audit-log`, }, ]} currentPage={match.params.id} @@ -112,6 +114,15 @@ const AuditLogItemPage: FC = ({ match }) => {
)} + {data.related_object_type === 'EF_VERSION' && + !!data.project && + !!data.environment && ( + + + + )} )}
diff --git a/frontend/web/components/pages/ChangeRequestPage.js b/frontend/web/components/pages/ChangeRequestPage.js index 9d69cbfc97e6..579b3cc6c581 100644 --- a/frontend/web/components/pages/ChangeRequestPage.js +++ b/frontend/web/components/pages/ChangeRequestPage.js @@ -148,13 +148,17 @@ const ChangeRequestsPage = class extends Component { AppActions.actionChangeRequest(this.props.match.params.id, 'approve') } + getScheduledDate = (changeRequest) => { + return changeRequest.environment_feature_versions.length > 0 + ? moment(changeRequest.environment_feature_versions[0].live_from) + : moment(changeRequest.feature_states[0].live_from) + } + publishChangeRequest = () => { const id = this.props.match.params.id const changeRequest = ChangeRequestStore.model[id] - const isScheduled = - new Date(changeRequest.feature_states[0].live_from).valueOf() > - new Date().valueOf() - const scheduledDate = moment(changeRequest.feature_states[0].live_from) + const scheduledDate = this.getScheduledDate(changeRequest) + const isScheduled = scheduledDate > moment() openConfirm({ body: ( @@ -252,11 +256,9 @@ const ChangeRequestsPage = class extends Component { orgUsers && orgUsers.find((v) => v.id === changeRequest.committed_by)) || {} - const isScheduled = - new Date(changeRequest.feature_states[0].live_from).valueOf() > - new Date().valueOf() - const scheduledDate = moment(changeRequest.feature_states[0].live_from) + const scheduledDate = this.getScheduledDate(changeRequest) + const isScheduled = scheduledDate > moment() const approval = changeRequest && diff --git a/frontend/web/components/pages/CreateEnvironmentPage.js b/frontend/web/components/pages/CreateEnvironmentPage.js index dc8e30fc73d5..f61a6c657cab 100644 --- a/frontend/web/components/pages/CreateEnvironmentPage.js +++ b/frontend/web/components/pages/CreateEnvironmentPage.js @@ -190,6 +190,11 @@ const CreateEnvironmentPage = class extends Component { /> )} + {error && ( + + + + )}
{Utils.getFlagsmithHasFeature('enable_metadata') && envContentType?.id && ( @@ -228,7 +233,6 @@ const CreateEnvironmentPage = class extends Component { )} - {error && }
{permission ? ( diff --git a/frontend/web/components/pages/FeatureHistoryDetailPage.tsx b/frontend/web/components/pages/FeatureHistoryDetailPage.tsx new file mode 100644 index 000000000000..b57bbd0cac64 --- /dev/null +++ b/frontend/web/components/pages/FeatureHistoryDetailPage.tsx @@ -0,0 +1,129 @@ +import React, { FC, useState } from 'react' +import ConfigProvider from 'common/providers/ConfigProvider' +import { RouterChildContext } from 'react-router' +import ProjectStore from 'common/stores/project-store' +import { + useGetFeatureVersionQuery, + useGetFeatureVersionsQuery, +} from 'common/services/useFeatureVersion' +import { useGetUsersQuery } from 'common/services/useUser' +import AccountStore from 'common/stores/account-store' +import { Environment } from 'common/types/responses' +import PageTitle from 'components/PageTitle' +import FeatureVersion from 'components/FeatureVersion' +import moment from 'moment' +import ErrorMessage from 'components/ErrorMessage' +import Tabs from 'components/base/forms/Tabs' +import TabItem from 'components/base/forms/TabItem' + +type FeatureHistoryPageType = { + router: RouterChildContext['router'] + + match: { + params: { + id: string + environmentId: string + projectId: string + } + } +} + +const FeatureHistoryPage: FC = ({ match, router }) => { + const [open, setOpen] = useState(false) + + const env: Environment | undefined = ProjectStore.getEnvironment( + match.params.environmentId, + ) as any + // @ts-ignore + const environmentId = `${env?.id}` + const uuid = match.params.id + const { data: users } = useGetUsersQuery({ + organisationId: AccountStore.getOrganisation().id, + }) + const { data, error, isLoading } = useGetFeatureVersionQuery({ + uuid, + }) + const featureId = data?.feature + const { + data: versions, + error: versionsError, + isLoading: versionsLoading, + } = useGetFeatureVersionsQuery( + { + environmentId, + featureId: featureId as any, + }, + { + skip: !featureId, + }, + ) + const user = users?.find((user) => data?.published_by === user.id) + const live = versions?.results?.[0] + return ( +
+ +
+ View and roll back the history of feature values, multivariate values and + segment overrides.
+
+ {!!(error || versionsError) && ( + {error || versionsError} + )} + + {(isLoading || versionsLoading) && ( +
+ +
+ )} + {!!data && !!versions && ( +
+ +
+
+ Published{' '} + + {moment(data.live_from).format('Do MMM HH:mma')} + {' '} + by{' '} + + {user + ? `${user.first_name || ''} ${user.last_name || ''} ` + : 'System '} + +
+ + {!!data.previous_version_uuid && ( + +
+ +
+
+ )} + +
+ +
+
+
+
+
+
+ )} +
+ ) +} + +export default ConfigProvider(FeatureHistoryPage) diff --git a/frontend/web/components/pages/FeatureHistoryPage.tsx b/frontend/web/components/pages/FeatureHistoryPage.tsx index 6102272795a0..dc7fffcc525f 100644 --- a/frontend/web/components/pages/FeatureHistoryPage.tsx +++ b/frontend/web/components/pages/FeatureHistoryPage.tsx @@ -18,8 +18,9 @@ import FeatureVersion from 'components/FeatureVersion' import InlineModal from 'components/InlineModal' import TableFilterItem from 'components/tables/TableFilterItem' import moment from 'moment' +import { Link } from 'react-router-dom' -const widths = [250, 100] +const widths = [250, 150] type FeatureHistoryPageType = { router: RouterChildContext['router'] @@ -40,6 +41,7 @@ const FeatureHistoryPage: FC = ({ match, router }) => { ) as any // @ts-ignore const environmentId = `${env?.id}` + const environmentApiKey = `${env?.api_key}` const { data: users } = useGetUsersQuery({ organisationId: AccountStore.getOrganisation().id, }) @@ -99,6 +101,9 @@ const FeatureHistoryPage: FC = ({ match, router }) => {
View
+
+ Compare +
} renderRow={(v: TFeatureVersion, i: number) => { @@ -117,7 +122,19 @@ const FeatureHistoryPage: FC = ({ match, router }) => { ? `${user.first_name || ''} ${user.last_name || ''} ` : 'System '}
- +
+ + + +
{i + 1 !== data!.results.length && ( <> @@ -152,7 +169,7 @@ const FeatureHistoryPage: FC = ({ match, router }) => { theme='text' size='xSmall' > - Compare + Quick compare
)} diff --git a/frontend/web/components/pages/GitHubSetupPage.tsx b/frontend/web/components/pages/GitHubSetupPage.tsx index 4e3d77a9b37b..83d9bf12753f 100644 --- a/frontend/web/components/pages/GitHubSetupPage.tsx +++ b/frontend/web/components/pages/GitHubSetupPage.tsx @@ -61,8 +61,8 @@ const GitHubSetupPage: FC = ({ location }) => { ] = useCreateGithubRepositoryMutation() useEffect(() => { - if (reposLoaded && repos.repositories) { - setRepositoryOwner(repos?.repositories[0].owner.login) + if (reposLoaded && repos.results) { + setRepositoryOwner(repos?.results[0].full_name.split('/')[0]) setRepositories(repos) } }, [repos, reposLoaded]) @@ -168,7 +168,7 @@ const GitHubSetupPage: FC = ({ location }) => { size='select-md' placeholder={'Select your repository'} onChange={(v: repoType) => setRepositoryName(v.label)} - options={repositories?.repositories?.map((r: repoType) => { + options={repositories?.results?.map((r: repoType) => { return { label: r.name, value: r.name } })} /> @@ -254,7 +254,7 @@ const GitHubSetupPage: FC = ({ location }) => { await createGithubRepository({ body: { project: project.id, - repository_name: repositoryName, + repository_name: project.repo, repository_owner: repositoryOwner, }, github_id: res?.data?.id, diff --git a/frontend/web/components/pages/HomePage.js b/frontend/web/components/pages/HomePage.js index ac6c710b8894..bb32be331c91 100644 --- a/frontend/web/components/pages/HomePage.js +++ b/frontend/web/components/pages/HomePage.js @@ -367,6 +367,7 @@ const HomePage = class extends React.Component { title='Password' inputProps={{ className: 'full-width', + enableAutoComplete: true, error: error && error.password, name: 'password', }} @@ -569,6 +570,7 @@ const HomePage = class extends React.Component { data-test='email' inputProps={{ className: 'full-width', + enableAutoComplete: true, error: error && error.email, name: 'email', }} diff --git a/frontend/web/components/saveFeatureWithValidation.ts b/frontend/web/components/saveFeatureWithValidation.ts new file mode 100644 index 000000000000..816b7791269a --- /dev/null +++ b/frontend/web/components/saveFeatureWithValidation.ts @@ -0,0 +1,19 @@ +export const saveFeatureWithValidation = (cb: (schedule?: boolean) => void) => { + return (schedule: boolean) => { + if (document.getElementById('language-validation-error')) { + openConfirm({ + body: 'Your remote config value does not pass validation for the language you have selected. Are you sure you wish to save?', + noText: 'Cancel', + onYes: () => cb(), + title: 'Validation error', + yesText: 'Save', + }) + } else { + if (schedule) { + cb(schedule) + } else { + cb() + } + } + } +} diff --git a/frontend/web/project/api.js b/frontend/web/project/api.js index 665f118fd2f3..1f33a17a6bf4 100644 --- a/frontend/web/project/api.js +++ b/frontend/web/project/api.js @@ -17,7 +17,13 @@ global.API = { // Catch coding errors that end up here if (res instanceof Error) { - console.log(res) + console.error(res) + store.error = res + store.goneABitWest() + return + } else if (res.data) { + store.error = res.data + store.goneABitWest() return } diff --git a/frontend/web/project/project-components.js b/frontend/web/project/project-components.js index 8a50c7455ad1..fa740f053778 100644 --- a/frontend/web/project/project-components.js +++ b/frontend/web/project/project-components.js @@ -113,11 +113,17 @@ global.Select = class extends PureComponent { ))}
) : ( - + ) } } diff --git a/frontend/web/routes.js b/frontend/web/routes.js index 1370fd8a529b..f91b836d9044 100644 --- a/frontend/web/routes.js +++ b/frontend/web/routes.js @@ -39,6 +39,7 @@ import UsersAndPermissionsPage from './components/pages/UsersAndPermissionsPage' import ProjectRedirectPage from './components/pages/ProjectRedirectPage' import SDKKeysPage from './components/SDKKeysPage' import { ParameterizedRoute } from './components/base/higher-order/ParameterizedRoute' +import FeatureHistoryDetailPage from './components/pages/FeatureHistoryDetailPage' export default ( @@ -138,6 +139,11 @@ export default ( exact component={FeatureHistoryPage} /> + - diff --git a/frontend/web/styles/3rdParty/_hljs.scss b/frontend/web/styles/3rdParty/_hljs.scss index 1ce7dc9051f5..6d768aba9c95 100644 --- a/frontend/web/styles/3rdParty/_hljs.scss +++ b/frontend/web/styles/3rdParty/_hljs.scss @@ -69,6 +69,9 @@ } } span { + display: flex; + align-items: center; + gap: 2px; color: $text-icon-light-grey; cursor: pointer; font-size: $font-caption-sm; diff --git a/frontend/web/styles/project/_alert.scss b/frontend/web/styles/project/_alert.scss index 2715e5fded0b..db8d0ab54b76 100644 --- a/frontend/web/styles/project/_alert.scss +++ b/frontend/web/styles/project/_alert.scss @@ -40,6 +40,12 @@ .alert-info { background-color: $alert-info-bg; border-color: $alert-info-border-color; + + .close-btn { + margin-top: -3px; + font-size: 20px; + color: $alert-announcement-close-btn; + } .title { font-weight: 500; margin-bottom: 2px; @@ -72,25 +78,6 @@ } } -.announcement { - margin: auto; - margin-top: 10px; - display: flex; - flex-direction: row; - .body { - flex-direction: column; - justify-content: flex-start; - } - .close-btn { - margin-top: -3px; - font-size: 20px; - color: $alert-announcement-close-btn; - } - .info-icon { - margin-top: 15px; - } -} - .dark { .alert { color: $alert-color-dark; @@ -114,11 +101,6 @@ .alert-danger{ background-color: $danger-solid-dark-alert } - .announcement { - .close-btn { - color: $alert-color-dark; - } - } } .text-info { color: $primary !important; diff --git a/infrastructure/aws/production/ecs-task-definition-task-processor.json b/infrastructure/aws/production/ecs-task-definition-task-processor.json index 6c441ca92a58..db3a2388c2db 100644 --- a/infrastructure/aws/production/ecs-task-definition-task-processor.json +++ b/infrastructure/aws/production/ecs-task-definition-task-processor.json @@ -139,7 +139,11 @@ }, { "name": "TASK_DELETE_RETENTION_DAYS", - "value": "45" + "value": "44" + }, + { + "name": "TASK_DELETE_BATCH_SIZE", + "value": "50000" }, { "name": "ENABLE_HUBSPOT_LEAD_TRACKING", diff --git a/infrastructure/aws/production/ecs-task-definition-web.json b/infrastructure/aws/production/ecs-task-definition-web.json index e98f5bce901e..3d5cdd1a9c5e 100644 --- a/infrastructure/aws/production/ecs-task-definition-web.json +++ b/infrastructure/aws/production/ecs-task-definition-web.json @@ -137,7 +137,7 @@ }, { "name": "DASHBOARD_ENDPOINTS_SENTRY_TRACE_SAMPLE_RATE", - "value": "0.01" + "value": "0.002" }, { "name": "SLACK_CLIENT_ID", diff --git a/infrastructure/aws/staging/ecs-task-definition-task-processor.json b/infrastructure/aws/staging/ecs-task-definition-task-processor.json index b944f02cd50a..19bcb70f4d3a 100644 --- a/infrastructure/aws/staging/ecs-task-definition-task-processor.json +++ b/infrastructure/aws/staging/ecs-task-definition-task-processor.json @@ -109,7 +109,7 @@ }, { "name": "USE_POSTGRES_FOR_ANALYTICS", - "value": "True" + "value": "False" }, { "name": 
"TASK_DELETE_RETENTION_DAYS", @@ -153,10 +153,6 @@ "name": "CHARGEBEE_API_KEY", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:CHARGEBEE_API_KEY::" }, - { - "name": "ANALYTICS_DATABASE_URL", - "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:ANALYTICS_DATABASE_URL::" - }, { "name": "DATABASE_URL", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:DATABASE_URL::" diff --git a/infrastructure/aws/staging/ecs-task-definition-web.json b/infrastructure/aws/staging/ecs-task-definition-web.json index a7edca9cc62c..16938a00ae39 100644 --- a/infrastructure/aws/staging/ecs-task-definition-web.json +++ b/infrastructure/aws/staging/ecs-task-definition-web.json @@ -165,7 +165,19 @@ }, { "name": "USE_POSTGRES_FOR_ANALYTICS", - "value": "True" + "value": "False" + }, + { + "name": "INFLUXDB_ORG", + "value": "ben.rometsch@bullet-train.io" + }, + { + "name": "INFLUXDB_BUCKET", + "value": "api_staging" + }, + { + "name": "INFLUXDB_URL", + "value": "https://eu-central-1-1.aws.cloud2.influxdata.com" }, { "name": "DEFAULT_THROTTLE_CLASSES", @@ -197,10 +209,6 @@ "name": "DATABASE_URL", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:DATABASE_URL::" }, - { - "name": "ANALYTICS_DATABASE_URL", - "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:ANALYTICS_DATABASE_URL::" - }, { "name": "DJANGO_SECRET_KEY", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:DJANGO_SECRET_KEY::" @@ -217,6 +225,10 @@ "name": "GITHUB_CLIENT_SECRET", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:GITHUB_CLIENT_SECRET::" }, + { + "name": "INFLUXDB_TOKEN", + "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:INFLUXDB_TOKEN::" + }, { "name": "OAUTH_CLIENT_SECRET", "valueFrom": "arn:aws:secretsmanager:eu-west-2:302456015006:secret:ECS-API-heAdoB:OAUTH_CLIENT_SECRET::" diff --git a/release-please-config.json b/release-please-config.json index 80f8645ec39f..e5614eec229d 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -29,19 +29,14 @@ "section": "Infrastructure (Flagsmith SaaS Only)" }, { - "type": "docs", - "hidden": true, - "section": "Docs" - }, - { - "type": "chore", + "type": "ci", "hidden": true, - "section": "Other" + "section": "CI" }, { - "type": "build", + "type": "docs", "hidden": true, - "section": "Build" + "section": "Docs" }, { "type": "deps", @@ -49,9 +44,9 @@ "section": "Dependency Updates" }, { - "type": "ci", + "type": "perf", "hidden": true, - "section": "CI" + "section": "Performance Improvements" }, { "type": "refactor", @@ -59,14 +54,14 @@ "section": "Refactoring" }, { - "type": "style", + "type": "test", "hidden": true, - "section": "Code Style" + "section": "Tests" }, { - "type": "test", + "type": "chore", "hidden": true, - "section": "Tests" + "section": "Other" } ] } \ No newline at end of file diff --git a/trivy.yaml b/trivy.yaml new file mode 100644 index 000000000000..3d69c3ef41b5 --- /dev/null +++ b/trivy.yaml @@ -0,0 +1,4 @@ +format: json +exit-code: 0 +severity: CRITICAL,HIGH +vuln-type: os,library diff --git a/version.txt b/version.txt index 3fbf3f9a4bc7..f8724e145388 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.116.0 +2.119.1