From 39a93766d622dc00f3fe550d254e6fed106cb689 Mon Sep 17 00:00:00 2001 From: Kyle McCormick Date: Mon, 16 Dec 2024 12:43:43 -0500 Subject: [PATCH 01/20] build!: Remove Dockerfile and image building workflows (#36006) These are not maintained by the Open edX community. https://github.com/openedx/public-engineering/issues/263 --- .github/workflows/docker-publish.yml | 43 ---- .github/workflows/publish-ci-docker-image.yml | 43 ---- Dockerfile | 200 ------------------ Makefile | 34 +-- 4 files changed, 1 insertion(+), 319 deletions(-) delete mode 100644 .github/workflows/docker-publish.yml delete mode 100644 .github/workflows/publish-ci-docker-image.yml delete mode 100644 Dockerfile diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml deleted file mode 100644 index 6831e3563d81..000000000000 --- a/.github/workflows/docker-publish.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Push Docker Images - -on: - push: - branches: - - master - -jobs: - # Push image to GitHub Packages. - # See also https://docs.docker.com/docker-hub/builds/ - push: - runs-on: ubuntu-latest - if: github.event_name == 'push' - - strategy: - matrix: - variant: - - "lms_dev" - - "cms_dev" - - "cms" - - "lms" - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_PASSWORD }} - - - name: Build and push lms/cms base docker images - env: - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - run: make docker_tag_build_push_${{matrix.variant}} diff --git a/.github/workflows/publish-ci-docker-image.yml b/.github/workflows/publish-ci-docker-image.yml deleted file mode 100644 index 6a0f3768b7e6..000000000000 --- a/.github/workflows/publish-ci-docker-image.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Push CI Runner Docker Image - -on: - workflow_dispatch: - schedule: - - cron: "0 1 * * 3" - -jobs: - push: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - # This has to happen after checkout in order for gh to work. - - name: "Cancel scheduled job on forks" - if: github.repository != 'openedx/edx-platform' && github.event_name == 'schedule' - env: - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - run: | - gh run cancel "${{ github.run_id }}" - gh run watch "${{ github.run_id }}" - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.TOOLS_EDX_ECR_USER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.TOOLS_EDX_ECR_USER_AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-1 - - - name: Log in to ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@v2 - - - name: Build, tag, and push image to Amazon ECR - env: - ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - ECR_REPOSITORY: actions-runner - IMAGE_TAG: latest - run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f scripts/ci-runner.Dockerfile . - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index a67c46738bcc..000000000000 --- a/Dockerfile +++ /dev/null @@ -1,200 +0,0 @@ -FROM ubuntu:focal as minimal-system - -# Warning: This file is experimental. 
-# -# Short-term goals: -# * Be a suitable replacement for the `edxops/edxapp` image in devstack (in progress). -# * Take advantage of Docker caching layers: aim to put commands in order of -# increasing cache-busting frequency. -# * Related to ^, use no Ansible or Paver. -# Long-term goal: -# * Be a suitable base for production LMS and CMS images (THIS IS NOT YET THE CASE!). - -ARG DEBIAN_FRONTEND=noninteractive -ARG SERVICE_VARIANT -ARG SERVICE_PORT - -# Env vars: paver -# We intentionally don't use paver in this Dockerfile, but Devstack may invoke paver commands -# during provisioning. Enabling NO_PREREQ_INSTALL tells paver not to re-install Python -# requirements for every paver command, potentially saving a lot of developer time. -ARG NO_PREREQ_INSTALL='1' - -# Env vars: locale -ENV LANG='en_US.UTF-8' -ENV LANGUAGE='en_US:en' -ENV LC_ALL='en_US.UTF-8' - -# Env vars: configuration -ENV CONFIG_ROOT='/edx/etc' -ENV LMS_CFG="$CONFIG_ROOT/lms.yml" -ENV CMS_CFG="$CONFIG_ROOT/cms.yml" - -# Env vars: path -ENV VIRTUAL_ENV="/edx/app/edxapp/venvs/edxapp" -ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" -ENV PATH="/edx/app/edxapp/edx-platform/node_modules/.bin:${PATH}" -ENV PATH="/edx/app/edxapp/edx-platform/bin:${PATH}" -ENV PATH="/edx/app/edxapp/nodeenv/bin:${PATH}" - -WORKDIR /edx/app/edxapp/edx-platform - -# Create user before assigning any directory ownership to it. -RUN useradd -m --shell /bin/false app - -# Use debconf to set locales to be generated when the locales apt package is installed later. -RUN echo "locales locales/default_environment_locale select en_US.UTF-8" | debconf-set-selections -RUN echo "locales locales/locales_to_be_generated multiselect en_US.UTF-8 UTF-8" | debconf-set-selections - -# Setting up ppa deadsnakes to get python 3.11 -RUN apt-get update && \ - apt-get install -y software-properties-common && \ - apt-add-repository -y ppa:deadsnakes/ppa - -# Install requirements that are absolutely necessary -RUN apt-get update && \ - apt-get -y dist-upgrade && \ - apt-get -y install --no-install-recommends \ - python3-pip \ - python3.11 \ - # python3-dev: required for building mysqlclient python package - python3.11-dev \ - python3.11-venv \ - libpython3.11 \ - libpython3.11-stdlib \ - libmysqlclient21 \ - # libmysqlclient-dev: required for building mysqlclient python package - libmysqlclient-dev \ - pkg-config \ - libssl1.1 \ - libxmlsec1-openssl \ - # lynx: Required by https://github.com/openedx/edx-platform/blob/b489a4ecb122/openedx/core/lib/html_to_text.py#L16 - lynx \ - ntp \ - git \ - build-essential \ - gettext \ - gfortran \ - graphviz \ - locales \ - swig \ - && \ - apt-get clean all && \ - rm -rf /var/lib/apt/* - -RUN mkdir -p /edx/var/edxapp -RUN mkdir -p /edx/etc -RUN chown app:app /edx/var/edxapp - -# The builder-production stage is a temporary stage that installs required packages and builds the python virtualenv, -# installs nodejs and node_modules. -# The built artifacts from this stage are then copied to the base stage. 
-FROM minimal-system as builder-production - -RUN apt-get update && \ - apt-get -y install --no-install-recommends \ - curl \ - libssl-dev \ - libffi-dev \ - libfreetype6-dev \ - libgeos-dev \ - libgraphviz-dev \ - libjpeg8-dev \ - liblapack-dev \ - libpng-dev \ - libsqlite3-dev \ - libxml2-dev \ - libxmlsec1-dev \ - libxslt1-dev - -# Setup python virtual environment -# It is already 'activated' because $VIRTUAL_ENV/bin was put on $PATH -RUN python3.11 -m venv "${VIRTUAL_ENV}" - -# Install python requirements -# Requires copying over requirements files, but not entire repository -COPY requirements requirements -RUN pip install -r requirements/pip.txt -RUN pip install -r requirements/edx/base.txt - -# Install node and npm -RUN nodeenv /edx/app/edxapp/nodeenv --node=20.15.1 --prebuilt -RUN npm install -g npm@10.7.x - -# This script is used by an npm post-install hook. -# We copy it into the image now so that it will be available when we run `npm install` in the next step. -# The script itself will copy certain modules into some uber-legacy parts of edx-platform which still use RequireJS. -COPY scripts/copy-node-modules.sh scripts/copy-node-modules.sh - -# Install node modules -COPY package.json package.json -COPY package-lock.json package-lock.json -RUN npm set progress=false && npm ci - -# The builder-development stage is a temporary stage that installs python modules required for development purposes -# The built artifacts from this stage are then copied to the development stage. -FROM builder-production as builder-development - -RUN pip install -r requirements/edx/development.txt - -# base stage -FROM minimal-system as base - -# Copy python virtual environment, nodejs and node_modules -COPY --from=builder-production /edx/app/edxapp/venvs/edxapp /edx/app/edxapp/venvs/edxapp -COPY --from=builder-production /edx/app/edxapp/nodeenv /edx/app/edxapp/nodeenv -COPY --from=builder-production /edx/app/edxapp/edx-platform/node_modules /edx/app/edxapp/edx-platform/node_modules - -# Copy over remaining parts of repository (including all code) -COPY . . - -# Install Python requirements again in order to capture local projects -RUN pip install -e . - -# Setting edx-platform directory as safe for git commands -RUN git config --global --add safe.directory /edx/app/edxapp/edx-platform - -# Production target -FROM base as production - -USER app - -ENV EDX_PLATFORM_SETTINGS='docker-production' -ENV SERVICE_VARIANT="${SERVICE_VARIANT}" -ENV SERVICE_PORT="${SERVICE_PORT}" -ENV DJANGO_SETTINGS_MODULE="${SERVICE_VARIANT}.envs.$EDX_PLATFORM_SETTINGS" -EXPOSE ${SERVICE_PORT} - -CMD gunicorn \ - -c /edx/app/edxapp/edx-platform/${SERVICE_VARIANT}/docker_${SERVICE_VARIANT}_gunicorn.py \ - --name ${SERVICE_VARIANT} \ - --bind=0.0.0.0:${SERVICE_PORT} \ - --max-requests=1000 \ - --access-logfile \ - - ${SERVICE_VARIANT}.wsgi:application - -# Development target -FROM base as development - -RUN apt-get update && \ - apt-get -y install --no-install-recommends \ - # wget is used in Makefile for common_constraints.txt - wget \ - && \ - apt-get clean all && \ - rm -rf /var/lib/apt/* - -COPY --from=builder-development /edx/app/edxapp/venvs/edxapp /edx/app/edxapp/venvs/edxapp - -RUN ln -s "$(pwd)/lms/envs/devstack-experimental.yml" "$LMS_CFG" -RUN ln -s "$(pwd)/cms/envs/devstack-experimental.yml" "$CMS_CFG" -# Temporary compatibility hack while devstack is supporting both the old `edxops/edxapp` image and this image. -# * Add in a dummy ../edxapp_env file -# * devstack sets /edx/etc/studio.yml as CMS_CFG. 
-RUN ln -s "$(pwd)/cms/envs/devstack-experimental.yml" "/edx/etc/studio.yml" -RUN touch ../edxapp_env - -ENV EDX_PLATFORM_SETTINGS='devstack_docker' -ENV SERVICE_VARIANT="${SERVICE_VARIANT}" -EXPOSE ${SERVICE_PORT} -CMD ./manage.py ${SERVICE_VARIANT} runserver 0.0.0.0:${SERVICE_PORT} diff --git a/Makefile b/Makefile index 62681f6f3711..8297f4b27b4b 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,7 @@ # Do things in edx-platform .PHONY: base-requirements check-types clean \ compile-requirements detect_changed_source_translations dev-requirements \ - docker_auth docker_build docker_tag_build_push_lms docker_tag_build_push_lms_dev \ - docker_tag_build_push_cms docker_tag_build_push_cms_dev docs extract_translations \ + docs extract_translations \ guides help lint-imports local-requirements migrate migrate-lms migrate-cms \ pre-requirements pull pull_xblock_translations pull_translations push_translations \ requirements shell swagger \ @@ -67,9 +66,6 @@ pull_translations: clean_translations ## pull translations via atlas detect_changed_source_translations: ## check if translation files are up-to-date i18n_tool changed -pull: ## update the Docker image used by "make shell" - docker pull edxops/edxapp:latest - pre-requirements: ## install Python requirements for running pip-tools pip install -r requirements/pip.txt pip install -r requirements/pip-tools.txt @@ -94,13 +90,6 @@ test-requirements: pre-requirements requirements: dev-requirements ## install development environment requirements -shell: ## launch a bash shell in a Docker container with all edx-platform dependencies installed - docker run -it -e "NO_PYTHON_UNINSTALL=1" -e "PIP_INDEX_URL=https://pypi.python.org/simple" -e TERM \ - -v `pwd`:/edx/app/edxapp/edx-platform:cached \ - -v edxapp_lms_assets:/edx/var/edxapp/staticfiles/ \ - -v edxapp_node_modules:/edx/app/edxapp/edx-platform/node_modules \ - edxops/edxapp:latest /edx/app/edxapp/devstack.sh open - # Order is very important in this list: files must appear after everything they include! REQ_FILES = \ requirements/edx/coverage \ @@ -164,27 +153,6 @@ upgrade-package: ## update just one package to the latest usable release check-types: ## run static type-checking tests mypy -docker_auth: - echo "$$DOCKERHUB_PASSWORD" | docker login -u "$$DOCKERHUB_USERNAME" --password-stdin - -docker_build: docker_auth - DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target development -t openedx/lms-dev - DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target production -t openedx/lms - DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target development -t openedx/cms-dev - DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target production -t openedx/cms - -docker_tag_build_push_lms: docker_auth - docker buildx build -t openedx/lms:latest -t openedx/lms:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target production --push . - -docker_tag_build_push_lms_dev: docker_auth - docker buildx build -t openedx/lms-dev:latest -t openedx/lms-dev:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target development --push . 
- -docker_tag_build_push_cms: docker_auth - docker buildx build -t openedx/cms:latest -t openedx/cms:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target production --push . - -docker_tag_build_push_cms_dev: docker_auth - docker buildx build -t openedx/cms-dev:latest -t openedx/cms-dev:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target development --push . - lint-imports: lint-imports From fb56042bdc125cff060afb61ca4bb6d4cc066f47 Mon Sep 17 00:00:00 2001 From: Tim McCormack Date: Mon, 16 Dec 2024 15:01:40 -0500 Subject: [PATCH 02/20] temp: Add temporary monitoring for large memcache keys (#36034) We'd like to finish the removal of MD4 key hashing, but want to know if cutting over to BLAKE2b will cause too large a cache turnover. Hopefully this monitoring will give us some insight into the rate of large keys. --- common/djangoapps/util/memcache.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/common/djangoapps/util/memcache.py b/common/djangoapps/util/memcache.py index 2f7e6dc623a0..723ca87098d1 100644 --- a/common/djangoapps/util/memcache.py +++ b/common/djangoapps/util/memcache.py @@ -9,6 +9,7 @@ from django.conf import settings from django.utils.encoding import smart_str +from edx_django_utils.monitoring.utils import increment def fasthash(string): @@ -48,9 +49,18 @@ def safe_key(key, key_prefix, version): # Attempt to combine the prefix, version, and key combined = ":".join([key_prefix, version, key]) + # Temporary: Add observability to large-key hashing to help us + # understand the safety of a cutover from md4 to blake2b hashing. + # See https://github.com/edx/edx-arch-experiments/issues/872 + increment('memcache.safe_key.called') + # If the total length is too long for memcache, hash it if len(combined) > 250: combined = fasthash(combined) + # Temporary: See + # https://github.com/edx/edx-arch-experiments/issues/872 and + # previous comment. 
+ increment('memcache.safe_key.hash_large') # Return the result return combined From 9d2f890f1ec52e0cb98f879a3605792ad0771abf Mon Sep 17 00:00:00 2001 From: BrandonHBodine <5282802+BrandonHBodine@users.noreply.github.com> Date: Mon, 16 Dec 2024 22:07:46 +0000 Subject: [PATCH 03/20] feat: Upgrade Python dependency edxval Bumping edxval to pull in new release Commit generated by workflow `openedx/edx-platform/.github/workflows/upgrade-one-python-dependency.yml@refs/heads/master` --- requirements/edx/base.txt | 2 +- requirements/edx/development.txt | 2 +- requirements/edx/doc.txt | 2 +- requirements/edx/testing.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index f95ae8adb2c9..2fd521330acc 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -549,7 +549,7 @@ edx-when==2.5.0 # via # -r requirements/edx/kernel.in # edx-proctoring -edxval==2.6.1 +edxval==2.7.0 # via -r requirements/edx/kernel.in elasticsearch==7.9.1 # via diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt index 75c533947407..45e90b16dd34 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -855,7 +855,7 @@ edx-when==2.5.0 # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt # edx-proctoring -edxval==2.6.1 +edxval==2.7.0 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index f7031d349784..3c1d06a28b4f 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -635,7 +635,7 @@ edx-when==2.5.0 # via # -r requirements/edx/base.txt # edx-proctoring -edxval==2.6.1 +edxval==2.7.0 # via -r requirements/edx/base.txt elasticsearch==7.9.1 # via diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index 57a0dc6341ad..82e57272137a 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -658,7 +658,7 @@ edx-when==2.5.0 # via # -r requirements/edx/base.txt # edx-proctoring -edxval==2.6.1 +edxval==2.7.0 # via -r requirements/edx/base.txt elasticsearch==7.9.1 # via From 53de4065377272f5a8cb5482aee8846b9d36d42e Mon Sep 17 00:00:00 2001 From: Awais Qureshi Date: Tue, 17 Dec 2024 16:35:22 +0500 Subject: [PATCH 04/20] feat!: upgrade bulk_beta_modify_access to drf ( 30 ) (#35604) * feat!: upgrading api to DRF. 
--- lms/djangoapps/instructor/tests/test_api.py | 9 ++ lms/djangoapps/instructor/views/api.py | 151 +++++++++--------- lms/djangoapps/instructor/views/api_urls.py | 2 +- lms/djangoapps/instructor/views/serializer.py | 71 ++++++++ 4 files changed, 158 insertions(+), 75 deletions(-) diff --git a/lms/djangoapps/instructor/tests/test_api.py b/lms/djangoapps/instructor/tests/test_api.py index 4d3e413f5a2a..f1e7322abc6b 100644 --- a/lms/djangoapps/instructor/tests/test_api.py +++ b/lms/djangoapps/instructor/tests/test_api.py @@ -1990,6 +1990,15 @@ def test_add_notenrolled_username_autoenroll(self): self.add_notenrolled(response, self.notenrolled_student.username) assert CourseEnrollment.is_enrolled(self.notenrolled_student, self.course.id) + def test_add_notenrolled_username_autoenroll_with_multiple_users(self): + url = reverse('bulk_beta_modify_access', kwargs={'course_id': str(self.course.id)}) + identifiers = (f"Lorem@ipsum.dolor, " + f"sit@amet.consectetur\nadipiscing@elit.Aenean\r convallis@at.lacus\r, ut@lacinia.Sed, " + f"{self.notenrolled_student.username}" + ) + response = self.client.post(url, {'identifiers': identifiers, 'action': 'add', 'email_students': False, 'auto_enroll': True}) # lint-amnesty, pylint: disable=line-too-long + assert 6, len(json.loads(response.content.decode())['results']) + @ddt.data('http', 'https') def test_add_notenrolled_with_email(self, protocol): url = reverse('bulk_beta_modify_access', kwargs={'course_id': str(self.course.id)}) diff --git a/lms/djangoapps/instructor/views/api.py b/lms/djangoapps/instructor/views/api.py index 631500fe3246..43c9c1947c71 100644 --- a/lms/djangoapps/instructor/views/api.py +++ b/lms/djangoapps/instructor/views/api.py @@ -109,6 +109,7 @@ CertificateSerializer, CertificateStatusesSerializer, ListInstructorTaskInputSerializer, + ModifyAccessSerializer, RoleNameSerializer, SendEmailSerializer, ShowStudentExtensionSerializer, @@ -914,88 +915,91 @@ def students_update_enrollment(request, course_id): # lint-amnesty, pylint: dis return JsonResponse(response_payload) -@require_POST -@ensure_csrf_cookie -@cache_control(no_cache=True, no_store=True, must_revalidate=True) -@require_course_permission(permissions.CAN_BETATEST) -@common_exceptions_400 -@require_post_params( - identifiers="stringified list of emails and/or usernames", - action="add or remove", -) -def bulk_beta_modify_access(request, course_id): +@method_decorator(cache_control(no_cache=True, no_store=True, must_revalidate=True), name='dispatch') +class BulkBetaModifyAccess(DeveloperErrorViewMixin, APIView): """ Enroll or unenroll users in beta testing program. - - Query parameters: - - identifiers is string containing a list of emails and/or usernames separated by - anything split_input_list can handle. 
- - action is one of ['add', 'remove'] """ - course_id = CourseKey.from_string(course_id) - action = request.POST.get('action') - identifiers_raw = request.POST.get('identifiers') - identifiers = _split_input_list(identifiers_raw) - email_students = _get_boolean_param(request, 'email_students') - auto_enroll = _get_boolean_param(request, 'auto_enroll') - results = [] - rolename = 'beta' - course = get_course_by_id(course_id) + permission_classes = (IsAuthenticated, permissions.InstructorPermission) + permission_name = permissions.CAN_BETATEST + serializer_class = ModifyAccessSerializer - email_params = {} - if email_students: - secure = request.is_secure() - email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure) + @method_decorator(ensure_csrf_cookie) + def post(self, request, course_id): + """ + Query parameters: + - identifiers is string containing a list of emails and/or usernames separated by + anything split_input_list can handle. + - action is one of ['add', 'remove'] + """ + course_id = CourseKey.from_string(course_id) + serializer = self.serializer_class(data=request.data) + if not serializer.is_valid(): + return JsonResponse({'message': serializer.errors}, status=400) - for identifier in identifiers: - try: - error = False - user_does_not_exist = False - user = get_student_from_identifier(identifier) - user_active = user.is_active + action = serializer.validated_data['action'] + identifiers = serializer.validated_data['identifiers'] + email_students = serializer.validated_data['email_students'] + auto_enroll = serializer.validated_data['auto_enroll'] - if action == 'add': - allow_access(course, user, rolename) - elif action == 'remove': - revoke_access(course, user, rolename) + results = [] + rolename = 'beta' + course = get_course_by_id(course_id) + + email_params = {} + if email_students: + secure = request.is_secure() + email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure) + + for identifier in identifiers: + try: + error = False + user_does_not_exist = False + user = get_student_from_identifier(identifier) + user_active = user.is_active + + if action == 'add': + allow_access(course, user, rolename) + elif action == 'remove': + revoke_access(course, user, rolename) + else: + return HttpResponseBadRequest(strip_tags( + f"Unrecognized action '{action}'" + )) + except User.DoesNotExist: + error = True + user_does_not_exist = True + user_active = None + # catch and log any unexpected exceptions + # so that one error doesn't cause a 500. + except Exception as exc: # pylint: disable=broad-except + log.exception("Error while #{}ing student") + log.exception(exc) + error = True else: - return HttpResponseBadRequest(strip_tags( - f"Unrecognized action '{action}'" - )) - except User.DoesNotExist: - error = True - user_does_not_exist = True - user_active = None - # catch and log any unexpected exceptions - # so that one error doesn't cause a 500. 
- except Exception as exc: # pylint: disable=broad-except - log.exception("Error while #{}ing student") - log.exception(exc) - error = True - else: - # If no exception thrown, see if we should send an email - if email_students: - send_beta_role_email(action, user, email_params) - # See if we should autoenroll the student - if auto_enroll: - # Check if student is already enrolled - if not is_user_enrolled_in_course(user, course_id): - CourseEnrollment.enroll(user, course_id) + # If no exception thrown, see if we should send an email + if email_students: + send_beta_role_email(action, user, email_params) + # See if we should autoenroll the student + if auto_enroll: + # Check if student is already enrolled + if not is_user_enrolled_in_course(user, course_id): + CourseEnrollment.enroll(user, course_id) - finally: - # Tabulate the action result of this email address - results.append({ - 'identifier': identifier, - 'error': error, # pylint: disable=used-before-assignment - 'userDoesNotExist': user_does_not_exist, # pylint: disable=used-before-assignment - 'is_active': user_active # pylint: disable=used-before-assignment - }) + finally: + # Tabulate the action result of this email address + results.append({ + 'identifier': identifier, + 'error': error, # pylint: disable=used-before-assignment + 'userDoesNotExist': user_does_not_exist, # pylint: disable=used-before-assignment + 'is_active': user_active # pylint: disable=used-before-assignment + }) - response_payload = { - 'action': action, - 'results': results, - } - return JsonResponse(response_payload) + response_payload = { + 'action': action, + 'results': results, + } + return JsonResponse(response_payload) @method_decorator(cache_control(no_cache=True, no_store=True, must_revalidate=True), name='dispatch') @@ -1025,7 +1029,6 @@ def post(self, request, course_id): course = get_course_with_access( request.user, 'instructor', course_id, depth=None ) - serializer_data = AccessSerializer(data=request.data) if not serializer_data.is_valid(): return HttpResponseBadRequest(reason=serializer_data.errors) diff --git a/lms/djangoapps/instructor/views/api_urls.py b/lms/djangoapps/instructor/views/api_urls.py index ea27034d0942..6f67a1a6863c 100644 --- a/lms/djangoapps/instructor/views/api_urls.py +++ b/lms/djangoapps/instructor/views/api_urls.py @@ -25,7 +25,7 @@ path('register_and_enroll_students', api.RegisterAndEnrollStudents.as_view(), name='register_and_enroll_students'), path('list_course_role_members', api.ListCourseRoleMembersView.as_view(), name='list_course_role_members'), path('modify_access', api.ModifyAccess.as_view(), name='modify_access'), - path('bulk_beta_modify_access', api.bulk_beta_modify_access, name='bulk_beta_modify_access'), + path('bulk_beta_modify_access', api.BulkBetaModifyAccess.as_view(), name='bulk_beta_modify_access'), path('get_problem_responses', api.get_problem_responses, name='get_problem_responses'), path('get_issued_certificates/', api.GetIssuedCertificates.as_view(), name='get_issued_certificates'), re_path(r'^get_students_features(?P/csv)?$', api.GetStudentsFeatures.as_view(), name='get_students_features'), diff --git a/lms/djangoapps/instructor/views/serializer.py b/lms/djangoapps/instructor/views/serializer.py index f7fc685f658c..7b20eed32f60 100644 --- a/lms/djangoapps/instructor/views/serializer.py +++ b/lms/djangoapps/instructor/views/serializer.py @@ -1,4 +1,5 @@ """ Instructor apis serializers. 
""" +import re from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user from django.core.exceptions import ValidationError @@ -232,6 +233,76 @@ def __init__(self, *args, **kwargs): self.fields['due_datetime'].required = False +class ModifyAccessSerializer(serializers.Serializer): + """ + serializers for enroll or un-enroll users in beta testing program. + """ + identifiers = serializers.CharField( + help_text="A comma separated list of emails or usernames.", + required=True + ) + action = serializers.ChoiceField( + choices=["add", "remove"], + help_text="Action to perform: add or remove.", + required=True + ) + + email_students = serializers.BooleanField( + default=False, + help_text="Boolean flag to indicate if students should be emailed." + ) + + auto_enroll = serializers.BooleanField( + default=False, + help_text="Boolean flag to indicate if the user should be auto-enrolled." + ) + + def validate_identifiers(self, value): + """ + Validate the 'identifiers' field which is now a list of strings. + """ + # Iterate over the list of identifiers and validate each one + validated_list = _split_input_list(value) + if not validated_list: + raise serializers.ValidationError("The identifiers list cannot be empty.") + + return validated_list + + def validate_email_students(self, value): + """ + handle string values like 'true' or 'false'. + """ + if isinstance(value, str): + return value.lower() == 'true' + return bool(value) + + def validate_auto_enroll(self, value): + """ + handle string values like 'true' or 'false'. + """ + if isinstance(value, str): + return value.lower() == 'true' + return bool(value) + + +def _split_input_list(str_list): + """ + Separate out individual student email from the comma, or space separated string. + + e.g. + in: "Lorem@ipsum.dolor, sit@amet.consectetur\nadipiscing@elit.Aenean\r convallis@at.lacus\r, ut@lacinia.Sed" + out: ['Lorem@ipsum.dolor', 'sit@amet.consectetur', 'adipiscing@elit.Aenean', 'convallis@at.lacus', 'ut@lacinia.Sed'] + + `str_list` is a string coming from an input text area + returns a list of separated values + """ + new_list = re.split(r'[,\s\n\r]+', str_list) + new_list = [s.strip() for s in new_list] + new_list = [s for s in new_list if s != ''] + + return new_list + + class CertificateStatusesSerializer(serializers.Serializer): """ Serializer for validating and serializing certificate status inputs. 
From 5bf0b2704f782904be0b1fcfa7355ae486e2ea46 Mon Sep 17 00:00:00 2001 From: Bryann Valderrama <64033729+BryanttV@users.noreply.github.com> Date: Tue, 17 Dec 2024 09:52:56 -0500 Subject: [PATCH 05/20] feat: add schedule queryset request filter integration (#35982) * feat: add schedule queryset request filter integration * chore: remove try-except when running filter * chore: upgrade openedx-filters to v1.12.0 * test: add filter unit tests * test: inherit of ModuleStoreTestCase * fix: add missing attribute in resolver instance --- .../core/djangoapps/schedules/resolvers.py | 5 ++ .../schedules/tests/test_filters.py | 71 +++++++++++++++++++ requirements/edx/base.txt | 2 +- requirements/edx/development.txt | 2 +- requirements/edx/doc.txt | 2 +- requirements/edx/testing.txt | 2 +- 6 files changed, 80 insertions(+), 4 deletions(-) create mode 100644 openedx/core/djangoapps/schedules/tests/test_filters.py diff --git a/openedx/core/djangoapps/schedules/resolvers.py b/openedx/core/djangoapps/schedules/resolvers.py index 5f769e6af299..a07f9c5de7f5 100644 --- a/openedx/core/djangoapps/schedules/resolvers.py +++ b/openedx/core/djangoapps/schedules/resolvers.py @@ -14,6 +14,7 @@ from edx_ace.recipient import Recipient from edx_ace.recipient_resolver import RecipientResolver from edx_django_utils.monitoring import function_trace, set_custom_attribute +from openedx_filters.learning.filters import ScheduleQuerySetRequested from lms.djangoapps.courseware.utils import verified_upgrade_deadline_link, can_show_verified_upgrade from lms.djangoapps.discussion.notification_prefs.views import UsernameCipher @@ -154,6 +155,10 @@ def get_schedules_with_target_date_by_bin_and_orgs( schedules = self.filter_by_org(schedules) + # .. filter_implemented_name: ScheduleQuerySetRequested + # .. filter_type: org.openedx.learning.schedule.queryset.requested.v1 + schedules = ScheduleQuerySetRequested.run_filter(schedules) + if "read_replica" in settings.DATABASES: schedules = schedules.using("read_replica") diff --git a/openedx/core/djangoapps/schedules/tests/test_filters.py b/openedx/core/djangoapps/schedules/tests/test_filters.py new file mode 100644 index 000000000000..9f7efa050447 --- /dev/null +++ b/openedx/core/djangoapps/schedules/tests/test_filters.py @@ -0,0 +1,71 @@ +""" +Test cases for the Open edX Filters associated with the schedule app. +""" + +import datetime +from unittest.mock import Mock + +from django.db.models.query import QuerySet +from django.test import override_settings +from openedx_filters import PipelineStep + +from openedx.core.djangoapps.schedules.resolvers import BinnedSchedulesBaseResolver +from openedx.core.djangoapps.schedules.tests.test_resolvers import SchedulesResolverTestMixin +from openedx.core.djangolib.testing.utils import skip_unless_lms +from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase + + +class TestScheduleQuerySetRequestedPipelineStep(PipelineStep): + """Pipeline step class to test a configured pipeline step""" + + filtered_schedules = Mock(spec=QuerySet, __len__=Mock(return_value=0)) + + def run_filter(self, schedules: QuerySet): # pylint: disable=arguments-differ + """Pipeline step to filter the schedules""" + return { + "schedules": self.filtered_schedules, + } + + +@skip_unless_lms +class ScheduleQuerySetRequestedFiltersTest(SchedulesResolverTestMixin, ModuleStoreTestCase): + """ + Tests for the Open edX Filters associated with the schedule queryset requested. 
+ + The following filters are tested: + - ScheduleQuerySetRequested + """ + + def setUp(self): + super().setUp() + self.resolver = BinnedSchedulesBaseResolver( + async_send_task=Mock(name="async_send_task"), + site=self.site, + target_datetime=datetime.datetime.now(), + day_offset=3, + bin_num=2, + ) + self.resolver.schedule_date_field = "created" + + @override_settings( + OPEN_EDX_FILTERS_CONFIG={ + "org.openedx.learning.schedule.queryset.requested.v1": { + "pipeline": [ + "openedx.core.djangoapps.schedules.tests.test_filters.TestScheduleQuerySetRequestedPipelineStep", + ], + "fail_silently": False, + }, + }, + ) + def test_schedule_with_queryset_requested_filter_enabled(self) -> None: + """Test to verify the schedule queryset was modified by the pipeline step.""" + schedules = self.resolver.get_schedules_with_target_date_by_bin_and_orgs() + + self.assertEqual(TestScheduleQuerySetRequestedPipelineStep.filtered_schedules, schedules) + + @override_settings(OPEN_EDX_FILTERS_CONFIG={}) + def test_schedule_with_queryset_requested_filter_disabled(self) -> None: + """Test to verify the schedule queryset was not modified when the pipeline step is not configured.""" + schedules = self.resolver.get_schedules_with_target_date_by_bin_and_orgs() + + self.assertNotEqual(TestScheduleQuerySetRequestedPipelineStep.filtered_schedules, schedules) diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index f95ae8adb2c9..ccedfe5a0227 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -836,7 +836,7 @@ openedx-events==9.15.0 # edx-name-affirmation # event-tracking # ora2 -openedx-filters==1.11.0 +openedx-filters==1.12.0 # via # -r requirements/edx/kernel.in # lti-consumer-xblock diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt index 75c533947407..0191c779d363 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -1390,7 +1390,7 @@ openedx-events==9.15.0 # edx-name-affirmation # event-tracking # ora2 -openedx-filters==1.11.0 +openedx-filters==1.12.0 # via # -r requirements/edx/doc.txt # -r requirements/edx/testing.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index f7031d349784..dacd1efa6143 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -1005,7 +1005,7 @@ openedx-events==9.15.0 # edx-name-affirmation # event-tracking # ora2 -openedx-filters==1.11.0 +openedx-filters==1.12.0 # via # -r requirements/edx/base.txt # lti-consumer-xblock diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index 57a0dc6341ad..db1ed7889667 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -1050,7 +1050,7 @@ openedx-events==9.15.0 # edx-name-affirmation # event-tracking # ora2 -openedx-filters==1.11.0 +openedx-filters==1.12.0 # via # -r requirements/edx/base.txt # lti-consumer-xblock From f0eb0da1bab5e5d769f22853d811312654975195 Mon Sep 17 00:00:00 2001 From: Tim McCormack Date: Tue, 17 Dec 2024 10:35:08 -0500 Subject: [PATCH 06/20] Revert "temp: Add temporary monitoring for large memcache keys (#36034)" (#36040) This reverts commit fb56042bdc125cff060afb61ca4bb6d4cc066f47. We have the data we need. 
--- common/djangoapps/util/memcache.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/common/djangoapps/util/memcache.py b/common/djangoapps/util/memcache.py index 723ca87098d1..2f7e6dc623a0 100644 --- a/common/djangoapps/util/memcache.py +++ b/common/djangoapps/util/memcache.py @@ -9,7 +9,6 @@ from django.conf import settings from django.utils.encoding import smart_str -from edx_django_utils.monitoring.utils import increment def fasthash(string): @@ -49,18 +48,9 @@ def safe_key(key, key_prefix, version): # Attempt to combine the prefix, version, and key combined = ":".join([key_prefix, version, key]) - # Temporary: Add observability to large-key hashing to help us - # understand the safety of a cutover from md4 to blake2b hashing. - # See https://github.com/edx/edx-arch-experiments/issues/872 - increment('memcache.safe_key.called') - # If the total length is too long for memcache, hash it if len(combined) > 250: combined = fasthash(combined) - # Temporary: See - # https://github.com/edx/edx-arch-experiments/issues/872 and - # previous comment. - increment('memcache.safe_key.hash_large') # Return the result return combined From 882475dd5e81df717100ae3e54f838c326370494 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 17:02:36 +0000 Subject: [PATCH 07/20] feat: Upgrade Python dependency edx-enterprise (#36041) Bump version of edx-enterprise to v5.5.0. Commit generated by workflow `openedx/edx-platform/.github/workflows/upgrade-one-python-dependency.yml@refs/heads/master` Co-authored-by: adamstankiewicz <2828721+adamstankiewicz@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/edx/base.txt | 2 +- requirements/edx/development.txt | 2 +- requirements/edx/doc.txt | 2 +- requirements/edx/testing.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d7608873644e..541197d83242 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -78,7 +78,7 @@ django-storages<1.14.4 # The team that owns this package will manually bump this package rather than having it pulled in automatically. # This is to allow them to better control its deployment and to do it in a process that works better # for them. 
-edx-enterprise==5.4.2 +edx-enterprise==5.5.0 # Date: 2024-05-09 # This has to be constrained as well because newer versions of edx-i18n-tools need the diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index 62a7d37c054a..dfcc8bcc5dd9 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -472,7 +472,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.4.2 +edx-enterprise==5.5.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/kernel.in diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt index 76307eacdcb2..15f08c1377c0 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -748,7 +748,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.4.2 +edx-enterprise==5.5.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/doc.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index f9e2be39f4c2..d23eba58993d 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -555,7 +555,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.4.2 +edx-enterprise==5.5.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index 70bed474e154..98178143005d 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -576,7 +576,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.4.2 +edx-enterprise==5.5.0 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt From 98214a54167444781115c68593d5901c29cd6098 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 15:22:01 -0500 Subject: [PATCH 08/20] feat: Upgrade Python dependency edx-enterprise (#36042) Commit generated by workflow `openedx/edx-platform/.github/workflows/upgrade-one-python-dependency.yml@refs/heads/master` Co-authored-by: bcitro <67378070+bcitro@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/edx/base.txt | 2 +- requirements/edx/development.txt | 2 +- requirements/edx/doc.txt | 2 +- requirements/edx/testing.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 541197d83242..24d822f08744 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -78,7 +78,7 @@ django-storages<1.14.4 # The team that owns this package will manually bump this package rather than having it pulled in automatically. # This is to allow them to better control its deployment and to do it in a process that works better # for them. 
-edx-enterprise==5.5.0 +edx-enterprise==5.5.1 # Date: 2024-05-09 # This has to be constrained as well because newer versions of edx-i18n-tools need the diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index dfcc8bcc5dd9..6f942d00b096 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -472,7 +472,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.0 +edx-enterprise==5.5.1 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/kernel.in diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt index 15f08c1377c0..97b74c737b6f 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -748,7 +748,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.0 +edx-enterprise==5.5.1 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/doc.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index d23eba58993d..4261f47c2be7 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -555,7 +555,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.0 +edx-enterprise==5.5.1 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index 98178143005d..dbc2ea7613f5 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -576,7 +576,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.0 +edx-enterprise==5.5.1 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt From e2a4b9e52411574216137ae4dee08c7a15adae22 Mon Sep 17 00:00:00 2001 From: Ahtisham Shahid Date: Wed, 18 Dec 2024 15:38:05 +0500 Subject: [PATCH 09/20] fix: updated goal reminder email time logger (#36045) --- .../course_goals/management/commands/goal_reminder_email.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py b/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py index eea03bd79455..a221edc8455f 100644 --- a/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py +++ b/lms/djangoapps/course_goals/management/commands/goal_reminder_email.py @@ -1,6 +1,7 @@ """ Command to trigger sending reminder emails for learners to achieve their Course Goals """ +import time from datetime import date, datetime, timedelta from eventtracking import tracker import logging @@ -119,9 +120,9 @@ def send_ace_message(goal, session_id): with emulate_http_request(site, user): try: - start_time = datetime.now() + start_time = time.perf_counter() ace.send(msg) - end_time = datetime.now() + end_time = time.perf_counter() log.info(f"Goal Reminder for {user.id} for course {goal.course_key} sent in {end_time - start_time} " f"using {'SES' if is_ses_enabled else 'others'}") except Exception as exc: # pylint: disable=broad-except From bdb1ae53151c43de258b5997f011d171c73523d3 Mon Sep 17 00:00:00 2001 From: marlonkeating <322346+marlonkeating@users.noreply.github.com> Date: Wed, 18 Dec 2024 16:19:29 +0000 Subject: [PATCH 10/20] feat: Upgrade Python dependency edx-enterprise This change adds support for the page_size query parameter on the enterprise-customer-members endpoint. 
Commit generated by workflow `openedx/edx-platform/.github/workflows/upgrade-one-python-dependency.yml@refs/heads/master` --- requirements/constraints.txt | 2 +- requirements/edx/base.txt | 2 +- requirements/edx/development.txt | 2 +- requirements/edx/doc.txt | 2 +- requirements/edx/testing.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 24d822f08744..e65e19a574fc 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -78,7 +78,7 @@ django-storages<1.14.4 # The team that owns this package will manually bump this package rather than having it pulled in automatically. # This is to allow them to better control its deployment and to do it in a process that works better # for them. -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # Date: 2024-05-09 # This has to be constrained as well because newer versions of edx-i18n-tools need the diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt index 6f942d00b096..bbe8c32ce549 100644 --- a/requirements/edx/base.txt +++ b/requirements/edx/base.txt @@ -472,7 +472,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/kernel.in diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt index 97b74c737b6f..e32139843ed6 100644 --- a/requirements/edx/development.txt +++ b/requirements/edx/development.txt @@ -748,7 +748,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/doc.txt diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt index 4261f47c2be7..76005577e958 100644 --- a/requirements/edx/doc.txt +++ b/requirements/edx/doc.txt @@ -555,7 +555,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt index dbc2ea7613f5..5880b578d9a7 100644 --- a/requirements/edx/testing.txt +++ b/requirements/edx/testing.txt @@ -576,7 +576,7 @@ edx-drf-extensions==10.5.0 # edx-when # edxval # openedx-learning -edx-enterprise==5.5.1 +edx-enterprise==5.5.2 # via # -c requirements/edx/../constraints.txt # -r requirements/edx/base.txt From 4c0d94e367033351690e6c776ff28a567930cce8 Mon Sep 17 00:00:00 2001 From: "Kyle D. McCormick" Date: Thu, 12 Dec 2024 14:11:13 -0500 Subject: [PATCH 11/20] docs: Update testing guide --- docs/concepts/testing/testing.rst | 324 +++++++++--------------------- 1 file changed, 94 insertions(+), 230 deletions(-) diff --git a/docs/concepts/testing/testing.rst b/docs/concepts/testing/testing.rst index 9d448afd5bdc..d8a79b9d6619 100644 --- a/docs/concepts/testing/testing.rst +++ b/docs/concepts/testing/testing.rst @@ -1,4 +1,3 @@ -####### Testing ####### @@ -7,7 +6,7 @@ Testing :depth: 3 Overview -======== +******** We maintain two kinds of tests: unit tests and integration tests. @@ -26,10 +25,10 @@ tests. Most of our tests are unit tests or integration tests. Test Types ----------- +========== Unit Tests -~~~~~~~~~~ +---------- - Each test case should be concise: setup, execute, check, and teardown. 
If you find yourself writing tests with many steps, @@ -38,18 +37,18 @@ Unit Tests - As a rule of thumb, your unit tests should cover every code branch. -- Mock or patch external dependencies. We use the voidspace `Mock Library`_. +- Mock or patch external dependencies using `unittest.mock`_ functions. - We unit test Python code (using `unittest`_) and Javascript (using `Jasmine`_) -.. _Mock Library: http://www.voidspace.org.uk/python/mock/ +.. _unittest.mock: https://docs.python.org/3/library/unittest.mock.html .. _unittest: http://docs.python.org/2/library/unittest.html .. _Jasmine: http://jasmine.github.io/ Integration Tests -~~~~~~~~~~~~~~~~~ +----------------- - Test several units at the same time. Note that you can still mock or patch dependencies that are not under test! For example, you might test that @@ -67,7 +66,7 @@ Integration Tests .. _Django test client: https://docs.djangoproject.com/en/dev/topics/testing/overview/ Test Locations --------------- +============== - Python unit and integration tests: Located in subpackages called ``tests``. For example, the tests for the ``capa`` package are @@ -80,14 +79,29 @@ Test Locations the test for ``src/views/module.js`` should be written in ``spec/views/module_spec.js``. -Running Tests -============= +Factories +========= -**Unless otherwise mentioned, all the following commands should be run from inside the lms docker container.** +Many tests delegate set-up to a "factory" class. For example, there are +factories for creating courses, problems, and users. This encapsulates +set-up logic from tests. +Factories are often implemented using `FactoryBoy`_. + +In general, factories should be located close to the code they use. For +example, the factory for creating problem XML definitions is located in +``xmodule/capa/tests/response_xml_factory.py`` because the +``capa`` package handles problem XML. + +.. _FactoryBoy: https://readthedocs.org/projects/factoryboy/ Running Python Unit tests -------------------------- +************************* + +The following commands need to be run within a Python environment in +which requirements/edx/testing.txt has been installed. If you are using a +Docker-based Open edX distribution, then you probably will want to run these +commands within the LMS and/or CMS Docker containers. We use `pytest`_ to run Python tests. Pytest is a testing framework for python and should be your goto for local Python unit testing. @@ -97,16 +111,16 @@ Pytest (and all of the plugins we use with it) has a lot of options. Use `pytest Running Python Test Subsets -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +=========================== When developing tests, it is often helpful to be able to really just run one single test without the overhead of PIP installs, UX builds, etc. Various ways to run tests using pytest:: - pytest path/test_m­odule.py # Run all tests in a module. - pytest path/test_m­odule.p­y:­:te­st_func # Run a specific test within a module. - pytest path/test_m­odule.p­y:­:Te­stC­las­s # Run all tests in a class - pytest path/test_m­odule.p­y:­:Te­stC­las­s::­tes­t_m­ethod # Run a specific method of a class. + pytest path/test_module.py # Run all tests in a module. + pytest path/test_module.py::test_func # Run a specific test within a module. + pytest path/test_module.py::TestClass # Run all tests in a class + pytest path/test_module.py::TestClass::test_method # Run a specific method of a class. pytest path/testing/ # Run all tests in a directory. 
For example, this command runs a single python unit test file:: @@ -114,7 +128,7 @@ For example, this command runs a single python unit test file:: pytest xmodule/tests/test_stringify.py Note - -edx-platorm has multiple services (lms, cms) in it. The environment for each service is different enough that we run some tests in both environments in Github Actions. +edx-platorm has multiple services (lms, cms) in it. The environment for each service is different enough that we run some tests in both environments in Github Actions. To test in each of these environments (especially for tests in "common" and "xmodule" directories), you will need to test in each seperately. To specify that the tests are run with the relevant service as root, Add --rootdir flag at end of your pytest call and specify the env to test in:: @@ -139,7 +153,7 @@ Various tools like ddt create tests with very complex names, rather than figurin pytest xmodule/tests/test_stringify.py --collectonly Testing with migrations -*********************** +======================= For the sake of speed, by default the python unit test database tables are created directly from apps' models. If you want to run the tests @@ -149,7 +163,7 @@ against a database created by applying the migrations instead, use the pytest test --create-db --migrations Debugging a test -~~~~~~~~~~~~~~~~ +================ There are various ways to debug tests in Python and more specifically with pytest: @@ -173,7 +187,7 @@ There are various ways to debug tests in Python and more specifically with pytes How to output coverage locally -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +============================== These are examples of how to run a single test and get coverage:: @@ -220,234 +234,84 @@ run one of these commands:: .. _YouTube stub server: https://github.com/openedx/edx-platform/blob/master/common/djangoapps/terrain/stubs/tests/test_youtube_stub.py -Debugging Unittest Flakiness -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -As we move over to running our unittests with Jenkins Pipelines and pytest-xdist, -there are new ways for tests to flake, which can sometimes be difficult to debug. -If you run into flakiness, check (and feel free to contribute to) this -`confluence document `__ for help. - -Running Javascript Unit Tests ------------------------------ - -Before running Javascript unit tests, you will need to be running Firefox or Chrome in a place visible to edx-platform. If running this in devstack, you can run ``make dev.up.firefox`` or ``make dev.up.chrome``. Firefox is the default browser for the tests, so if you decide to use Chrome, you will need to prefix the test command with ``SELENIUM_BROWSER=chrome SELENIUM_HOST=edx.devstack.chrome`` (if using devstack). - -We use Jasmine to run JavaScript unit tests. 
To run all the JavaScript -tests:: - - paver test_js - -To run a specific set of JavaScript tests and print the results to the -console, run these commands:: - - paver test_js_run -s lms - paver test_js_run -s cms - paver test_js_run -s cms-squire - paver test_js_run -s xmodule - paver test_js_run -s xmodule-webpack - paver test_js_run -s common - paver test_js_run -s common-requirejs - -To run JavaScript tests in a browser, run these commands:: - - paver test_js_dev -s lms - paver test_js_dev -s cms - paver test_js_dev -s cms-squire - paver test_js_dev -s xmodule - paver test_js_dev -s xmodule-webpack - paver test_js_dev -s common - paver test_js_dev -s common-requirejs - -Debugging Specific Javascript Tests -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The best way to debug individual tests is to run the test suite in the browser and -use your browser's Javascript debugger. The debug page will allow you to select -an individual test and only view the results of that test. - - -Debugging Tests in a Browser -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To debug these tests on devstack in a local browser: - -* first run the appropriate test_js_dev command from above -* open http://localhost:19876/debug.html in your host system's browser of choice -* this will run all the tests and show you the results including details of any failures -* you can click on an individually failing test and/or suite to re-run it by itself -* you can now use the browser's developer tools to debug as you would any other JavaScript code - -Note: the port is also output to the console that you ran the tests from if you find that easier. - -These paver commands call through to Karma. For more -info, see `karma-runner.github.io `__. - -Testing internationalization with dummy translations ----------------------------------------------------- - -Any text you add to the platform should be internationalized. To generate translations for your new strings, run the following command:: - - paver i18n_dummy - -This command generates dummy translations for each dummy language in the -platform and puts the dummy strings in the appropriate language files. -You can then preview the dummy languages on your local machine and also in your sandbox, if and when you create one. - -The dummy language files that are generated during this process can be -found in the following locations:: - - conf/locale/{LANG_CODE} - -There are a few JavaScript files that are generated from this process. You can find those in the following locations:: - - lms/static/js/i18n/{LANG_CODE} - cms/static/js/i18n/{LANG_CODE} - -Do not commit the ``.po``, ``.mo``, ``.js`` files that are generated -in the above locations during the dummy translation process! +Handling flaky unit tests +========================= -Test Coverage and Quality -------------------------- +See this `confluence document `_. -Viewing Test Coverage -~~~~~~~~~~~~~~~~~~~~~ -We currently collect test coverage information for Python -unit/integration tests. +Running JavaScript Unit Tests +***************************** -To view test coverage: +Before running Javascript unit tests, you will need to be running Firefox or Chrome in a place visible to edx-platform. +If you are using Tutor Dev to run edx-platform, then you can do so by installing and enabling the +``test-legacy-js`` plugin from `openedx-tutor-plugins`_, and then rebuilding +the ``openedx-dev`` image:: -1. 
Run the test suite with this command:: + tutor plugins install https://github.com/openedx/openedx-tutor-plugins/tree/main/plugins/tutor-contrib-test-legacy-js + tutor plugins enable test-legacy-js + tutor images build openedx-dev - paver test +.. _openedx-tutor-plugins: https://github.com/openedx/openedx-tutor-plugins/ -2. Generate reports with this command:: +We use Jasmine (via Karma) to run most JavaScript unit tests. We use Jest to +run a small handful of additional JS unit tests. You can use the ``npm run +test*`` commands to run them:: - paver coverage + npm run test-karma # Run all Jasmine+Karma tests. + npm run test-jest # Run all Jest tests. + npm run test # Run both of the above. -3. Reports are located in the ``reports`` folder. The command generates - HTML and XML (Cobertura format) reports. +The Karma tests are further broken down into three types depending on how the +JavaScript it is testing is built:: -Python Code Style Quality -~~~~~~~~~~~~~~~~~~~~~~~~~ + npm run test-karma-vanilla # Our very oldest JS, which doesn't even use RequireJS + npm run test-karma-require # Old JS that uses RequireJS + npm run test-karma-webpack # Slightly "newer" JS which is built with Webpack -To view Python code style quality (including PEP 8 and pylint violations) run this command:: +Unfortunately, at the time of writing, the build for the ``test-karma-webpack`` +tests is broken. The tests are excluded from ``npm run test-karma`` as to not +fail CI. We `may fix this one day`_. - paver run_quality +.. _may fix this one day: https://github.com/openedx/edx-platform/issues/35956 -More specific options are below. +To run all Karma+Jasmine tests for a particular top-level edx-platform folder, +you can run:: -- These commands run a particular quality report:: + npm run test-cms + npm run test-lms + npm run test-xmodule + npm run test-common - paver run_pep8 - paver run_pylint - -- This command runs a report, and sets it to fail if it exceeds a given number - of violations:: - - paver run_pep8 --limit=800 - -- The ``run_quality`` uses the underlying diff-quality tool (which is packaged - with `diff-cover`_). With that, the command can be set to fail if a certain - diff threshold is not met. For example, to cause the process to fail if - quality expectations are less than 100% when compared to master (or in other - words, if style quality is worse than what is already on master):: - - paver run_quality --percentage=100 - -- Note that 'fixme' violations are not counted with run\_quality. To - see all 'TODO' lines, use this command:: - - paver find_fixme --system=lms - - ``system`` is an optional argument here. It defaults to - ``cms,lms,common``. - -.. _diff-cover: https://github.com/Bachmann1234/diff-cover - - -JavaScript Code Style Quality -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To view JavaScript code style quality run this command:: - - paver run_eslint - -- This command also comes with a ``--limit`` switch, this is an example of that switch:: - - paver run_eslint --limit=50000 - - -Code Complexity Tools -===================== - -Tool(s) available for evaluating complexity of edx-platform code: - - -- `plato `__ for JavaScript code - complexity. Several options are available on the command line; see - documentation. 
Below, the following command will produce an HTML report in a - subdirectory called "jscomplexity":: - - plato -q -x common/static/js/vendor/ -t common -e .eslintrc.json -r -d jscomplexity common/static/js/ - -Other Testing Tips -================== - -Connecting to Browser ---------------------- - -If you want to see the browser being automated for JavaScript, -you can connect to the container running it via VNC. - -+------------------------+----------------------+ -| Browser | VNC connection | -+========================+======================+ -| Firefox (Default) | vnc://0.0.0.0:25900 | -+------------------------+----------------------+ -| Chrome (via Selenium) | vnc://0.0.0.0:15900 | -+------------------------+----------------------+ - -On macOS, enter the VNC connection string in Safari to connect via VNC. The VNC -passwords for both browsers are randomly generated and logged at container -startup, and can be found by running ``make vnc-passwords``. - -Most tests are run in Firefox by default. To use Chrome for tests that normally -use Firefox instead, prefix the test command with -``SELENIUM_BROWSER=chrome SELENIUM_HOST=edx.devstack.chrome`` - -Factories ---------- - -Many tests delegate set-up to a "factory" class. For example, there are -factories for creating courses, problems, and users. This encapsulates -set-up logic from tests. - -Factories are often implemented using `FactoryBoy`_. - -In general, factories should be located close to the code they use. For -example, the factory for creating problem XML definitions is located in -``xmodule/capa/tests/response_xml_factory.py`` because the -``capa`` package handles problem XML. - -.. _FactoryBoy: https://readthedocs.org/projects/factoryboy/ +Finally, if you want to pass any options to the underlying ``node`` invocation +for Karma+Jasmine tests, you can run one of these specific commands, and put +your arguments after the ``--`` separator:: -Running Tests on Paver Scripts ------------------------------- + npm run test-cms-vanilla -- --your --args --here + npm run test-cms-require -- --your --args --here + npm run test-cms-webpack -- --your --args --here + npm run test-lms-webpack -- --your --args --here + npm run test-xmodule-vanilla -- --your --args --here + npm run test-xmodule-webpack -- --your --args --here + npm run test-common-vanilla -- --your --args --here + npm run test-common-require -- --your --args --here -To run tests on the scripts that power the various Paver commands, use the following command:: - pytest pavelib +Code Quality +************ -Testing using queue servers ---------------------------- +We use several tools to analyze code quality. The full set of them is:: -When testing problems that use a queue server on AWS (e.g. -sandbox-xqueue.edx.org), you'll need to run your server on your public IP, like so:: + mypy $PATHS... + pycodestyle $PATHS... + pylint $PATHS... + lint-imports + scripts/verify-dunder-init.sh + make xsslint + make pii_check + make check_keywords + npm run lint - ./manage.py lms runserver 0.0.0.0:8000 +Where ``$PATHS...`` is a list of folders and files to analyze, or nothing if +you would like to analyze the entire codebase (which can take a while). -When you connect to the LMS, you need to use the public ip. Use -``ifconfig`` to figure out the number, and connect e.g. to -``http://18.3.4.5:8000/`` From 41d28f3ce332d824891a6127367fe1d0cc21181c Mon Sep 17 00:00:00 2001 From: "Kyle D. 
McCormick" Date: Tue, 17 Dec 2024 15:30:07 -0500 Subject: [PATCH 12/20] docs: Turn old static asset plan into a retroactive ADR --- .../0000-static-asset-plan.rst} | 53 ++++++++++--------- 1 file changed, 29 insertions(+), 24 deletions(-) rename docs/{concepts/frontend/static_assets.rst => decisions/0000-static-asset-plan.rst} (90%) diff --git a/docs/concepts/frontend/static_assets.rst b/docs/decisions/0000-static-asset-plan.rst similarity index 90% rename from docs/concepts/frontend/static_assets.rst rename to docs/decisions/0000-static-asset-plan.rst index 89e7a64f4455..00fcc0726ad7 100644 --- a/docs/concepts/frontend/static_assets.rst +++ b/docs/decisions/0000-static-asset-plan.rst @@ -1,6 +1,22 @@ -####################################### -edx-platform Static Asset Pipeline Plan -####################################### +0. edx-platform Static Asset Pipeline Plan +########################################## + +Status +****** + +Accepted ~2017 +Partially adopted 2017-2024 + +This was an old plan for modernizing Open edX's frontend assets. We've +retroactively turned it into an ADR because it has some valuable insights. +Although most of these improvements weren't applied as written, these ideas +(particularly, separating Python concerns from frontend tooling concerns) were +applied to both legacy edx-platform assets as well as the Micro-Frontend +framework that was developed 2017-2019. + +Context, Decision, Consequences +******************************* + Static asset handling in edx-platform has evolved in a messy way over the years. This has led to a lot of complexity and inconsistencies. This is a proposal for @@ -9,20 +25,9 @@ this is not a detailed guide for how to write React or Bootstrap code. This is instead going to talk about conventions for how we arrange, extract, and compile static assets. -Big Open Questions (TODO) -************************* - -This document is a work in progress, as the design for some of this is still in -flux, particularly around extensibility. - -* Pluggable third party apps and Webpack packaging. -* Keep the Django i18n mechanism? -* Stance on HTTP/2 and bundling granularity. -* Optimizing theme assets. -* Tests Requirements -************ +============ Any proposed solution must support: @@ -35,7 +40,7 @@ Any proposed solution must support: * Other kinds of pluggability??? Assumptions -*********** +=========== Some assumptions/opinions that this proposal is based on: @@ -54,8 +59,8 @@ Some assumptions/opinions that this proposal is based on: * It should be possible to pre-build static assets and deploy them onto S3 or similar. -Where We Are Today -****************** +Where We Are Today (2017) +========================= We have a static asset pipeline that is mostly driven by Django's built-in staticfiles finders and the collectstatic process. We use the popular @@ -95,9 +100,9 @@ places (typically ``/edx/var/edxapp/staticfiles`` for LMS and ``/edx/var/edxapp/staticfiles/studio`` for Studio) and can be collected separately. However in practice they're always run together because we deploy them from the same commits and to the same servers. - + Django vs. Webpack Conventions -****************************** +============================== The Django convention for having an app with bundled assets is to namespace them locally with the app name so that they get their own directories when they are @@ -112,7 +117,7 @@ the root of edx-platform, which would specify all bundles in the project. TODO: The big, "pluggable Webpack components" question. 
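+
+As a small, hypothetical illustration of the Django-side convention described
+above (the app name ``myapp`` and the file name are made up, and the snippet
+assumes it runs inside a configured Django environment, e.g. a management
+shell), an app-namespaced asset is looked up like this::
+
+    from django.contrib.staticfiles import finders
+
+    # The file lives at myapp/static/myapp/bundle.js in the app's source tree,
+    # so after collectstatic it ends up under STATIC_ROOT/myapp/bundle.js and
+    # cannot collide with another app's files of the same name.
+    print(finders.find("myapp/bundle.js"))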
Proposed Repo Structure -*********************** +======================= All assets that are in common spaces like ``common/static``, ``lms/static``, and ``cms/static`` would be moved to be under the Django apps that they are a @@ -122,7 +127,7 @@ part of and follow the Django naming convention (e.g. any client-side templates will be put in ``static/{appname}/templates``. Proposed Compiled Structure -*************************** +=========================== This is meant to be a sample of the different types of things we'd have, not a full list: @@ -150,14 +155,14 @@ full list: /theming/themes/open-edx /red-theme /edx.org - + # XBlocks still collect their assets into a common space (/xmodule goes away) # We consider this to be the XBlock Runtime's app, and it collects static # assets from installed XBlocks. /xblock Django vs. Webpack Roles -************************ +======================== Rule of thumb: Django/Python still serves static assets, Webpack processes and optimizes them. From 29d4c69e5716168cfc23c2230f77de083eee6199 Mon Sep 17 00:00:00 2001 From: "Kyle D. McCormick" Date: Fri, 13 Dec 2024 15:46:39 -0500 Subject: [PATCH 13/20] build!: Rejigger "npm run test" to be more intuitive This is technically a breaking change, but these commands were added a week ago and were not yet documented, so I'm not worried about breaking anyone's workflow with this commit. --- package.json | 35 ++++++++++++++++++----------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/package.json b/package.json index 2f09f8a7df90..8ed93a322633 100644 --- a/package.json +++ b/package.json @@ -14,24 +14,25 @@ "watch-webpack": "npm run webpack-dev -- --watch", "watch-sass": "scripts/watch_sass.sh", "lint": "python scripts/eslint.py", - "test": "npm run test-cms && npm run test-lms && npm run test-xmodule && npm run test-common && npm run test-jest", - "test-kind-vanilla": "npm run test-cms-vanilla && npm run test-xmodule-vanilla && npm run test-common-vanilla", - "test-kind-require": "npm run test-cms-require && npm run test-common-require", - "test-kind-webpack": "npm run test-cms-webpack && npm run test-lms-webpack && npm run test-xmodule-webpack", - "test-cms": "npm run test-cms-vanilla && npm run test-cms-require", - "test-cms-vanilla": "npm run test-suite -- cms/static/karma_cms.conf.js", - "test-cms-require": "npm run test-suite -- cms/static/karma_cms_squire.conf.js", - "test-cms-webpack": "npm run test-suite -- cms/static/karma_cms_webpack.conf.js", - "test-lms": "echo 'WARNING: Webpack JS tests are disabled. No LMS JS tests will be run. See https://github.com/openedx/edx-platform/issues/35956 for details.'", - "test-lms-webpack": "npm run test-suite -- lms/static/karma_lms.conf.js", - "test-xmodule": "npm run test-xmodule-vanilla", - "test-xmodule-vanilla": "npm run test-suite -- xmodule/js/karma_xmodule.conf.js", - "test-xmodule-webpack": "npm run test-suite -- xmodule/js/karma_xmodule_webpack.conf.js", + "test": "npm run test-jest && npm run test-karma", + "test-jest": "jest", + "test-karma": "npm run test-karma-vanilla && npm run test-karma-require && echo 'WARNING: Skipped broken webpack tests. 
For details, see: https://github.com/openedx/edx-platform/issues/35956'", + "test-karma-vanilla": "npm run test-cms-vanilla && npm run test-xmodule-vanilla && npm run test-common-vanilla", + "test-karma-require": "npm run test-cms-require && npm run test-common-require", + "test-karma-webpack": "npm run test-cms-webpack && npm run test-lms-webpack && npm run test-xmodule-webpack", + "test-karma-conf": "${NODE_WRAPPER:-xvfb-run --auto-servernum} node --max_old_space_size=4096 node_modules/.bin/karma start --single-run=true --capture-timeout=60000 --browsers=FirefoxNoUpdates", + "test-cms": "npm run test-cms-vanilla && npm run test-cms-require && npm run test-cms-webpack", + "test-cms-vanilla": "npm run test-karma-conf -- cms/static/karma_cms.conf.js", + "test-cms-require": "npm run test-karma-conf -- cms/static/karma_cms_squire.conf.js", + "test-cms-webpack": "npm run test-karma-conf -- cms/static/karma_cms_webpack.conf.js", + "test-lms": "npm run test-jest && npm run test-lms-webpack", + "test-lms-webpack": "npm run test-karma-conf -- lms/static/karma_lms.conf.js", + "test-xmodule": "npm run test-xmodule-vanilla && npm run test-xmodule-webpack", + "test-xmodule-vanilla": "npm run test-karma-conf -- xmodule/js/karma_xmodule.conf.js", + "test-xmodule-webpack": "npm run test-karma-conf -- xmodule/js/karma_xmodule_webpack.conf.js", "test-common": "npm run test-common-vanilla && npm run test-common-require", - "test-common-vanilla": "npm run test-suite -- common/static/karma_common.conf.js", - "test-common-require": "npm run test-suite -- common/static/karma_common_requirejs.conf.js", - "test-suite": "${NODE_WRAPPER:-xvfb-run --auto-servernum} node --max_old_space_size=4096 node_modules/.bin/karma start --single-run=true --capture-timeout=60000 --browsers=FirefoxNoUpdates", - "test-jest": "jest" + "test-common-vanilla": "npm run test-karma-conf -- common/static/karma_common.conf.js", + "test-common-require": "npm run test-karma-conf -- common/static/karma_common_requirejs.conf.js" }, "dependencies": { "@babel/core": "7.26.0", From 54250632d8b83feb5613f8e8920cc229390d2309 Mon Sep 17 00:00:00 2001 From: Kyle McCormick Date: Wed, 18 Dec 2024 14:09:43 -0500 Subject: [PATCH 14/20] docs: ADR for Fixing Quality and JS Checks (#35741) This is a record of the various existing CI issues we faced and the trade-offs we made to move forward while replacing the Paver CI commands as part of: https://github.com/openedx/edx-platform/issues/34467 --- .../0021-fixing-quality-and-js-checks.rst | 143 ++++++++++++++++++ 1 file changed, 143 insertions(+) create mode 100644 docs/decisions/0021-fixing-quality-and-js-checks.rst diff --git a/docs/decisions/0021-fixing-quality-and-js-checks.rst b/docs/decisions/0021-fixing-quality-and-js-checks.rst new file mode 100644 index 000000000000..7d80039a8cb0 --- /dev/null +++ b/docs/decisions/0021-fixing-quality-and-js-checks.rst @@ -0,0 +1,143 @@ +Fixing the Quality and JS checks +################################ + +Status +****** + +Accepted + +Implemented by https://github.com/openedx/edx-platform/pull/35159 + +Context +******* + +edx-platform PRs need to pass a series of CI checks before merging, including +but not limited to: a CLA check, various unit tests, and various code quality +tests. Of these checks, two checks were implemented using the "Paver" Python +package, a scripting library `which we have been trying to move off of`_. 
These
+two checks and their steps were:
+
+* **Check: Quality others**
+
+  * **pii_check**: Ensure that Django models have PII annotations as
+    described in `OEP-30`_, with a minimum threshold of **94.5%** of models
+    annotated.
+  * **stylelint**: Statically check sass stylesheets for common errors.
+  * **pep8**: Run pycodestyle against Python code.
+  * **eslint**: Statically check javascript code for common errors.
+  * **xsslint**: Check python & javascript for xss vulnerabilities.
+  * **check_keywords**: Compare Django model field names against a denylist of
+    reserved keywords.
+
+* **Check: JS**
+
+  * **test-js**: Run javascript unit tests.
+  * **coverage-js**: Check that javascript test coverage has not dropped.
+
+As we worked to reimplement these checks without Paver, we unfortunately
+noticed that four of those steps had bugs in their implementations, and thus
+had not been enforcing what they promised to:
+
+* **pii_check**: Instead of just checking the result of the underlying
+  code_annotations command, this check wrote an annotations report to a file,
+  and then used regex to parse the report and determine whether the check
+  should pass. However, the check failed to validate that the generation of the
+  report itself was successful. So, when malformed annotations were introduced
+  to the edx-proctoring repository, which edx-platform installs, the check
+  began silently passing.
+
+* **stylelint**: At some point, the `stylelint` binary stopped being available
+  on the runner's `$PATH`. Rather than causing the Quality Others check to
+  fail, the Paver code quietly ignored the shell error, and considered the
+  empty stylelint report file to indicate that there were no linting
+  violations.
+
+* **test-js**: There are eight suites within test-js. Six of them work fine.
+  But three of them--specifically the suites that test code built by Webpack--
+  have not been running for some unknown amount of time. The Webpack test build
+  has been failing without signalling that the test suite should fail,
+  both preventing the tests from running and preventing anyone from noticing
+  that the tests weren't running.
+
+* **coverage-js**: This check tried to use `diff-cover` in order to compare the
+  coverage report on the current branch with the coverage report on the master
+  branch. However, the coverage report does not exist on the master branch, and
+  it's not clear when it ever did. The coverage-js step failed to validate that
+  `diff-cover` ran successfully, and instead of raising an error, it allowed
+  the JS check to pass.
+
+Decision & Consequences
+***********************
+
+pii_check
+=========
+
+Our first step was to `fix the malformed annotations`_ in edx-proctoring,
+allowing the pii_check to once again check model coverage. We have ensured
+that any future failure of the code_annotations command (due to, for example,
+future malformed annotations) will cause the pii_check step and the overall
+Quality Others check to fail. We have stopped trying to parse the result of
+the annotations report in CI, as this was and is completely unnecessary.
+
+In order to keep "Quality others" passing on the edx-platform master branch, we
+lowered the PII annotation coverage threshold to reflect the percentage of
+then-annotated models: **71.6%**. After a timeboxed effort to add missing
+annotations and expand the annotation allowlist as appropriate, we have managed
+to raise the threshold to **85.3%**. It is not clear whether we will put in
+further effort to raise the annotation threshold back to 95%.
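+
+As a rough illustration of that change (this is a sketch, not a copy of the
+actual CI step; the command arguments and config file name below are only
+examples), the step now relies on the exit status of the underlying command
+instead of regex-parsing the report it writes::
+
+    import subprocess
+    import sys
+
+    # Run the annotation coverage check and trust its exit status. Any failure
+    # (including a malformed annotation in an installed package such as
+    # edx-proctoring) now fails the pii_check step instead of passing silently.
+    result = subprocess.run(
+        ["code_annotations", "django_find_annotations",
+         "--config_file", ".pii_annotations.yml", "--coverage"],
+    )
+    sys.exit(result.returncode)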
+
+This was all already `announced on the forums`_.
+
+stylelint
+=========
+
+We have removed the **stylelint** step entirely from the "Quality Others"
+check. Sass code in the edx-platform repository will no longer be subject to
+any static analysis.
+
+test-js
+=======
+
+We have stopped running these Webpack-based suites in CI:
+
+* ``npm run test-lms-webpack``
+* ``npm run test-cms-webpack``
+* ``npm run test-xmodule-webpack``
+
+We have created a new edx-platform backlog issue for
+`fixing and re-enabling these suites`_.
+It is not clear whether we will prioritize that issue, or instead prioritize
+deprecation and removal of the code that those suites were supposed to be
+testing.
+
+coverage-js
+===========
+
+We will remove the **coverage-js** step entirely from the "JS" check.
+JavaScript code in the edx-platform repository will no longer be subject to any
+unit test coverage checking.
+
+Rejected Alternatives
+*********************
+
+* While it would be ideal to raise the pii_check threshold to 94.5% or even
+  100%, we do not have the resources to promise this.
+
+* It would also be nice to institute a "ratcheting" mechanism for the PII
+  annotation coverage threshold. That is, every commit to master could save the
+  coverage percentage to a persisted artifact, allowing subsequent PRs to
+  ensure that the pii_check never returns lower than the current threshold. We
+  will put this in the Aximprovements backlog, but we cannot commit to
+  implementing it right now.
+
+* We will not fix or apply amnesty in order to re-enable stylelint or
+  coverage-js. That could take significant effort, which we believe would be
+  better spent completing the migration off of this legacy Sass and JS and onto
+  our modern React frontends.
+
+
+.. _fixing and re-enabling these suites: https://github.com/openedx/edx-platform/issues/35956
+.. _which we have been trying to move off of: https://github.com/openedx/edx-platform/issues/34467
+.. _announced on the forums: https://discuss.openedx.org/t/checking-pii-annotations-with-a-lower-coverage-threshold/14254
+.. _OEP-30: https://docs.openedx.org/projects/openedx-proposals/en/latest/architectural-decisions/oep-0030-arch-pii-markup-and-auditing.html
+.. 
_fix the malformed annotations: https://github.com/openedx/edx-proctoring/issues/1241 From 930989e5e6dbd2c5253ec3d05652e55c43379725 Mon Sep 17 00:00:00 2001 From: Muhammad Adeel Tajamul <77053848+muhammadadeeltajamul@users.noreply.github.com> Date: Thu, 19 Dec 2024 16:57:27 +0500 Subject: [PATCH 15/20] fix: fixed notification generated event for grouped notifications (#36048) --- openedx/core/djangoapps/notifications/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openedx/core/djangoapps/notifications/tasks.py b/openedx/core/djangoapps/notifications/tasks.py index 75ad3f1ecd0b..ddd690cf996b 100644 --- a/openedx/core/djangoapps/notifications/tasks.py +++ b/openedx/core/djangoapps/notifications/tasks.py @@ -187,7 +187,7 @@ def send_notifications(user_ids, course_key: str, app_name, notification_type, c group_user_notifications(new_notification, existing_notifications[user_id]) else: notifications.append(new_notification) - generated_notification_audience.append(user_id) + generated_notification_audience.append(user_id) # send notification to users but use bulk_create notification_objects = Notification.objects.bulk_create(notifications) From 85a5890dd1f14a585b024da03954dca6f7a95aa9 Mon Sep 17 00:00:00 2001 From: Ahtisham Shahid Date: Thu, 19 Dec 2024 19:04:16 +0500 Subject: [PATCH 16/20] feat: added api to update all notification preferences for user (#35795) --- .../djangoapps/notifications/email/utils.py | 22 +- .../djangoapps/notifications/serializers.py | 116 ++++++- .../notifications/tests/test_utils.py | 288 ++++++++++++++++ .../notifications/tests/test_views.py | 315 +++++++++++++++++- openedx/core/djangoapps/notifications/urls.py | 14 +- .../core/djangoapps/notifications/utils.py | 115 ++++++- .../core/djangoapps/notifications/views.py | 155 ++++++++- 7 files changed, 1001 insertions(+), 24 deletions(-) create mode 100644 openedx/core/djangoapps/notifications/tests/test_utils.py diff --git a/openedx/core/djangoapps/notifications/email/utils.py b/openedx/core/djangoapps/notifications/email/utils.py index 9fd761785e5a..34c245308785 100644 --- a/openedx/core/djangoapps/notifications/email/utils.py +++ b/openedx/core/djangoapps/notifications/email/utils.py @@ -9,7 +9,7 @@ from django.contrib.auth import get_user_model from django.shortcuts import get_object_or_404 from pytz import utc -from waffle import get_waffle_flag_model # pylint: disable=invalid-django-waffle-import +from waffle import get_waffle_flag_model # pylint: disable=invalid-django-waffle-import from common.djangoapps.student.models import CourseEnrollment from lms.djangoapps.branding.api import get_logo_url_for_email @@ -29,7 +29,6 @@ from .notification_icons import NotificationTypeIcons - User = get_user_model() @@ -370,14 +369,6 @@ def is_name_match(name, param_name): """ return True if param_name is None else name == param_name - def is_editable(app_name, notification_type, channel): - """ - Returns if notification type channel is editable - """ - if notification_type == 'core': - return channel not in COURSE_NOTIFICATION_APPS[app_name]['non_editable'] - return channel not in COURSE_NOTIFICATION_TYPES[notification_type]['non_editable'] - def get_default_cadence_value(app_name, notification_type): """ Returns default email cadence value @@ -417,9 +408,18 @@ def get_updated_preference(pref): for channel in ['web', 'email', 'push']: if not is_name_match(channel, channel_value): continue - if is_editable(app_name, noti_type, channel): + if is_notification_type_channel_editable(app_name, noti_type, 
channel): type_prefs[channel] = pref_value if channel == 'email' and pref_value and type_prefs.get('email_cadence') == EmailCadence.NEVER: type_prefs['email_cadence'] = get_default_cadence_value(app_name, noti_type) preference.save() notification_preference_unsubscribe_event(user) + + +def is_notification_type_channel_editable(app_name, notification_type, channel): + """ + Returns if notification type channel is editable + """ + if notification_type == 'core': + return channel not in COURSE_NOTIFICATION_APPS[app_name]['non_editable'] + return channel not in COURSE_NOTIFICATION_TYPES[notification_type]['non_editable'] diff --git a/openedx/core/djangoapps/notifications/serializers.py b/openedx/core/djangoapps/notifications/serializers.py index 79c8c4af9d13..80b1577b6355 100644 --- a/openedx/core/djangoapps/notifications/serializers.py +++ b/openedx/core/djangoapps/notifications/serializers.py @@ -1,6 +1,7 @@ """ Serializers for the notifications API. """ + from django.core.exceptions import ValidationError from rest_framework import serializers @@ -9,9 +10,12 @@ from openedx.core.djangoapps.notifications.models import ( CourseNotificationPreference, Notification, - get_notification_channels, get_additional_notification_channel_settings + get_additional_notification_channel_settings, + get_notification_channels ) + from .base_notification import COURSE_NOTIFICATION_APPS, COURSE_NOTIFICATION_TYPES, EmailCadence +from .email.utils import is_notification_type_channel_editable from .utils import remove_preferences_with_no_access @@ -202,3 +206,113 @@ class Meta: 'last_seen', 'created', ) + + +def validate_email_cadence(email_cadence: str) -> str: + """ + Validate email cadence value. + """ + if EmailCadence.get_email_cadence_value(email_cadence) is None: + raise ValidationError(f'{email_cadence} is not a valid email cadence.') + return email_cadence + + +def validate_notification_app(notification_app: str) -> str: + """ + Validate notification app value. + """ + if not COURSE_NOTIFICATION_APPS.get(notification_app): + raise ValidationError(f'{notification_app} is not a valid notification app.') + return notification_app + + +def validate_notification_app_enabled(notification_app: str) -> str: + """ + Validate notification app is enabled. + """ + + if COURSE_NOTIFICATION_APPS.get(notification_app) and COURSE_NOTIFICATION_APPS.get(notification_app)['enabled']: + return notification_app + raise ValidationError(f'{notification_app} is not a valid notification app.') + + +def validate_notification_type(notification_type: str) -> str: + """ + Validate notification type value. + """ + if not COURSE_NOTIFICATION_TYPES.get(notification_type): + raise ValidationError(f'{notification_type} is not a valid notification type.') + return notification_type + + +def validate_notification_channel(notification_channel: str) -> str: + """ + Validate notification channel value. + """ + valid_channels = set(get_notification_channels()) | set(get_additional_notification_channel_settings()) + if notification_channel not in valid_channels: + raise ValidationError(f'{notification_channel} is not a valid notification channel setting.') + return notification_channel + + +class UserNotificationPreferenceUpdateAllSerializer(serializers.Serializer): + """ + Serializer for user notification preferences update with custom field validators. 
+ """ + notification_app = serializers.CharField( + required=True, + validators=[validate_notification_app, validate_notification_app_enabled] + ) + value = serializers.BooleanField(required=False) + notification_type = serializers.CharField( + required=True, + ) + notification_channel = serializers.CharField( + required=False, + validators=[validate_notification_channel] + ) + email_cadence = serializers.CharField( + required=False, + validators=[validate_email_cadence] + ) + + def validate(self, attrs): + """ + Cross-field validation for notification preference update. + """ + notification_app = attrs.get('notification_app') + notification_type = attrs.get('notification_type') + notification_channel = attrs.get('notification_channel') + email_cadence = attrs.get('email_cadence') + + # Validate email_cadence requirements + if email_cadence and not notification_type: + raise ValidationError({ + 'notification_type': 'notification_type is required for email_cadence.' + }) + + # Validate notification_channel requirements + if not email_cadence and notification_type and not notification_channel: + raise ValidationError({ + 'notification_channel': 'notification_channel is required for notification_type.' + }) + + # Validate notification type + if all([not COURSE_NOTIFICATION_TYPES.get(notification_type), notification_type != "core"]): + raise ValidationError(f'{notification_type} is not a valid notification type.') + + # Validate notification type and channel is editable + if notification_channel and notification_type: + if not is_notification_type_channel_editable( + notification_app, + notification_type, + notification_channel + ): + raise ValidationError({ + 'notification_channel': ( + f'{notification_channel} is not editable for notification type ' + f'{notification_type}.' + ) + }) + + return attrs diff --git a/openedx/core/djangoapps/notifications/tests/test_utils.py b/openedx/core/djangoapps/notifications/tests/test_utils.py new file mode 100644 index 000000000000..e300c63c1d77 --- /dev/null +++ b/openedx/core/djangoapps/notifications/tests/test_utils.py @@ -0,0 +1,288 @@ +""" +Test cases for the notification utility functions. +""" +import unittest + +from openedx.core.djangoapps.notifications.utils import aggregate_notification_configs + + +class TestAggregateNotificationConfigs(unittest.TestCase): + """ + Test cases for the aggregate_notification_configs function. + """ + + def test_empty_configs_list_returns_default(self): + """ + If the configs list is empty, the default config should be returned. + """ + default_config = [{ + "grading": { + "enabled": False, + "non_editable": {}, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + } + } + }] + + result = aggregate_notification_configs(default_config) + assert result == default_config[0] + + def test_enable_notification_type(self): + """ + If a config enables a notification type, it should be enabled in the result. 
+ """ + + config_list = [ + { + "grading": { + "enabled": False, + "non_editable": {}, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Weekly" + } + } + } + }, + { + "grading": { + "enabled": True, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Weekly" + } + } + } + }] + + result = aggregate_notification_configs(config_list) + assert result["grading"]["enabled"] is True + assert result["grading"]["notification_types"]["core"]["web"] is True + assert result["grading"]["notification_types"]["core"]["push"] is True + assert result["grading"]["notification_types"]["core"]["email"] is True + # Use default email_cadence + assert result["grading"]["notification_types"]["core"]["email_cadence"] == "Weekly" + + def test_merge_core_notification_types(self): + """ + Core notification types should be merged across configs. + """ + + config_list = [ + { + "discussion": { + "enabled": True, + "core_notification_types": ["new_comment"], + "notification_types": {} + } + }, + { + "discussion": { + "core_notification_types": ["new_response", "new_comment"] + } + + }] + + result = aggregate_notification_configs(config_list) + assert set(result["discussion"]["core_notification_types"]) == { + "new_comment", "new_response" + } + + def test_multiple_configs_aggregate(self): + """ + Multiple configs should be aggregated together. + """ + + config_list = [ + { + "updates": { + "enabled": False, + "notification_types": { + "course_updates": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Weekly" + } + } + } + }, + { + "updates": { + "enabled": True, + "notification_types": { + "course_updates": { + "web": True, + "email_cadence": "Weekly" + } + } + } + }, + { + "updates": { + "notification_types": { + "course_updates": { + "push": True, + "email_cadence": "Weekly" + } + } + } + } + ] + + result = aggregate_notification_configs(config_list) + assert result["updates"]["enabled"] is True + assert result["updates"]["notification_types"]["course_updates"]["web"] is True + assert result["updates"]["notification_types"]["course_updates"]["push"] is True + assert result["updates"]["notification_types"]["course_updates"]["email"] is False + # Use default email_cadence + assert result["updates"]["notification_types"]["course_updates"]["email_cadence"] == "Weekly" + + def test_ignore_unknown_notification_types(self): + """ + Unknown notification types should be ignored. + """ + config_list = [ + { + "grading": { + "enabled": False, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + } + } + }, + { + "grading": { + "notification_types": { + "unknown_type": { + "web": True, + "push": True, + "email": True + } + } + } + }] + + result = aggregate_notification_configs(config_list) + assert "unknown_type" not in result["grading"]["notification_types"] + assert result["grading"]["notification_types"]["core"]["web"] is False + + def test_ignore_unknown_categories(self): + """ + Unknown categories should be ignored. 
+ """ + + config_list = [ + { + "grading": { + "enabled": False, + "notification_types": {} + } + }, + { + "unknown_category": { + "enabled": True, + "notification_types": {} + } + }] + + result = aggregate_notification_configs(config_list) + assert "unknown_category" not in result + assert result["grading"]["enabled"] is False + + def test_preserves_default_structure(self): + """ + The resulting config should have the same structure as the default config. + """ + + config_list = [ + { + "discussion": { + "enabled": False, + "non_editable": {"core": ["web"]}, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Weekly" + } + }, + "core_notification_types": [] + } + }, + { + "discussion": { + "enabled": True, + "extra_field": "should_not_appear" + } + } + ] + + result = aggregate_notification_configs(config_list) + assert set(result["discussion"].keys()) == { + "enabled", "non_editable", "notification_types", "core_notification_types" + } + assert "extra_field" not in result["discussion"] + + def test_if_email_cadence_has_diff_set_mix_as_value(self): + """ + If email_cadence is different in the configs, set it to "Mixed". + """ + config_list = [ + { + "grading": { + "enabled": False, + "notification_types": { + "core": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + } + } + }, + { + "grading": { + "enabled": True, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Weekly" + } + } + } + }, + { + "grading": { + "notification_types": { + "core": { + "email_cadence": "Monthly" + } + } + } + } + ] + + result = aggregate_notification_configs(config_list) + assert result["grading"]["notification_types"]["core"]["email_cadence"] == "Mixed" diff --git a/openedx/core/djangoapps/notifications/tests/test_views.py b/openedx/core/djangoapps/notifications/tests/test_views.py index 70e6fbc5739c..3e73c95be5be 100644 --- a/openedx/core/djangoapps/notifications/tests/test_views.py +++ b/openedx/core/djangoapps/notifications/tests/test_views.py @@ -2,11 +2,14 @@ Tests for the views in the notifications app. 
""" import json +from copy import deepcopy from datetime import datetime, timedelta from unittest import mock +from unittest.mock import patch import ddt from django.conf import settings +from django.contrib.auth import get_user_model from django.test.utils import override_settings from django.urls import reverse from edx_toggles.toggles.testutils import override_waffle_flag @@ -27,19 +30,21 @@ FORUM_ROLE_MODERATOR ) from openedx.core.djangoapps.notifications.config.waffle import ENABLE_NOTIFICATIONS +from openedx.core.djangoapps.notifications.email.utils import encrypt_object, encrypt_string from openedx.core.djangoapps.notifications.models import ( CourseNotificationPreference, Notification, get_course_notification_preference_config_version ) from openedx.core.djangoapps.notifications.serializers import NotificationCourseEnrollmentSerializer -from openedx.core.djangoapps.notifications.email.utils import encrypt_object, encrypt_string from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from ..base_notification import COURSE_NOTIFICATION_APPS, COURSE_NOTIFICATION_TYPES, NotificationAppManager from ..utils import get_notification_types_with_visibility_settings +User = get_user_model() + @ddt.ddt class CourseEnrollmentListViewTest(ModuleStoreTestCase): @@ -903,6 +908,7 @@ class UpdatePreferenceFromEncryptedDataView(ModuleStoreTestCase): """ Tests if preference is updated when encrypted url is hit """ + def setUp(self): """ Setup test case @@ -968,3 +974,310 @@ def remove_notifications_with_visibility_settings(expected_response): notification_type ) return expected_response + + +class UpdateAllNotificationPreferencesViewTests(APITestCase): + """ + Tests for the UpdateAllNotificationPreferencesView. 
+ """ + + def setUp(self): + # Create test user + self.user = User.objects.create_user( + username='testuser', + password='testpass123' + ) + self.client = APIClient() + self.client.force_authenticate(user=self.user) + self.url = reverse('update-all-notification-preferences') + + # Complex notification config structure + self.base_config = { + "grading": { + "enabled": True, + "non_editable": {}, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "ora_staff_notification": { + "web": False, + "push": False, + "email": False, + "email_cadence": "Daily" + } + }, + "core_notification_types": [] + }, + "updates": { + "enabled": True, + "non_editable": {}, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "course_updates": { + "web": True, + "push": True, + "email": False, + "email_cadence": "Daily" + } + }, + "core_notification_types": [] + }, + "discussion": { + "enabled": True, + "non_editable": { + "core": ["web"] + }, + "notification_types": { + "core": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "content_reported": { + "web": True, + "push": True, + "email": True, + "email_cadence": "Daily" + }, + "new_question_post": { + "web": True, + "push": False, + "email": False, + "email_cadence": "Daily" + }, + "new_discussion_post": { + "web": True, + "push": False, + "email": False, + "email_cadence": "Daily" + } + }, + "core_notification_types": [ + "new_comment_on_response", + "new_comment", + "new_response", + "response_on_followed_post", + "comment_on_followed_post", + "response_endorsed_on_thread", + "response_endorsed" + ] + } + } + + # Create test notification preferences + self.preferences = [] + for i in range(3): + pref = CourseNotificationPreference.objects.create( + user=self.user, + course_id=f'course-v1:TestX+Test{i}+2024', + notification_preference_config=deepcopy(self.base_config), + is_active=True + ) + self.preferences.append(pref) + + # Create an inactive preference + self.inactive_pref = CourseNotificationPreference.objects.create( + user=self.user, + course_id='course-v1:TestX+Inactive+2024', + notification_preference_config=deepcopy(self.base_config), + is_active=False + ) + + def test_update_discussion_notification(self): + """ + Test updating discussion notification settings + """ + data = { + 'notification_app': 'discussion', + 'notification_type': 'content_reported', + 'notification_channel': 'push', + 'value': False + } + + response = self.client.post(self.url, data, format='json') + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['status'], 'success') + self.assertEqual(response.data['data']['total_updated'], 3) + + # Verify database updates + for pref in CourseNotificationPreference.objects.filter(is_active=True): + self.assertFalse( + pref.notification_preference_config['discussion']['notification_types']['content_reported']['push'] + ) + + def test_update_non_editable_field(self): + """ + Test attempting to update a non-editable field + """ + data = { + 'notification_app': 'discussion', + 'notification_type': 'core', + 'notification_channel': 'web', + 'value': False + } + + response = self.client.post(self.url, data, format='json') + + # Should fail because 'web' is non-editable for 'core' in discussion + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data['status'], 'error') + + # Verify database 
remains unchanged + for pref in CourseNotificationPreference.objects.filter(is_active=True): + self.assertTrue( + pref.notification_preference_config['discussion']['notification_types']['core']['web'] + ) + + def test_update_email_cadence(self): + """ + Test updating email cadence setting + """ + data = { + 'notification_app': 'discussion', + 'notification_type': 'content_reported', + 'email_cadence': 'Weekly' + } + + response = self.client.post(self.url, data, format='json') + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['status'], 'success') + + # Verify database updates + for pref in CourseNotificationPreference.objects.filter(is_active=True): + notification_type = pref.notification_preference_config['discussion']['notification_types'][ + 'content_reported'] + self.assertEqual( + notification_type['email_cadence'], + 'Weekly' + ) + + @patch.dict('openedx.core.djangoapps.notifications.serializers.COURSE_NOTIFICATION_APPS', { + **COURSE_NOTIFICATION_APPS, + 'grading': { + 'enabled': False, + 'core_info': 'Notifications for submission grading.', + 'core_web': True, + 'core_email': True, + 'core_push': True, + 'core_email_cadence': 'Daily', + 'non_editable': [] + } + }) + def test_update_disabled_app(self): + """ + Test updating notification for a disabled app + """ + # Disable the grading app in all preferences + for pref in self.preferences: + config = pref.notification_preference_config + config['grading']['enabled'] = False + pref.notification_preference_config = config + pref.save() + + data = { + 'notification_app': 'grading', + 'notification_type': 'core', + 'notification_channel': 'email', + 'value': False + } + response = self.client.post(self.url, data, format='json') + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.data['status'], 'error') + + def test_invalid_serializer_data(self): + """ + Test handling of invalid input data + """ + test_cases = [ + { + 'notification_app': 'invalid_app', + 'notification_type': 'core', + 'notification_channel': 'push', + 'value': False + }, + { + 'notification_app': 'discussion', + 'notification_type': 'invalid_type', + 'notification_channel': 'push', + 'value': False + }, + { + 'notification_app': 'discussion', + 'notification_type': 'core', + 'notification_channel': 'invalid_channel', + 'value': False + }, + { + 'notification_app': 'discussion', + 'notification_type': 'core', + 'notification_channel': 'email_cadence', + 'value': 'Invalid_Cadence' + } + ] + + for test_case in test_cases: + response = self.client.post(self.url, test_case, format='json') + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + +class GetAggregateNotificationPreferencesTest(APITestCase): + """ + Tests for the GetAggregateNotificationPreferences API view. 
+ """ + + def setUp(self): + # Set up a user and API client + self.user = User.objects.create_user(username='testuser', password='testpass') + self.client = APIClient() + self.client.force_authenticate(user=self.user) + self.url = reverse('notification-preferences-aggregated') # Adjust with the actual name + + def test_no_active_notification_preferences(self): + """ + Test case: No active notification preferences found for the user + """ + response = self.client.get(self.url) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + self.assertEqual(response.data['status'], 'error') + self.assertEqual(response.data['message'], 'No active notification preferences found') + + @patch('openedx.core.djangoapps.notifications.views.aggregate_notification_configs') + def test_with_active_notification_preferences(self, mock_aggregate): + """ + Test case: Active notification preferences found for the user + """ + # Mock aggregate_notification_configs for a controlled output + mock_aggregate.return_value = {'mocked': 'data'} + + # Create active notification preferences for the user + CourseNotificationPreference.objects.create( + user=self.user, + is_active=True, + notification_preference_config={'example': 'config'} + ) + + response = self.client.get(self.url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data['status'], 'success') + self.assertEqual(response.data['message'], 'Notification preferences retrieved') + self.assertEqual(response.data['data'], {'mocked': 'data'}) + + def test_unauthenticated_user(self): + """ + Test case: Request without authentication + """ + # Test case: Request without authentication + self.client.logout() # Remove authentication + response = self.client.get(self.url) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) diff --git a/openedx/core/djangoapps/notifications/urls.py b/openedx/core/djangoapps/notifications/urls.py index 7f611bc2c4ca..9892fa72de0d 100644 --- a/openedx/core/djangoapps/notifications/urls.py +++ b/openedx/core/djangoapps/notifications/urls.py @@ -11,13 +11,13 @@ NotificationCountView, NotificationListAPIView, NotificationReadAPIView, + UpdateAllNotificationPreferencesView, UserNotificationPreferenceView, - preference_update_from_encrypted_username_view, + preference_update_from_encrypted_username_view, AggregatedNotificationPreferences ) router = routers.DefaultRouter() - urlpatterns = [ path('enrollments/', CourseEnrollmentListView.as_view(), name='enrollment-list'), re_path( @@ -25,6 +25,11 @@ UserNotificationPreferenceView.as_view(), name='notification-preferences' ), + path( + 'configurations/', + AggregatedNotificationPreferences.as_view(), + name='notification-preferences-aggregated' + ), path('', NotificationListAPIView.as_view(), name='notifications-list'), path('count/', NotificationCountView.as_view(), name='notifications-count'), path( @@ -35,6 +40,11 @@ path('read/', NotificationReadAPIView.as_view(), name='notifications-read'), path('preferences/update///', preference_update_from_encrypted_username_view, name='preference_update_from_encrypted_username_view'), + path( + 'preferences/update-all/', + UpdateAllNotificationPreferencesView.as_view(), + name='update-all-notification-preferences' + ), ] urlpatterns += router.urls diff --git a/openedx/core/djangoapps/notifications/utils.py b/openedx/core/djangoapps/notifications/utils.py index fa948dcf425e..c05837b5f15f 100644 --- a/openedx/core/djangoapps/notifications/utils.py +++ 
b/openedx/core/djangoapps/notifications/utils.py @@ -1,11 +1,12 @@ """ Utils function for notifications app """ -from typing import Dict, List +import copy +from typing import Dict, List, Set from common.djangoapps.student.models import CourseAccessRole, CourseEnrollment from openedx.core.djangoapps.django_comment_common.models import Role -from openedx.core.djangoapps.notifications.config.waffle import ENABLE_NOTIFICATIONS, ENABLE_NEW_NOTIFICATION_VIEW +from openedx.core.djangoapps.notifications.config.waffle import ENABLE_NEW_NOTIFICATION_VIEW, ENABLE_NOTIFICATIONS from openedx.core.lib.cache_utils import request_cached @@ -158,3 +159,113 @@ def clean_arguments(kwargs): if kwargs.get('created', {}): clean_kwargs.update(kwargs.get('created')) return clean_kwargs + + +def update_notification_types( + app_config: Dict, + user_app_config: Dict, +) -> None: + """ + Update notification types for a specific category configuration. + """ + if "notification_types" not in user_app_config: + return + + for type_key, type_config in user_app_config["notification_types"].items(): + if type_key not in app_config["notification_types"]: + continue + + update_notification_fields( + app_config["notification_types"][type_key], + type_config, + ) + + +def update_notification_fields( + target_config: Dict, + source_config: Dict, +) -> None: + """ + Update individual notification fields (web, push, email) and email_cadence. + """ + for field in ["web", "push", "email"]: + if field in source_config: + target_config[field] |= source_config[field] + if "email_cadence" in source_config: + if isinstance(target_config["email_cadence"], str) or not target_config["email_cadence"]: + target_config["email_cadence"] = set() + + target_config["email_cadence"].add(source_config["email_cadence"]) + + +def update_core_notification_types(app_config: Dict, user_config: Dict) -> None: + """ + Update core notification types by merging existing and new types. + """ + if "core_notification_types" not in user_config: + return + + existing_types: Set = set(app_config.get("core_notification_types", [])) + existing_types.update(user_config["core_notification_types"]) + app_config["core_notification_types"] = list(existing_types) + + +def process_app_config( + app_config: Dict, + user_config: Dict, + app: str, + default_config: Dict, +) -> None: + """ + Process a single category configuration against another config. + """ + if app not in user_config: + return + + user_app_config = user_config[app] + + # Update enabled status + app_config["enabled"] |= user_app_config.get("enabled", False) + + # Update core notification types + update_core_notification_types(app_config, user_app_config) + + # Update notification types + update_notification_types(app_config, user_app_config) + + +def aggregate_notification_configs(existing_user_configs: List[Dict]) -> Dict: + """ + Update default notification config with values from other configs. + Rules: + 1. Start with default config as base + 2. If any value is True in other configs, make it True + 3. 
Set email_cadence to "Mixed" if different cadences found, else use default + + Args: + existing_user_configs: List of notification config dictionaries to apply + + Returns: + Updated config following the same structure + """ + if not existing_user_configs: + return {} + + result_config = copy.deepcopy(existing_user_configs[0]) + apps = result_config.keys() + + for app in apps: + app_config = result_config[app] + + for user_config in existing_user_configs: + process_app_config(app_config, user_config, app, existing_user_configs[0]) + + # if email_cadence is mixed, set it to "Mixed" + for app in result_config: + for type_key, type_config in result_config[app]["notification_types"].items(): + if len(type_config["email_cadence"]) > 1: + result_config[app]["notification_types"][type_key]["email_cadence"] = "Mixed" + else: + result_config[app]["notification_types"][type_key]["email_cadence"] = ( + result_config[app]["notification_types"][type_key]["email_cadence"].pop()) + return result_config diff --git a/openedx/core/djangoapps/notifications/views.py b/openedx/core/djangoapps/notifications/views.py index e87274088f84..8e41b11554c8 100644 --- a/openedx/core/djangoapps/notifications/views.py +++ b/openedx/core/djangoapps/notifications/views.py @@ -1,9 +1,11 @@ """ Views for the notifications API. """ +import copy from datetime import datetime, timedelta from django.conf import settings +from django.db import transaction from django.db.models import Count from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as _ @@ -17,10 +19,7 @@ from common.djangoapps.student.models import CourseEnrollment from openedx.core.djangoapps.notifications.email.utils import update_user_preferences_from_patch -from openedx.core.djangoapps.notifications.models import ( - CourseNotificationPreference, - get_course_notification_preference_config_version -) +from openedx.core.djangoapps.notifications.models import get_course_notification_preference_config_version from openedx.core.djangoapps.notifications.permissions import allow_any_authenticated_user from .base_notification import COURSE_NOTIFICATION_APPS @@ -32,14 +31,15 @@ notification_tray_opened_event, notifications_app_all_read_event ) -from .models import Notification +from .models import CourseNotificationPreference, Notification from .serializers import ( NotificationCourseEnrollmentSerializer, NotificationSerializer, UserCourseNotificationPreferenceSerializer, - UserNotificationPreferenceUpdateSerializer, + UserNotificationPreferenceUpdateAllSerializer, + UserNotificationPreferenceUpdateSerializer ) -from .utils import get_show_notifications_tray, get_is_new_notification_view_enabled +from .utils import get_is_new_notification_view_enabled, get_show_notifications_tray, aggregate_notification_configs @allow_any_authenticated_user() @@ -444,3 +444,144 @@ def preference_update_from_encrypted_username_view(request, username, patch): """ update_user_preferences_from_patch(username, patch) return Response({"result": "success"}, status=status.HTTP_200_OK) + + +@allow_any_authenticated_user() +class UpdateAllNotificationPreferencesView(APIView): + """ + API view for updating all notification preferences for the current user. + """ + + def post(self, request): + """ + Update all notification preferences for the current user. 
+ """ + # check if request have required params + serializer = UserNotificationPreferenceUpdateAllSerializer(data=request.data) + if not serializer.is_valid(): + return Response({ + 'status': 'error', + 'message': serializer.errors + }, status=status.HTTP_400_BAD_REQUEST) + # check if required config is not editable + try: + with transaction.atomic(): + # Get all active notification preferences for the current user + notification_preferences = ( + CourseNotificationPreference.objects + .select_for_update() + .filter( + user=request.user, + is_active=True + ) + ) + + if not notification_preferences.exists(): + return Response({ + 'status': 'error', + 'message': 'No active notification preferences found' + }, status=status.HTTP_404_NOT_FOUND) + + data = serializer.validated_data + app = data['notification_app'] + email_cadence = data.get('email_cadence', None) + channel = data.get('notification_channel', 'email_cadence' if email_cadence else None) + notification_type = data['notification_type'] + value = data.get('value', email_cadence if email_cadence else None) + + updated_courses = [] + errors = [] + + # Update each preference + for preference in notification_preferences: + try: + # Create a deep copy of the current config + updated_config = copy.deepcopy(preference.notification_preference_config) + + # Check if the path exists and update the value + if ( + updated_config.get(app, {}) + .get('notification_types', {}) + .get(notification_type, {}) + .get(channel) + ) is not None: + + # Update the specific setting in the config + updated_config[app]['notification_types'][notification_type][channel] = value + + # Update the notification preference + preference.notification_preference_config = updated_config + preference.save() + + updated_courses.append({ + 'course_id': str(preference.course_id), + 'current_setting': updated_config[app]['notification_types'][notification_type] + }) + else: + errors.append({ + 'course_id': str(preference.course_id), + 'error': f'Invalid path: {app}.notification_types.{notification_type}.{channel}' + }) + + except Exception as e: + errors.append({ + 'course_id': str(preference.course_id), + 'error': str(e) + }) + + response_data = { + 'status': 'success' if updated_courses else 'partial_success' if errors else 'error', + 'message': 'Notification preferences update completed', + 'data': { + 'updated_value': value, + 'notification_type': notification_type, + 'channel': channel, + 'app': app, + 'successfully_updated_courses': updated_courses, + 'total_updated': len(updated_courses), + 'total_courses': notification_preferences.count() + } + } + + if errors: + response_data['errors'] = errors + + return Response( + response_data, + status=status.HTTP_200_OK if updated_courses else status.HTTP_400_BAD_REQUEST + ) + + except Exception as e: + return Response({ + 'status': 'error', + 'message': str(e) + }, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + +@allow_any_authenticated_user() +class AggregatedNotificationPreferences(APIView): + """ + API view for getting the aggregate notification preferences for the current user. + """ + + def get(self, request): + """ + API view for getting the aggregate notification preferences for the current user. 
+ """ + notification_preferences = CourseNotificationPreference.objects.filter(user=request.user, is_active=True) + + if not notification_preferences.exists(): + return Response({ + 'status': 'error', + 'message': 'No active notification preferences found' + }, status=status.HTTP_404_NOT_FOUND) + notification_configs = notification_preferences.values_list('notification_preference_config', flat=True) + notification_configs = aggregate_notification_configs( + notification_configs + ) + + return Response({ + 'status': 'success', + 'message': 'Notification preferences retrieved', + 'data': notification_configs + }, status=status.HTTP_200_OK) From b391fb54002d29dd53d2b48ef6c758ff96efa998 Mon Sep 17 00:00:00 2001 From: "Kyle D. McCormick" Date: Thu, 19 Dec 2024 14:16:57 -0500 Subject: [PATCH 17/20] build: unpin django-stubs now that we're on django >=4.2 --- requirements/constraints.txt | 7 ------- 1 file changed, 7 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e65e19a574fc..78a5f83a787c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -60,13 +60,6 @@ django-webpack-loader==0.7.0 # Adding pin to avoid any major upgrade djangorestframework<3.15.0 -# Date: 2023-07-19 -# The version of django-stubs we can use depends on which Django release we're using -# 1.16.0 works with Django 3.2 through 4.1 -# Issue for unpinning: https://github.com/openedx/edx-platform/issues/35275 -django-stubs==1.16.0 -djangorestframework-stubs==3.14.0 # Pinned to match django-stubs. Remove this when we can remove the above pin. - # Date: 2024-07-23 # django-storages==1.14.4 breaks course imports # Two lines were added in 1.14.4 that make file_exists_in_storage function always return False, From edbcadae8e20202b7fcaa2c81d62214f6e88e02e Mon Sep 17 00:00:00 2001 From: Kyle McCormick Date: Thu, 19 Dec 2024 15:20:53 -0500 Subject: [PATCH 18/20] revert: build: unpin django-stubs now that we're on django >=4.2 (#36051) This reverts commit b391fb54002d29dd53d2b48ef6c758ff96efa998, which was unintentionally pushed to master instead of a PR. --- requirements/constraints.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 78a5f83a787c..e65e19a574fc 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -60,6 +60,13 @@ django-webpack-loader==0.7.0 # Adding pin to avoid any major upgrade djangorestframework<3.15.0 +# Date: 2023-07-19 +# The version of django-stubs we can use depends on which Django release we're using +# 1.16.0 works with Django 3.2 through 4.1 +# Issue for unpinning: https://github.com/openedx/edx-platform/issues/35275 +django-stubs==1.16.0 +djangorestframework-stubs==3.14.0 # Pinned to match django-stubs. Remove this when we can remove the above pin. 
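Looping back to the AggregatedNotificationPreferences view added earlier in this patch series: the sketch below illustrates the response envelope that GET handler returns. The web/push/email channel keys and the nested values are assumptions for illustration only; they show how a disagreement between courses surfaces as "Mixed".

    example_response = {
        "status": "success",
        "message": "Notification preferences retrieved",
        "data": {
            "discussion": {
                "notification_types": {
                    "new_comment": {"web": True, "push": False, "email": True, "email_cadence": "Mixed"},
                },
            },
        },
    }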
+ # Date: 2024-07-23 # django-storages==1.14.4 breaks course imports # Two lines were added in 1.14.4 that make file_exists_in_storage function always return False, From 76cceaa29835422e44b8575e598757d37f4680e4 Mon Sep 17 00:00:00 2001 From: jawad khan Date: Fri, 20 Dec 2024 15:23:47 +0500 Subject: [PATCH 19/20] feat: Populate notification context with post, comment and response ids (#36008) * feat: Populate notification context with post, comment and response ids --- .../rest_api/discussions_notifications.py | 33 ++++++++++---- .../discussion/rest_api/tests/test_tasks.py | 45 ++++++++++++++++--- .../discussion/rest_api/tests/utils.py | 3 +- 3 files changed, 66 insertions(+), 15 deletions(-) diff --git a/lms/djangoapps/discussion/rest_api/discussions_notifications.py b/lms/djangoapps/discussion/rest_api/discussions_notifications.py index bd12e82adc50..f6572c829c7a 100644 --- a/lms/djangoapps/discussion/rest_api/discussions_notifications.py +++ b/lms/djangoapps/discussion/rest_api/discussions_notifications.py @@ -117,6 +117,7 @@ def send_new_response_notification(self): context = { 'email_content': clean_thread_html_body(self.comment.body), } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.thread.user_id], "new_response", extra_context=context) def _response_and_thread_has_same_creator(self) -> bool: @@ -156,6 +157,7 @@ def send_new_comment_notification(self): "author_pronoun": str(author_pronoun), "email_content": clean_thread_html_body(self.comment.body), } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.thread.user_id], "new_comment", extra_context=context) def send_new_comment_on_response_notification(self): @@ -171,6 +173,7 @@ def send_new_comment_on_response_notification(self): context = { "email_content": clean_thread_html_body(self.comment.body), } + self._populate_context_with_ids_for_mobile(context) self._send_notification( [self.parent_response.user_id], "new_comment_on_response", @@ -216,12 +219,15 @@ def send_response_on_followed_post_notification(self): # Remove duplicate users from the list of users to send notification users = list(set(users)) if not self.parent_id: + context = { + "email_content": clean_thread_html_body(self.comment.body), + } + self._populate_context_with_ids_for_mobile(context) self._send_notification( users, "response_on_followed_post", - extra_context={ - "email_content": clean_thread_html_body(self.comment.body), - }) + extra_context=context + ) else: author_name = f"{self.parent_response.username}'s" # use 'their' if comment author is also response author. 
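# Illustrative sketch (not applied by this diff) of what the
# _populate_context_with_ids_for_mobile() helper, defined further down in this file,
# adds to each notification context so mobile clients can deep-link to the right
# discussion item. The sample values are made up.
context = {"email_content": "cleaned comment body"}
context.update({
    "thread_id": 1,               # the discussion thread the activity happened in
    "topic_id": "course-topic",   # thread.commentable_id in the real helper
    "comment_id": 4,              # the newly created comment, if any
    "parent_id": None,            # the parent response; None for top-level responses
})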
@@ -231,14 +237,16 @@ def send_response_on_followed_post_notification(self): if self._response_and_comment_has_same_creator() else f"{self.parent_response.username}'s" ) + context = { + "author_name": str(author_name), + "author_pronoun": str(author_pronoun), + "email_content": clean_thread_html_body(self.comment.body), + } + self._populate_context_with_ids_for_mobile(context) self._send_notification( users, "comment_on_followed_post", - extra_context={ - "author_name": str(author_name), - "author_pronoun": str(author_pronoun), - "email_content": clean_thread_html_body(self.comment.body), - } + extra_context=context ) def _create_cohort_course_audience(self): @@ -290,6 +298,7 @@ def send_response_endorsed_on_thread_notification(self): context = { "email_content": clean_thread_html_body(self.comment.body) } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.thread.user_id], "response_endorsed_on_thread", extra_context=context) def send_response_endorsed_notification(self): @@ -299,6 +308,7 @@ def send_response_endorsed_notification(self): context = { "email_content": clean_thread_html_body(self.comment.body) } + self._populate_context_with_ids_for_mobile(context) self._send_notification([self.creator.id], "response_endorsed", extra_context=context) def send_new_thread_created_notification(self): @@ -330,6 +340,7 @@ def send_new_thread_created_notification(self): 'post_title': self.thread.title, "email_content": clean_thread_html_body(self.thread.body), } + self._populate_context_with_ids_for_mobile(context) self._send_course_wide_notification(notification_type, audience_filters, context) def send_reported_content_notification(self): @@ -362,6 +373,12 @@ def send_reported_content_notification(self): ]} self._send_course_wide_notification("content_reported", audience_filters, context) + def _populate_context_with_ids_for_mobile(self, context): + context['thread_id'] = self.thread.id + context['topic_id'] = self.thread.commentable_id + context['comment_id'] = self.comment_id + context['parent_id'] = self.parent_id + def is_discussion_cohorted(course_key_str): """ diff --git a/lms/djangoapps/discussion/rest_api/tests/test_tasks.py b/lms/djangoapps/discussion/rest_api/tests/test_tasks.py index 6aff0673cc73..8171ca7e6a71 100644 --- a/lms/djangoapps/discussion/rest_api/tests/test_tasks.py +++ b/lms/djangoapps/discussion/rest_api/tests/test_tasks.py @@ -305,6 +305,8 @@ def setUp(self): "username": thread.username, "thread_type": 'discussion', "title": thread.title, + "commentable_id": thread.commentable_id, + }) self._register_subscriptions_endpoint() @@ -354,7 +356,11 @@ def test_send_notification_to_thread_creator(self): 'post_title': 'test thread', 'email_content': self.comment.body, 'course_name': self.course.display_name, - 'sender_id': self.user_2.id + 'sender_id': self.user_2.id, + 'parent_id': None, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args.context, expected_context) self.assertEqual( @@ -400,7 +406,11 @@ def test_send_notification_to_parent_threads(self): 'author_name': 'dummy\'s', 'author_pronoun': 'dummy\'s', 'course_name': self.course.display_name, - 'sender_id': self.user_3.id + 'sender_id': self.user_3.id, + 'parent_id': 2, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args_comment.context, expected_context) self.assertEqual( @@ -417,7 +427,11 @@ def test_send_notification_to_parent_threads(self): 'post_title': self.thread.title, 'email_content': self.comment.body, 
'course_name': self.course.display_name, - 'sender_id': self.user_3.id + 'sender_id': self.user_3.id, + 'parent_id': 2, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args_comment_on_response.context, expected_context) self.assertEqual( @@ -477,7 +491,11 @@ def test_comment_creators_own_response(self): 'author_pronoun': 'your', 'course_name': self.course.display_name, 'sender_id': self.user_3.id, - 'email_content': self.comment.body + 'email_content': self.comment.body, + 'parent_id': 2, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } self.assertDictEqual(args_comment.context, expected_context) self.assertEqual( @@ -522,6 +540,10 @@ def test_send_notification_to_followers(self, parent_id, notification_type): 'email_content': self.comment.body, 'course_name': self.course.display_name, 'sender_id': self.user_2.id, + 'parent_id': parent_id, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 4, } if parent_id: expected_context['author_name'] = 'dummy\'s' @@ -624,6 +646,8 @@ def test_new_comment_notification(self): "username": thread.username, "thread_type": 'discussion', "title": thread.title, + "commentable_id": thread.commentable_id, + }) self.register_get_comment_response({ 'id': response.id, @@ -707,6 +731,7 @@ def test_response_endorsed_notifications(self): "username": thread.username, "thread_type": 'discussion', "title": thread.title, + "commentable_id": thread.commentable_id, }) self.register_get_comment_response({ 'id': 1, @@ -734,7 +759,11 @@ def test_response_endorsed_notifications(self): 'post_title': 'test thread', 'course_name': self.course.display_name, 'sender_id': int(self.user_2.id), - 'email_content': 'dummy' + 'email_content': 'dummy', + 'parent_id': None, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 2, } self.assertDictEqual(notification_data.context, expected_context) self.assertEqual(notification_data.content_url, _get_mfe_url(self.course.id, thread.id)) @@ -752,7 +781,11 @@ def test_response_endorsed_notifications(self): 'post_title': 'test thread', 'course_name': self.course.display_name, 'sender_id': int(response.user_id), - 'email_content': 'dummy' + 'email_content': 'dummy', + 'parent_id': None, + 'topic_id': None, + 'thread_id': 1, + 'comment_id': 2, } self.assertDictEqual(notification_data.context, expected_context) self.assertEqual(notification_data.content_url, _get_mfe_url(self.course.id, thread.id)) diff --git a/lms/djangoapps/discussion/rest_api/tests/utils.py b/lms/djangoapps/discussion/rest_api/tests/utils.py index 27e34705f5df..496f8723acfb 100644 --- a/lms/djangoapps/discussion/rest_api/tests/utils.py +++ b/lms/djangoapps/discussion/rest_api/tests/utils.py @@ -675,13 +675,14 @@ class ThreadMock(object): A mock thread object """ - def __init__(self, thread_id, creator, title, parent_id=None, body=''): + def __init__(self, thread_id, creator, title, parent_id=None, body='', commentable_id=None): self.id = thread_id self.user_id = str(creator.id) self.username = creator.username self.title = title self.parent_id = parent_id self.body = body + self.commentable_id = commentable_id def url_with_id(self, params): return f"http://example.com/{params['id']}" From 644e7e786af351231eb09bdba8d5599a979894d2 Mon Sep 17 00:00:00 2001 From: Kyle McCormick Date: Fri, 20 Dec 2024 15:02:33 -0500 Subject: [PATCH 20/20] build!: requirements/edx-sandbox/py38.txt (#36049) This was a temporary backcompat alias to requirements/edx-sandbox/quince.txt. 
Developers relying on this file to run a python3.8 sandbox should:

* first, switch to requirements/edx-sandbox/quince.txt, which works with
  python3.8; and
* then, upgrade their codejail sandbox to a newer `.txt`, since python3.8
  is EOL.

See the readme in this directory for more details.
---
 requirements/edx-sandbox/py38.txt | 4 ----
 1 file changed, 4 deletions(-)
 delete mode 100644 requirements/edx-sandbox/py38.txt

diff --git a/requirements/edx-sandbox/py38.txt b/requirements/edx-sandbox/py38.txt
deleted file mode 100644
index 5164c3975a12..000000000000
--- a/requirements/edx-sandbox/py38.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# This file is a temporary compatibility wrapper around quince.txt.
-# It will be removed before Sumac.
-
--r releases/quince.txt
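For downstream sandboxes that still include the deleted alias, a minimal sketch of the replacement include follows. The file name is hypothetical, and the target path is inferred from the `-r releases/quince.txt` line removed above (resolved from the repository root):

    # sandbox-requirements.txt (hypothetical downstream file)
    # Stay on the Quince sandbox set while still on python3.8, then move to a newer release file.
    -r requirements/edx-sandbox/releases/quince.txt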