diff --git a/api/app_analytics/analytics_db_service.py b/api/app_analytics/analytics_db_service.py
index d8e6effd0b41..5407086f2c49 100644
--- a/api/app_analytics/analytics_db_service.py
+++ b/api/app_analytics/analytics_db_service.py
@@ -1,4 +1,4 @@
-from datetime import date, timedelta
+from datetime import date, datetime, timedelta
 from typing import List
 
 from app_analytics.dataclasses import FeatureEvaluationData, UsageData
@@ -14,34 +14,103 @@
     FeatureEvaluationBucket,
     Resource,
 )
+from dateutil.relativedelta import relativedelta
 from django.conf import settings
 from django.db.models import Sum
 from django.utils import timezone
 from environments.models import Environment
 from features.models import Feature
+from organisations.models import Organisation
 
-ANALYTICS_READ_BUCKET_SIZE = 15
+from . import constants
+from .types import PERIOD_TYPE
 
 
 def get_usage_data(
-    organisation, environment_id=None, project_id=None
-) -> List[UsageData]:
+    organisation: Organisation,
+    environment_id: int | None = None,
+    project_id: int | None = None,
+    period: PERIOD_TYPE | None = None,
+) -> list[UsageData]:
+    now = timezone.now()
+
+    date_stop = date_start = None
+    period_starts_at = period_ends_at = None
+
+    match period:
+        case constants.CURRENT_BILLING_PERIOD:
+            if not getattr(organisation, "subscription_information_cache", None):
+                return []
+            sub_cache = organisation.subscription_information_cache
+            starts_at = sub_cache.current_billing_term_starts_at
+            month_delta = relativedelta(now, starts_at).months
+            period_starts_at = relativedelta(months=month_delta) + starts_at
+            period_ends_at = now
+            date_start = f"-{(now - period_starts_at).days}d"
+            date_stop = "now()"
+
+        case constants.PREVIOUS_BILLING_PERIOD:
+            if not getattr(organisation, "subscription_information_cache", None):
+                return []
+            sub_cache = organisation.subscription_information_cache
+            starts_at = sub_cache.current_billing_term_starts_at
+            month_delta = relativedelta(now, starts_at).months - 1
+            month_delta += relativedelta(now, starts_at).years * 12
+            period_starts_at = relativedelta(months=month_delta) + starts_at
+            period_ends_at = relativedelta(months=month_delta + 1) + starts_at
+            date_start = f"-{(now - period_starts_at).days}d"
+            date_stop = f"-{(now - period_ends_at).days}d"
+
+        case constants.NINETY_DAY_PERIOD:
+            period_starts_at = now - relativedelta(days=90)
+            period_ends_at = now
+            date_start = "-90d"
+            date_stop = "now()"
+
     if settings.USE_POSTGRES_FOR_ANALYTICS:
-        return get_usage_data_from_local_db(
-            organisation, environment_id=environment_id, project_id=project_id
-        )
-    return get_usage_data_from_influxdb(
-        organisation.id, environment_id=environment_id, project_id=project_id
-    )
+        kwargs = {
+            "organisation": organisation,
+            "environment_id": environment_id,
+            "project_id": project_id,
+        }
+
+        if period_starts_at:
+            assert period_ends_at
+            kwargs["date_start"] = period_starts_at
+            kwargs["date_stop"] = period_ends_at
+
+        return get_usage_data_from_local_db(**kwargs)
+
+    kwargs = {
+        "organisation_id": organisation.id,
+        "environment_id": environment_id,
+        "project_id": project_id,
+    }
+
+    if date_start:
+        assert date_stop
+        kwargs["date_start"] = date_start
+        kwargs["date_stop"] = date_stop
+
+    return get_usage_data_from_influxdb(**kwargs)
 
 
 def get_usage_data_from_local_db(
-    organisation, environment_id=None, project_id=None, period: int = 30
+    organisation: Organisation,
+    environment_id: int | None = None,
+    project_id: int | None = None,
+    date_start: datetime | None = None,
+    date_stop: datetime | None = None,
 ) -> List[UsageData]:
+    if date_start is None:
+        date_start = timezone.now() - timedelta(days=30)
+
+    if date_stop is None:
+        date_stop = timezone.now()
+
     qs = APIUsageBucket.objects.filter(
         environment_id__in=_get_environment_ids_for_org(organisation),
-        bucket_size=ANALYTICS_READ_BUCKET_SIZE,
+        bucket_size=constants.ANALYTICS_READ_BUCKET_SIZE,
     )
     if project_id:
         qs = qs.filter(project_id=project_id)
@@ -50,8 +119,8 @@ def get_usage_data_from_local_db(
 
     qs = (
         qs.filter(
-            created_at__date__lte=timezone.now(),
-            created_at__date__gt=timezone.now() - timedelta(days=30),
+            created_at__date__lte=date_stop,
+            created_at__date__gt=date_start,
         )
         .order_by("created_at__date")
         .values("created_at__date", "resource")
@@ -80,7 +149,7 @@ def get_total_events_count(organisation) -> int:
             environment_id__in=_get_environment_ids_for_org(organisation),
             created_at__date__lte=date.today(),
             created_at__date__gt=date.today() - timedelta(days=30),
-            bucket_size=ANALYTICS_READ_BUCKET_SIZE,
+            bucket_size=constants.ANALYTICS_READ_BUCKET_SIZE,
         ).aggregate(total_count=Sum("total_count"))["total_count"]
     else:
         count = get_events_for_organisation(organisation.id)
@@ -105,7 +174,7 @@ def get_feature_evaluation_data_from_local_db(
     feature_evaluation_data = (
         FeatureEvaluationBucket.objects.filter(
             environment_id=environment_id,
-            bucket_size=ANALYTICS_READ_BUCKET_SIZE,
+            bucket_size=constants.ANALYTICS_READ_BUCKET_SIZE,
             feature_name=feature.name,
             created_at__date__lte=timezone.now(),
             created_at__date__gt=timezone.now() - timedelta(days=period),
diff --git a/api/app_analytics/constants.py b/api/app_analytics/constants.py
new file mode 100644
index 000000000000..27632a2b98c3
--- /dev/null
+++ b/api/app_analytics/constants.py
@@ -0,0 +1,6 @@
+ANALYTICS_READ_BUCKET_SIZE = 15
+
+# get_usage_data() related period constants
+CURRENT_BILLING_PERIOD = "current_billing_period"
+PREVIOUS_BILLING_PERIOD = "previous_billing_period"
+NINETY_DAY_PERIOD = "90_day_period"
diff --git a/api/app_analytics/influxdb_wrapper.py b/api/app_analytics/influxdb_wrapper.py
index 05ea0218b6e1..0e9f65bc1d6c 100644
--- a/api/app_analytics/influxdb_wrapper.py
+++ b/api/app_analytics/influxdb_wrapper.py
@@ -76,7 +76,7 @@ def write(self):
 
     @staticmethod
    def influx_query_manager(
-        date_range: str = "30d",
+        date_start: str = "-30d",
         date_stop: str = "now()",
         drop_columns: typing.Tuple[str, ...] = DEFAULT_DROP_COLUMNS,
         filters: str = "|> filter(fn:(r) => r._measurement == 'api_call')",
@@ -88,7 +88,7 @@ def influx_query_manager(
 
         query = (
             f'from(bucket:"{bucket}")'
-            f" |> range(start: -{date_range}, stop: {date_stop})"
+            f" |> range(start: {date_start}, stop: {date_stop})"
             f" {filters}"
             f" |> drop(columns: {drop_columns_input})"
             f"{extra}"
@@ -103,7 +103,9 @@ def influx_query_manager(
             return []
 
 
-def get_events_for_organisation(organisation_id: id, date_range: str = "30d") -> int:
+def get_events_for_organisation(
+    organisation_id: int, date_start: str = "-30d", date_stop: str = "now()"
+) -> int:
     """
     Query influx db for usage for given organisation id
 
@@ -126,7 +128,8 @@ def get_events_for_organisation(organisation_id: id, date_range: str = "30d") ->
             "environment_id",
         ),
         extra="|> sum()",
-        date_range=date_range,
+        date_start=date_start,
+        date_stop=date_stop,
     )
 
     total = 0
@@ -137,7 +140,9 @@ def get_events_for_organisation(organisation_id: id, date_range: str = "30d") ->
 
     return total
 
 
-def get_event_list_for_organisation(organisation_id: int, date_range: str = "30d"):
+def get_event_list_for_organisation(
+    organisation_id: int, date_start: str = "-30d", date_stop: str = "now()"
+) -> tuple[dict[str, list[int]], list[str]]:
     """
     Query influx db for usage for given organisation id
 
@@ -149,14 +154,20 @@ def get_event_list_for_organisation(organisation_id: int, date_range: str = "30d
         filters=f'|> filter(fn:(r) => r._measurement == "api_call") \
                 |> filter(fn: (r) => r["organisation_id"] == "{organisation_id}")',
         extra="|> aggregateWindow(every: 24h, fn: sum)",
-        date_range=date_range,
+        date_start=date_start,
+        date_stop=date_stop,
     )
     dataset = defaultdict(list)
     labels = []
     for result in results:
         for record in result.records:
             dataset[record["resource"]].append(record["_value"])
-            required_records = int(date_range[:-1]) + 1
+            if date_stop == "now()":
+                required_records = abs(int(date_start[:-1])) + 1
+            else:
+                required_records = (
+                    abs(int(date_start[:-1])) - abs(int(date_stop[:-1])) + 1
+                )
             if len(labels) != required_records:
                 labels.append(record.values["_time"].strftime("%Y-%m-%d"))
     return dataset, labels
@@ -166,7 +177,9 @@ def get_multiple_event_list_for_organisation(
     organisation_id: int,
     project_id: int = None,
     environment_id: int = None,
-):
+    date_start: str = "-30d",
+    date_stop: str = "now()",
+) -> list[UsageData]:
     """
     Query influx db for usage for given organisation id
 
@@ -176,7 +189,6 @@
     :return: a number of requests for flags, traits, identities, environment-document
     """
-
     filters = [
         'r._measurement == "api_call"',
         f'r["organisation_id"] == "{organisation_id}"',
     ]
@@ -189,6 +201,8 @@
         filters.append(f'r["environment_id"] == "{environment_id}"')
 
     results = InfluxDBWrapper.influx_query_manager(
+        date_start=date_start,
+        date_stop=date_stop,
         filters=build_filter_string(filters),
         extra="|> aggregateWindow(every: 24h, fn: sum)",
     )
@@ -201,14 +215,23 @@
         for i, record in enumerate(result.records):
             dataset[i][record.values["resource"].capitalize()] = record.values["_value"]
             dataset[i]["name"] = record.values["_time"].strftime("%Y-%m-%d")
+
     return dataset
 
 
 def get_usage_data(
-    organisation_id: int, project_id: int = None, environment_id=None
-) -> typing.List[UsageData]:
+    organisation_id: int,
+    project_id: int | None = None,
+    environment_id: int | None = None,
+    date_start: str = "-30d",
+    date_stop: str = "now()",
+) -> list[UsageData]:
     events_list = get_multiple_event_list_for_organisation(
-        organisation_id, project_id, environment_id
+        organisation_id=organisation_id,
+        project_id=project_id,
+        environment_id=environment_id,
+        date_start=date_start,
+        date_stop=date_stop,
     )
 
     return UsageDataSchema(many=True).load(events_list)
@@ -216,9 +239,9 @@ def get_usage_data(
 def get_multiple_event_list_for_feature(
     environment_id: int,
     feature_name: str,
-    period: str = "30d",
+    date_start: str = "-30d",
     aggregate_every: str = "24h",
-) -> typing.List[dict]:
+) -> list[dict]:
     """
     Get aggregated request data for the given feature in a given environment across
     all time, aggregated into time windows of length defined by the period argument.
@@ -237,14 +260,14 @@
 
     :param environment_id: an id of the environment to get usage for
     :param feature_name: the name of the feature to get usage for
-    :param period: the influx time period to filter on, e.g. 30d, 7d, etc.
+    :param date_start: the influx time period to filter on, e.g. -30d, -7d, etc.
     :param aggregate_every: the influx time period to aggregate the data by, e.g. 24h
 
     :return: a list of dicts with feature and request count in a specific environment
     """
 
     results = InfluxDBWrapper.influx_query_manager(
-        date_range=period,
+        date_start=date_start,
         filters=f'|> filter(fn:(r) => r._measurement == "feature_evaluation") \
                 |> filter(fn: (r) => r["_field"] == "request_count") \
                 |> filter(fn: (r) => r["environment_id"] == "{environment_id}") \
@@ -271,16 +294,18 @@ def get_feature_evaluation_data(
     feature_name: str, environment_id: int, period: str = "30d"
 ) -> typing.List[FeatureEvaluationData]:
     data = get_multiple_event_list_for_feature(
-        feature_name=feature_name, environment_id=environment_id, period=period
+        feature_name=feature_name,
+        environment_id=environment_id,
+        date_start=f"-{period}",
     )
     return FeatureEvaluationDataSchema(many=True).load(data)
 
 
-def get_top_organisations(date_range: str, limit: str = ""):
+def get_top_organisations(date_start: str, limit: str = ""):
     """
     Query influx db top used organisations
 
-    :param date_range: data range for top organisations
+    :param date_start: Start of the date range for top organisations
 
     :param limit: limit for query
 
@@ -289,9 +314,9 @@
     if limit:
         limit = f"|> limit(n:{limit})"
 
-    bucket = range_bucket_mappings[date_range]
+    bucket = range_bucket_mappings[date_start]
     results = InfluxDBWrapper.influx_query_manager(
-        date_range=date_range,
+        date_start=date_start,
         bucket=bucket,
         filters='|> filter(fn:(r) => r._measurement == "api_call") \
                 |> filter(fn: (r) => r["_field"] == "request_count")',
@@ -318,7 +343,7 @@
     return dataset
 
 
-def get_current_api_usage(organisation_id: int, date_range: str) -> int:
+def get_current_api_usage(organisation_id: int, date_start: str) -> int:
     """
     Query influx db for api usage
 
@@ -330,7 +355,7 @@
 
     bucket = read_bucket
     results = InfluxDBWrapper.influx_query_manager(
-        date_range=date_range,
+        date_start=date_start,
         bucket=bucket,
         filters=build_filter_string(
             [
diff --git a/api/app_analytics/serializers.py b/api/app_analytics/serializers.py
index cca9534fb460..c9512b5a7222 100644
--- a/api/app_analytics/serializers.py
+++ b/api/app_analytics/serializers.py
@@ -1,5 +1,9 @@
+import typing
+
 from rest_framework import serializers
 
+from .types import PERIOD_TYPE
+
 
 class UsageDataSerializer(serializers.Serializer):
     flags = serializers.IntegerField()
@@ -12,6 +16,12 @@ class UsageDataQuerySerializer(serializers.Serializer):
     project_id = serializers.IntegerField(required=False)
     environment_id = serializers.IntegerField(required=False)
+    period = serializers.ChoiceField(
+        choices=typing.get_args(PERIOD_TYPE),
+        allow_null=True,
+        default=None,
+        required=False,
+    )
 
 
 class UsageTotalCountSerializer(serializers.Serializer):
diff --git a/api/app_analytics/tasks.py b/api/app_analytics/tasks.py
index 4fbc203dd1bd..0df21b9d5c4e 100644
--- a/api/app_analytics/tasks.py
+++ b/api/app_analytics/tasks.py
@@ -1,7 +1,7 @@
 from datetime import datetime, timedelta
 from typing import List, Tuple
 
-from app_analytics.analytics_db_service import ANALYTICS_READ_BUCKET_SIZE
+from app_analytics.constants import ANALYTICS_READ_BUCKET_SIZE
 from django.conf import settings
 from django.db.models import Count, Q, Sum
 from django.utils import timezone
diff --git a/api/app_analytics/types.py b/api/app_analytics/types.py
new file mode 100644
index 000000000000..5472d52b0f8a
--- /dev/null
+++ b/api/app_analytics/types.py
@@ -0,0 +1,9 @@
+from typing import Literal
+
+from . import constants
+
+PERIOD_TYPE = Literal[
+    constants.CURRENT_BILLING_PERIOD,
+    constants.PREVIOUS_BILLING_PERIOD,
+    constants.NINETY_DAY_PERIOD,
+]
diff --git a/api/conftest.py b/api/conftest.py
index 4ab7d3f4adbc..0a7860e9ab96 100644
--- a/api/conftest.py
+++ b/api/conftest.py
@@ -1012,10 +1012,28 @@ def github_repository(
         "admin_master_api_key_client",
     ]
 )
-def admin_client_new(request, admin_client_original, admin_master_api_key_client):
+def admin_client_new(
+    request: pytest.FixtureRequest,
+    admin_client_original: APIClient,
+    admin_master_api_key_client: APIClient,
+) -> APIClient:
     if request.param == "admin_client_original":
         yield admin_client_original
     elif request.param == "admin_master_api_key_client":
         yield admin_master_api_key_client
     else:
         assert False, "Request param mismatch"
+
+
+@pytest.fixture()
+def superuser():
+    return FFAdminUser.objects.create_superuser(
+        email="superuser@example.com",
+        password=FFAdminUser.objects.make_random_password(),
+    )
+
+
+@pytest.fixture()
+def superuser_client(superuser: FFAdminUser, client: APIClient):
+    client.force_login(superuser, backend="django.contrib.auth.backends.ModelBackend")
+    return client
diff --git a/api/features/views.py b/api/features/views.py
index 6ac0312b8ebe..889e4218c37f 100644
--- a/api/features/views.py
+++ b/api/features/views.py
@@ -353,8 +353,12 @@ def get_influx_data(self, request, pk, project_pk):
         query_serializer = GetInfluxDataQuerySerializer(data=request.query_params)
         query_serializer.is_valid(raise_exception=True)
 
+        date_start = f"-{query_serializer.data['period']}"
         events_list = get_multiple_event_list_for_feature(
-            feature_name=feature.name, **query_serializer.data
+            feature_name=feature.name,
+            date_start=date_start,
+            environment_id=query_serializer.data["environment_id"],
+            aggregate_every=query_serializer.data["aggregate_every"],
         )
         serializer = FeatureInfluxDataSerializer(instance={"events_list": events_list})
         return Response(serializer.data)
diff --git a/api/organisations/subscription_info_cache.py b/api/organisations/subscription_info_cache.py
index b4e69d98980c..9fc0819b1d84 100644
--- a/api/organisations/subscription_info_cache.py
+++ b/api/organisations/subscription_info_cache.py
@@ -70,9 +70,9 @@ def _update_caches_with_influx_data(
     if not settings.INFLUXDB_TOKEN:
         return
 
-    for date_range, limit in (("30d", ""), ("7d", ""), ("24h", "100")):
-        key = f"api_calls_{date_range}"
-        org_calls = get_top_organisations(date_range, limit)
+    for date_start, limit in (("-30d", ""), ("-7d", ""), ("-24h", "100")):
+        key = f"api_calls_{date_start[1:]}"
+        org_calls = get_top_organisations(date_start, limit)
         for org_id, calls in org_calls.items():
             subscription_info_cache = organisation_info_cache_dict.get(org_id)
             if not subscription_info_cache:
diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py
index b9bae7ce1961..4b423d25858c 100644
--- a/api/organisations/tasks.py
+++ b/api/organisations/tasks.py
@@ -158,7 +158,7 @@ def _handle_api_usage_notifications(organisation: Organisation) -> None:
 
     period_starts_at = relativedelta(months=month_delta) + billing_starts_at
 
     days = relativedelta(now, period_starts_at).days
-    api_usage = get_current_api_usage(organisation.id, f"{days}d")
+    api_usage = get_current_api_usage(organisation.id, f"-{days}d")
 
     api_usage_percent = int(100 * api_usage / subscription_cache.allowed_30d_api_calls)
diff --git a/api/sales_dashboard/views.py b/api/sales_dashboard/views.py
index 3c4cfbfb84e5..fea1235615eb 100644
--- a/api/sales_dashboard/views.py
+++ b/api/sales_dashboard/views.py
@@ -121,7 +121,7 @@ def get_context_data(self, **kwargs):
 
 
 @staff_member_required
-def organisation_info(request, organisation_id):
+def organisation_info(request: HttpRequest, organisation_id: int) -> HttpResponse:
     organisation = get_object_or_404(
         Organisation.objects.select_related("subscription"), pk=organisation_id
     )
@@ -154,8 +154,10 @@ def organisation_info(request, organisation_id):
     date_range = request.GET.get("date_range", "180d")
     context["date_range"] = date_range
 
+    date_start = f"-{date_range}"
+    date_stop = "now()"
     event_list, labels = get_event_list_for_organisation(
-        organisation_id, date_range
+        organisation_id, date_start, date_stop
     )
     context["event_list"] = event_list
     context["traits"] = mark_safe(json.dumps(event_list["traits"]))
diff --git a/api/tests/test_helpers.py b/api/tests/test_helpers.py
index ed75aca5dca6..3608f0470818 100644
--- a/api/tests/test_helpers.py
+++ b/api/tests/test_helpers.py
@@ -36,7 +36,7 @@ def generate_segment_data(
 
 def fix_issue_3869():
     """
-    Hack to get around Pydantic issue with Freeze Gun.
+    Hack to get around Pydantic issue with FreezeGun.
 
     https://github.com/Flagsmith/flagsmith/issues/3869
     """
diff --git a/api/tests/unit/app_analytics/test_analytics_db_service.py b/api/tests/unit/app_analytics/test_analytics_db_service.py
index 76f127bda0d3..7941d3d1de4a 100644
--- a/api/tests/unit/app_analytics/test_analytics_db_service.py
+++ b/api/tests/unit/app_analytics/test_analytics_db_service.py
@@ -1,4 +1,4 @@
-from datetime import date, timedelta
+from datetime import date, datetime, timedelta
 
 import pytest
 from app_analytics.analytics_db_service import (
@@ -8,6 +8,10 @@
     get_usage_data,
     get_usage_data_from_local_db,
 )
+from app_analytics.constants import (
+    CURRENT_BILLING_PERIOD,
+    PREVIOUS_BILLING_PERIOD,
+)
 from app_analytics.models import (
     APIUsageBucket,
     FeatureEvaluationBucket,
@@ -16,9 +20,28 @@
 from django.conf import settings
 from django.utils import timezone
 from pytest_django.fixtures import SettingsWrapper
+from pytest_mock import MockerFixture
 
 from environments.models import Environment
 from features.models import Feature
+from organisations.models import (
+    Organisation,
+    OrganisationSubscriptionInformationCache,
+)
+
+
+@pytest.fixture
+def cache(organisation: Organisation) -> OrganisationSubscriptionInformationCache:
+    yield OrganisationSubscriptionInformationCache.objects.create(
+        organisation=organisation,
+        current_billing_term_starts_at=timezone.now() - timedelta(days=20),
+        current_billing_term_ends_at=timezone.now() + timedelta(days=10),
+        api_calls_24h=2000,
+        api_calls_7d=12000,
+        api_calls_30d=38000,
+        allowed_seats=5,
+        allowed_30d_api_calls=40000,
+    )
 
 
 @pytest.mark.skipif(
@@ -308,3 +331,90 @@ def test_get_feature_evaluation_data_calls_get_feature_evaluation_data_from_loca
     mocked_get_feature_evaluation_data_from_local_db.assert_called_once_with(
         feature=feature, environment_id=environment.id, period=30
     )
+
+
+@pytest.mark.parametrize(
+    "period",
+    [
+        CURRENT_BILLING_PERIOD,
+        PREVIOUS_BILLING_PERIOD,
+    ],
+)
+def test_get_usage_data_returns_empty_list_when_unset_subscription_information_cache(
+    period: str,
+    mocker: MockerFixture,
+    settings: SettingsWrapper,
+    organisation: Organisation,
+) -> None:
+    # Given
+    settings.USE_POSTGRES_FOR_ANALYTICS = True
+    mocked_get_usage_data_from_local_db = mocker.patch(
+        "app_analytics.analytics_db_service.get_usage_data_from_local_db", autospec=True
+    )
+    assert getattr(organisation, "subscription_information_cache", None) is None
+
+    # When
+    usage_data = get_usage_data(organisation, period=period)
+
+    # Then
+    assert usage_data == []
+    mocked_get_usage_data_from_local_db.assert_not_called()
+
+
+@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00")
+def test_get_usage_data_calls_get_usage_data_from_local_db_with_set_period_starts_at_with_current_billing_period(
+    mocker: MockerFixture,
+    settings: SettingsWrapper,
+    organisation: Organisation,
+    cache: OrganisationSubscriptionInformationCache,
+) -> None:
+    # Given
+    period: str = CURRENT_BILLING_PERIOD
+    settings.USE_POSTGRES_FOR_ANALYTICS = True
+    mocked_get_usage_data_from_local_db = mocker.patch(
+        "app_analytics.analytics_db_service.get_usage_data_from_local_db", autospec=True
+    )
+
+    assert getattr(organisation, "subscription_information_cache", None) == cache
+
+    # When
+    get_usage_data(organisation, period=period)
+
+    # Then
+    mocked_get_usage_data_from_local_db.assert_called_once_with(
+        organisation=organisation,
+        environment_id=None,
+        project_id=None,
+        date_start=datetime(2022, 12, 30, 9, 9, 47, 325132, tzinfo=timezone.utc),
+        date_stop=datetime(2023, 1, 19, 9, 9, 47, 325132, tzinfo=timezone.utc),
+    )
+
+
+@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00")
+def test_get_usage_data_calls_get_usage_data_from_local_db_with_set_period_starts_at_with_previous_billing_period(
+    mocker: MockerFixture,
+    settings: SettingsWrapper,
+    organisation: Organisation,
+    cache: OrganisationSubscriptionInformationCache,
+) -> None:
+    # Given
+    period: str = PREVIOUS_BILLING_PERIOD
+
+    settings.USE_POSTGRES_FOR_ANALYTICS = True
+    mocked_get_usage_data_from_local_db = mocker.patch(
+        "app_analytics.analytics_db_service.get_usage_data_from_local_db", autospec=True
+    )
+
+    assert getattr(organisation, "subscription_information_cache", None) == cache
+
+    # When
+    get_usage_data(organisation, period=period)
+
+    # Then
+    mocked_get_usage_data_from_local_db.assert_called_once_with(
+        organisation=organisation,
+        environment_id=None,
+        project_id=None,
+        date_start=datetime(2022, 11, 30, 9, 9, 47, 325132, tzinfo=timezone.utc),
+        date_stop=datetime(2022, 12, 30, 9, 9, 47, 325132, tzinfo=timezone.utc),
+    )
diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py
index 04278f7bad38..0a590737410a 100644
--- a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py
+++ b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py
@@ -1,4 +1,4 @@
-from datetime import date
+from datetime import date, timedelta
 from typing import Generator, Type
 from unittest import mock
 from unittest.mock import MagicMock
@@ -14,13 +14,18 @@
     get_feature_evaluation_data,
     get_multiple_event_list_for_feature,
     get_multiple_event_list_for_organisation,
+    get_top_organisations,
     get_usage_data,
 )
 from django.conf import settings
+from django.utils import timezone
 from influxdb_client.client.exceptions import InfluxDBError
 from influxdb_client.rest import ApiException
+from pytest_mock import MockerFixture
 from urllib3.exceptions import HTTPError
 
+from organisations.models import Organisation
+
 # Given
 org_id = 123
 env_id = 1234
@@ -178,6 +183,7 @@ def test_influx_db_query_when_get_events_list_then_query_api_called(monkeypatch)
 def test_influx_db_query_when_get_multiple_events_for_organisation_then_query_api_called(
     monkeypatch, project_id, environment_id, expected_filters
 ):
+
     expected_query = (
         (
             f'from(bucket:"{read_bucket}") '
@@ -271,7 +277,11 @@ def test_get_usage_data(mocker):
 
     # Then
     mocked_get_multiple_event_list_for_organisation.assert_called_once_with(
-        org_id, None, None
+        organisation_id=org_id,
+        environment_id=None,
+        project_id=None,
+        date_start="-30d",
+        date_stop="now()",
     )
 
     assert len(usage_data) == 2
@@ -309,7 +319,7 @@ def test_get_feature_evaluation_data(mocker):
 
     # Then
     mocked_get_multiple_event_list_for_feature.assert_called_once_with(
-        feature_name=feature_name, environment_id=env_id, period="30d"
+        feature_name=feature_name, environment_id=env_id, date_start="-30d"
     )
 
     assert len(feature_evaluation_data) == 2
@@ -319,3 +329,113 @@ def test_get_feature_evaluation_data(mocker):
     assert feature_evaluation_data[1].day == date(year=2023, month=1, day=9)
     assert feature_evaluation_data[1].count == 200
+
+
+@pytest.mark.parametrize("date_stop", ["now()", "-5d"])
+@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00")
+def test_get_event_list_for_organisation_with_date_stop_set_to_now_and_previously(
+    date_stop: str,
+    mocker: MockerFixture,
+    organisation: Organisation,
+) -> None:
+    # Given
+    now = timezone.now()
+    one_day_ago = now - timedelta(days=1)
+    two_days_ago = now - timedelta(days=2)
+
+    record_mock1 = mock.MagicMock()
+    record_mock1.__getitem__.side_effect = lambda key: {
+        "resource": "resource23",
+        "_value": 23,
+    }.get(key)
+    record_mock1.values = {"_time": one_day_ago}
+
+    record_mock2 = mock.MagicMock()
+    record_mock2.__getitem__.side_effect = lambda key: {
+        "resource": "resource24",
+        "_value": 24,
+    }.get(key)
+    record_mock2.values = {"_time": two_days_ago}
+
+    result = mock.MagicMock()
+    result.records = [record_mock1, record_mock2]
+
+    influx_mock = mocker.patch(
+        "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager"
+    )
+
+    influx_mock.return_value = [result]
+
+    # When
+    dataset, labels = get_event_list_for_organisation(
+        organisation_id=organisation.id,
+        date_stop=date_stop,
+    )
+
+    # Then
+    assert dataset == {"resource23": [23], "resource24": [24]}
+    assert labels == ["2023-01-18", "2023-01-17"]
+
+
+@pytest.mark.parametrize("limit", ["10", ""])
+def test_get_top_organisations(
+    limit: str,
+    mocker: MockerFixture,
+) -> None:
+    # Given
+    mocker.patch("app_analytics.influxdb_wrapper.range_bucket_mappings")
+
+    record_mock1 = mock.MagicMock()
+    record_mock1.values = {"organisation": "123-TestOrg"}
+    record_mock1.get_value.return_value = 23
+
+    record_mock2 = mock.MagicMock()
+    record_mock2.values = {"organisation": "456-TestCorp"}
+    record_mock2.get_value.return_value = 43
+
+    result = mock.MagicMock()
+    result.records = [record_mock1, record_mock2]
+
+    influx_mock = mocker.patch(
+        "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager"
+    )
+
+    influx_mock.return_value = [result]
+
+    # When
+    dataset = get_top_organisations(date_start="-30d", limit=limit)
+
+    # Then
+    assert dataset == {123: 23, 456: 43}
+
+
+def test_get_top_organisations_value_error(
+    mocker: MockerFixture,
+) -> None:
+    # Given
+    mocker.patch("app_analytics.influxdb_wrapper.range_bucket_mappings")
+
+    record_mock1 = mock.MagicMock()
+    record_mock1.values = {"organisation": "BadData-TestOrg"}
+    record_mock1.get_value.return_value = 23
+
+    record_mock2 = mock.MagicMock()
+    record_mock2.values = {"organisation": "456-TestCorp"}
+    record_mock2.get_value.return_value = 43
+
+    result = mock.MagicMock()
+    result.records = [record_mock1, record_mock2]
+
+    influx_mock = mocker.patch(
+        "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager"
+    )
+
+    influx_mock.return_value = [result]
+
+    # When
+    dataset = get_top_organisations(date_start="-30d")
+
+    # Then
+    # The wrongly typed data does not stop the remaining data
+    # from being returned.
+    assert dataset == {456: 43}
diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py
index 9137584dc6b5..97a87503c4ca 100644
--- a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py
+++ b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py
@@ -2,11 +2,17 @@
 from datetime import date, timedelta
 
 import pytest
+from app_analytics.constants import (
+    CURRENT_BILLING_PERIOD,
+    NINETY_DAY_PERIOD,
+    PREVIOUS_BILLING_PERIOD,
+)
 from app_analytics.dataclasses import UsageData
 from app_analytics.models import FeatureEvaluationRaw
 from app_analytics.views import SDKAnalyticsFlags
 from django.conf import settings
 from django.urls import reverse
+from django.utils import timezone
 from pytest_django.fixtures import SettingsWrapper
 from pytest_mock import MockerFixture
 from rest_framework import status
@@ -15,6 +21,10 @@
 from environments.identities.models import Identity
 from environments.models import Environment
 from features.models import Feature
+from organisations.models import (
+    Organisation,
+    OrganisationSubscriptionInformationCache,
+)
 
 
 def test_sdk_analytics_does_not_allow_bad_data(mocker, settings, environment):
@@ -66,6 +76,7 @@ def test_sdk_analytics_allows_valid_data(mocker, settings, environment, feature)
 def test_get_usage_data(mocker, admin_client, organisation):
     # Given
     url = reverse("api-v1:organisations:usage-data", args=[organisation.id])
+
     mocked_get_usage_data = mocker.patch(
         "app_analytics.views.get_usage_data",
         autospec=True,
@@ -96,7 +107,190 @@ def test_get_usage_data(mocker, admin_client, organisation):
             "environment_document": 0,
         },
     ]
-    mocked_get_usage_data.assert_called_once_with(organisation)
+    mocked_get_usage_data.assert_called_once_with(organisation, period=None)
+
+
+@pytest.mark.freeze_time("2024-04-30T09:09:47.325132+00:00")
+def test_get_usage_data__current_billing_period(
+    mocker: MockerFixture,
+    admin_client_new: APIClient,
+    organisation: Organisation,
+) -> None:
+    # Given
+    url = reverse("api-v1:organisations:usage-data", args=[organisation.id])
+    url += f"?period={CURRENT_BILLING_PERIOD}"
+
+    mocked_get_usage_data = mocker.patch(
+        "app_analytics.analytics_db_service.get_usage_data_from_influxdb",
+        autospec=True,
+        return_value=[
+            UsageData(flags=10, day=date.today()),
+            UsageData(flags=10, day=date.today() - timedelta(days=1)),
+        ],
+    )
+
+    now = timezone.now()
+    week_from_now = now + timedelta(days=7)
+    four_weeks_ago = now - timedelta(days=28)
+
+    OrganisationSubscriptionInformationCache.objects.create(
+        organisation=organisation,
+        current_billing_term_starts_at=four_weeks_ago,
+        current_billing_term_ends_at=week_from_now,
+        allowed_30d_api_calls=1_000_000,
+    )
+
+    # When
+    response = admin_client_new.get(url)
+
+    # Then
+    assert response.status_code == status.HTTP_200_OK
+    assert response.json() == [
+        {
+            "flags": 10,
+            "day": str(date.today()),
+            "identities": 0,
+            "traits": 0,
+            "environment_document": 0,
+        },
+        {
+            "flags": 10,
+            "day": str(date.today() - timedelta(days=1)),
+            "identities": 0,
+            "traits": 0,
+            "environment_document": 0,
+        },
+    ]
+
+    mocked_get_usage_data.assert_called_once_with(
+        organisation_id=organisation.id,
+        environment_id=None,
+        project_id=None,
+        date_start="-28d",
+        date_stop="now()",
+    )
+
+
+@pytest.mark.freeze_time("2024-04-30T09:09:47.325132+00:00")
+def test_get_usage_data__previous_billing_period(
+    mocker: MockerFixture,
+    admin_client_new: APIClient,
+    organisation: Organisation,
+) -> None:
+    # Given
+    url = reverse("api-v1:organisations:usage-data", args=[organisation.id])
+    url += f"?period={PREVIOUS_BILLING_PERIOD}"
+
+    mocked_get_usage_data = mocker.patch(
+        "app_analytics.analytics_db_service.get_usage_data_from_influxdb",
+        autospec=True,
+        return_value=[
+            UsageData(flags=10, day=date.today() - timedelta(days=29)),
+            UsageData(flags=10, day=date.today() - timedelta(days=30)),
+        ],
+    )
+
+    now = timezone.now()
+    week_from_now = now + timedelta(days=7)
+    four_weeks_ago = now - timedelta(days=28)
+
+    OrganisationSubscriptionInformationCache.objects.create(
+        organisation=organisation,
+        current_billing_term_starts_at=four_weeks_ago,
+        current_billing_term_ends_at=week_from_now,
+        allowed_30d_api_calls=1_000_000,
+    )
+
+    # When
+    response = admin_client_new.get(url)
+
+    # Then
+    assert response.status_code == status.HTTP_200_OK
+    assert response.json() == [
+        {
+            "flags": 10,
+            "day": str(date.today() - timedelta(days=29)),
+            "identities": 0,
+            "traits": 0,
+            "environment_document": 0,
+        },
+        {
+            "flags": 10,
+            "day": str(date.today() - timedelta(days=30)),
+            "identities": 0,
+            "traits": 0,
+            "environment_document": 0,
+        },
+    ]
+
+    mocked_get_usage_data.assert_called_once_with(
+        organisation_id=organisation.id,
+        environment_id=None,
+        project_id=None,
+        date_start="-59d",
+        date_stop="-28d",
+    )
+
+
+@pytest.mark.freeze_time("2024-04-30T09:09:47.325132+00:00")
+def test_get_usage_data__ninety_day_period(
+    mocker: MockerFixture,
+    admin_client_new: APIClient,
+    organisation: Organisation,
+) -> None:
+    # Given
+    url = reverse("api-v1:organisations:usage-data", args=[organisation.id])
+    url += f"?period={NINETY_DAY_PERIOD}"
+
+    mocked_get_usage_data = mocker.patch(
+        "app_analytics.analytics_db_service.get_usage_data_from_influxdb",
+        autospec=True,
+        return_value=[
+            UsageData(flags=10, day=date.today()),
+            UsageData(flags=10, day=date.today() - timedelta(days=1)),
+        ],
+    )
+
+    now = timezone.now()
+    week_from_now = now + timedelta(days=7)
+    four_weeks_ago = now - timedelta(days=28)
+
+    OrganisationSubscriptionInformationCache.objects.create(
+        organisation=organisation,
+        current_billing_term_starts_at=four_weeks_ago,
+        current_billing_term_ends_at=week_from_now,
+        allowed_30d_api_calls=1_000_000,
+    )
+
+    # When
+    response = admin_client_new.get(url)
+
+    # Then
+    assert response.status_code == status.HTTP_200_OK
+    assert response.json() == [
+        {
+            "flags": 10,
+            "day": str(date.today()),
+            "identities": 0,
+            "traits": 0,
+            "environment_document": 0,
+        },
+        {
+            "flags": 10,
+            "day": str(date.today() - timedelta(days=1)),
+            "identities": 0,
+            "traits": 0,
+            "environment_document": 0,
+        },
+    ]
+
+    mocked_get_usage_data.assert_called_once_with(
+        organisation_id=organisation.id,
+        environment_id=None,
+        project_id=None,
+        date_start="-90d",
+        date_stop="now()",
+    )
 
 
 def test_get_usage_data_for_non_admin_user_returns_403(
diff --git a/api/tests/unit/features/test_unit_features_views.py b/api/tests/unit/features/test_unit_features_views.py
index c579d1ab0af9..20f15cf00b6a 100644
--- a/api/tests/unit/features/test_unit_features_views.py
+++ b/api/tests/unit/features/test_unit_features_views.py
@@ -455,7 +455,7 @@ def test_get_project_features_influx_data(
     mock_get_event_list.assert_called_once_with(
         feature_name=feature.name,
         environment_id=str(environment.id),  # provided as a GET param
-        period="24h",  # this is the default but can be provided as a GET param
+        date_start="-24h",  # this is the default but can be provided as a GET param
         aggregate_every="24h",  # this is the default but can be provided as a GET param
     )
diff --git a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py
index 6c3edf1d8b7f..2cbe1daee0d8 100644
--- a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py
+++ b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py
@@ -15,7 +15,7 @@ def test_update_caches(mocker, organisation, chargebee_subscription, settings):
         "organisations.subscription_info_cache.get_top_organisations"
     )
     mocked_get_top_organisations.side_effect = lambda t, _: {
-        organisation.id: organisation_usage.get(t)
+        organisation.id: organisation_usage.get(f"{t[1:]}")
     }
 
     chargebee_metadata = ChargebeeObjMetadata(seats=15, api_calls=1000000)
@@ -59,7 +59,7 @@ def test_update_caches(mocker, organisation, chargebee_subscription, settings):
 
     assert mocked_get_top_organisations.call_count == 3
     assert [call[0] for call in mocked_get_top_organisations.call_args_list] == [
-        ("30d", ""),
-        ("7d", ""),
-        ("24h", "100"),
+        ("-30d", ""),
+        ("-7d", ""),
+        ("-24h", "100"),
     ]
diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py
index 8082698ef5c7..cac9b9197b69 100644
--- a/api/tests/unit/organisations/test_unit_organisations_tasks.py
+++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py
@@ -312,7 +312,7 @@ def test_handle_api_usage_notifications_below_100(
     handle_api_usage_notifications()
 
     # Then
-    mock_api_usage.assert_called_once_with(organisation.id, "14d")
+    mock_api_usage.assert_called_once_with(organisation.id, "-14d")
     assert len(mailoutbox) == 1
 
     email = mailoutbox[0]
@@ -405,7 +405,7 @@ def test_handle_api_usage_notifications_above_100(
     handle_api_usage_notifications()
 
     # Then
-    mock_api_usage.assert_called_once_with(organisation.id, "14d")
+    mock_api_usage.assert_called_once_with(organisation.id, "-14d")
     assert len(mailoutbox) == 1
 
     email = mailoutbox[0]
diff --git a/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py b/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py
index 889e42b9655a..64ea8465bf4a 100644
--- a/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py
+++ b/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py
@@ -1,5 +1,9 @@
 import pytest
 from django.test import RequestFactory
+from django.urls import reverse
+from pytest_django.fixtures import SettingsWrapper
+from pytest_mock import MockerFixture
+from rest_framework.test import APIClient
 
 from organisations.models import (
     Organisation,
@@ -30,3 +34,32 @@ def test_organisation_subscription_get_api_call_overage(
     result = view.get_queryset().get(pk=organisation.id)
 
     assert result.overage == expected_overage
+
+
+def test_get_organisation_info__get_event_list_for_organisation(
+    organisation: Organisation,
+    superuser_client: APIClient,
+    settings: SettingsWrapper,
+    mocker: MockerFixture,
+) -> None:
+    # Given
+    settings.INFLUXDB_TOKEN = "AFancyToken"
+
+    url = reverse("sales_dashboard:organisation_info", args=[organisation.id])
+
+    event_list_mock = mocker.patch(
+        "sales_dashboard.views.get_event_list_for_organisation"
+    )
+    event_list_mock.return_value = (
+        {"traits": [], "identities": [], "flags": [], "environment-document": []},
+        ["label1", "label2"],
+    )
+    mocker.patch("sales_dashboard.views.get_events_for_organisation")
+
+    # When
+    response = superuser_client.get(url)
+
+    # Then
+    assert "label1" in str(response.content)
+    assert "label2" in str(response.content)
+    event_list_mock.assert_called_once_with(organisation.id, "-180d", "now()")
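
A minimal sketch of the billing-period arithmetic performed by the new match block in get_usage_data(), using the same frozen clock (2023-01-19) and term start (2022-12-30) as the unit tests above; the variable names here are illustrative, not the shipped code:

from datetime import datetime, timezone

from dateutil.relativedelta import relativedelta

now = datetime(2023, 1, 19, 9, 9, 47, tzinfo=timezone.utc)
starts_at = datetime(2022, 12, 30, 9, 9, 47, tzinfo=timezone.utc)

# Current billing period: the most recent monthly anniversary of starts_at.
month_delta = relativedelta(now, starts_at).months             # 0 full months
current_start = relativedelta(months=month_delta) + starts_at  # 2022-12-30
assert (now - current_start).days == 20                        # Influx "-20d"

# Previous billing period: one anniversary earlier, exactly one month long.
prev_delta = month_delta - 1 + relativedelta(now, starts_at).years * 12
prev_start = relativedelta(months=prev_delta) + starts_at      # 2022-11-30
prev_end = relativedelta(months=prev_delta + 1) + starts_at    # 2022-12-30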
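
For reference, influx_query_manager() now splices date_start and date_stop into the Flux range() call verbatim, which is why callers pass offsets like "-59d" or the literal "now()". A sketch of the resulting query string, assuming a hypothetical bucket name:

bucket = "my_bucket"  # hypothetical; the real bucket comes from module config
date_start, date_stop = "-59d", "-28d"  # e.g. a previous billing period
query = (
    f'from(bucket:"{bucket}")'
    f" |> range(start: {date_start}, stop: {date_stop})"
    ' |> filter(fn:(r) => r._measurement == "api_call")'
)
# -> from(bucket:"my_bucket") |> range(start: -59d, stop: -28d) |> filter(...)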
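
The label-count logic in get_event_list_for_organisation() can be read as a small pure function over those relative date strings; a sketch (hypothetical helper, not in the patch), checked against the ranges the tests exercise:

def required_records(date_start: str, date_stop: str) -> int:
    # "-30d" -> 30 whole days, plus one bucket for the current partial day.
    if date_stop == "now()":
        return abs(int(date_start[:-1])) + 1
    return abs(int(date_start[:-1])) - abs(int(date_stop[:-1])) + 1

assert required_records("-30d", "now()") == 31
assert required_records("-59d", "-28d") == 32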
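
Finally, UsageDataQuerySerializer derives its period choices from the PERIOD_TYPE Literal rather than a hand-maintained list, so the serializer and the type cannot drift apart. A self-contained sketch of the mechanism:

import typing
from typing import Literal

CURRENT_BILLING_PERIOD = "current_billing_period"
PREVIOUS_BILLING_PERIOD = "previous_billing_period"
NINETY_DAY_PERIOD = "90_day_period"

PERIOD_TYPE = Literal[
    CURRENT_BILLING_PERIOD,
    PREVIOUS_BILLING_PERIOD,
    NINETY_DAY_PERIOD,
]

# typing.get_args() unpacks the Literal's members, which is what the
# serializer feeds to ChoiceField(choices=...).
assert typing.get_args(PERIOD_TYPE) == (
    "current_billing_period",
    "previous_billing_period",
    "90_day_period",
)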