feat: Create split testing for multivariate (#3235)
Co-authored-by: Matthew Elwell <matthew.elwell@flagsmith.com>
zachaysan and matthewelwell authored Feb 12, 2024
1 parent 5e84a05 commit ad3ce0e
Showing 13 changed files with 377 additions and 13 deletions.
37 changes: 34 additions & 3 deletions api/api/urls/v1.py
@@ -1,6 +1,7 @@
from app_analytics.views import SDKAnalyticsFlags, SelfHostedTelemetryAPIView
from django.conf import settings
from django.conf.urls import url
from django.urls import include
from django.urls import include, path
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from rest_framework import authentication, permissions, routers
@@ -47,8 +48,12 @@
url(r"^flags/$", SDKFeatureStates.as_view(), name="flags"),
url(r"^identities/$", SDKIdentities.as_view(), name="sdk-identities"),
url(r"^traits/", include(traits_router.urls), name="traits"),
url(r"^analytics/flags/$", SDKAnalyticsFlags.as_view()),
url(r"^analytics/telemetry/$", SelfHostedTelemetryAPIView.as_view()),
url(r"^analytics/flags/$", SDKAnalyticsFlags.as_view(), name="analytics-flags"),
url(
r"^analytics/telemetry/$",
SelfHostedTelemetryAPIView.as_view(),
name="analytics-telemetry",
),
url(
r"^environment-document/$",
SDKEnvironmentAPIView.as_view(),
@@ -67,3 +72,29 @@
name="schema-swagger-ui",
),
]

if settings.SPLIT_TESTING_INSTALLED:
from split_testing.views import (
ConversionEventTypeView,
CreateConversionEventView,
SplitTestViewSet,
)

split_testing_router = routers.DefaultRouter()
split_testing_router.register(r"", SplitTestViewSet, basename="split-tests")
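# Note: registering the viewset with an empty prefix places its routes
# directly under the ^split-testing/ include below.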

urlpatterns += [
url(
r"^split-testing/", include(split_testing_router.urls), name="split-testing"
),
url(
r"^split-testing/conversion-events/",
CreateConversionEventView.as_view(),
name="conversion-events",
),
path(
"conversion_event_types/",
ConversionEventTypeView.as_view(),
name="conversion-event-types",
),
]
8 changes: 8 additions & 0 deletions api/api/urls/v2.py
@@ -0,0 +1,8 @@
from app_analytics.views import SDKAnalyticsFlagsV2
from django.conf.urls import url

app_name = "v2"

urlpatterns = [
url(r"^analytics/flags/$", SDKAnalyticsFlagsV2.as_view(), name="analytics-flags")
]
5 changes: 5 additions & 0 deletions api/app/settings/common.py
@@ -1152,3 +1152,8 @@

WEBHOOK_BACKOFF_BASE = env.int("WEBHOOK_BACKOFF_BASE", default=2)
WEBHOOK_BACKOFF_RETRIES = env.int("WEBHOOK_BACKOFF_RETRIES", default=3)

# Split Testing settings
SPLIT_TESTING_INSTALLED = importlib.util.find_spec("split_testing")
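# find_spec() returns a ModuleSpec when the optional split_testing package is
# importable and None otherwise, so this setting also serves as a truthy flag
# (see the SPLIT_TESTING_INSTALLED check in api/api/urls/v1.py).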
if SPLIT_TESTING_INSTALLED:
INSTALLED_APPS += ("split_testing",)
1 change: 1 addition & 0 deletions api/app/urls.py
@@ -13,6 +13,7 @@
urlpatterns = [
url(r"^api/v1/", include("api.urls.deprecated", namespace="api-deprecated")),
url(r"^api/v1/", include("api.urls.v1", namespace="api-v1")),
url(r"^api/v2/", include("api.urls.v2", namespace="api-v2")),
url(r"^admin/", admin.site.urls),
url(r"^health", include("health_check.urls", namespace="health")),
url(r"^version", views.version_info, name="version-info"),
New file (name not shown in this view): a Django migration under api/app_analytics/migrations/.
@@ -0,0 +1,41 @@
# Generated by Django 3.2.23 on 2024-01-02 16:35

from django.db import migrations, models
from core.migration_helpers import PostgresOnlyRunSQL


class Migration(migrations.Migration):
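# CREATE INDEX CONCURRENTLY cannot run inside a transaction block in
# PostgreSQL, so the migration opts out of Django's default atomic wrapper.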

atomic = False

dependencies = [
('app_analytics', '0001_initial'),
]

operations = [
migrations.AddField(
model_name='featureevaluationraw',
name='identity_identifier',
field=models.CharField(default=None, max_length=2000, null=True),
),
migrations.AddField(
model_name='featureevaluationraw',
name='enabled_when_evaluated',
field=models.BooleanField(null=True, default=None),
),
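# Keep Django's model state in sync (db_index=True) while the index itself
# is created concurrently on Postgres to avoid locking the table.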
migrations.SeparateDatabaseAndState(
state_operations=[
migrations.AlterField(
model_name='featureevaluationraw',
name='feature_name',
field=models.CharField(db_index=True, max_length=2000),
),
],
database_operations=[
PostgresOnlyRunSQL(
'CREATE INDEX CONCURRENTLY "app_analytics_featureevaluationraw_feature_name_idx" ON "app_analytics_featureevaluationraw" ("feature_name");',
reverse_sql='DROP INDEX CONCURRENTLY "app_analytics_featureevaluationraw_feature_name_idx";',
)
],
),
]
6 changes: 5 additions & 1 deletion api/app_analytics/models.py
@@ -73,11 +73,15 @@ def check_overlapping_buckets(self):


class FeatureEvaluationRaw(models.Model):
feature_name = models.CharField(max_length=2000)
feature_name = models.CharField(db_index=True, max_length=2000)
environment_id = models.PositiveIntegerField()
evaluation_count = models.IntegerField(default=0)
created_at = models.DateTimeField(auto_now_add=True)

# Both stored for tracking multivariate split testing.
identity_identifier = models.CharField(max_length=2000, null=True, default=None)
enabled_when_evaluated = models.BooleanField(null=True, default=None)


class FeatureEvaluationBucket(AbstractBucket):
feature_name = models.CharField(max_length=2000)
11 changes: 11 additions & 0 deletions api/app_analytics/serializers.py
@@ -16,3 +16,14 @@ class UsageDataQuerySerializer(serializers.Serializer):

class UsageTotalCountSerializer(serializers.Serializer):
count = serializers.IntegerField()


class SDKAnalyticsFlagsSerializerDetail(serializers.Serializer):
feature_name = serializers.CharField()
identity_identifier = serializers.CharField(required=False, default=None)
enabled_when_evaluated = serializers.BooleanField()
count = serializers.IntegerField()


class SDKAnalyticsFlagsSerializer(serializers.Serializer):
evaluations = SDKAnalyticsFlagsSerializerDetail(many=True)
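
For illustration, a request an updated SDK might send to the new v2 endpoint, matching this serializer. This is a minimal sketch: the host, key value, and feature details are placeholders, and it assumes Flagsmith's usual X-Environment-Key header for EnvironmentKeyAuthentication.

import requests

# Hypothetical payload for POST /api/v2/analytics/flags/
payload = {
    "evaluations": [
        {
            "feature_name": "my_feature",       # must belong to the environment's project
            "identity_identifier": "user-123",  # optional; defaults to None
            "enabled_when_evaluated": True,
            "count": 5,
        }
    ]
}

requests.post(
    "https://api.example.com/api/v2/analytics/flags/",
    json=payload,
    headers={"X-Environment-Key": "<client-side environment key>"},
)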
23 changes: 22 additions & 1 deletion api/app_analytics/tasks.py
@@ -62,7 +62,28 @@ def clean_up_old_analytics_data():


@register_task_handler()
def track_feature_evaluation(environment_id, feature_evaluations):
def track_feature_evaluation_v2(
environment_id: int, feature_evaluations: list[dict[str, int | str | bool]]
) -> None:
feature_evaluation_objects = []
for feature_evaluation in feature_evaluations:
feature_evaluation_objects.append(
FeatureEvaluationRaw(
environment_id=environment_id,
feature_name=feature_evaluation["feature_name"],
evaluation_count=feature_evaluation["count"],
identity_identifier=feature_evaluation["identity_identifier"],
enabled_when_evaluated=feature_evaluation["enabled_when_evaluated"],
)
)
FeatureEvaluationRaw.objects.bulk_create(feature_evaluation_objects)


@register_task_handler()
def track_feature_evaluation(
environment_id: int,
feature_evaluations: dict[str, int],
) -> None:
feature_evaluation_objects = []
for feature_name, evaluation_count in feature_evaluations.items():
feature_evaluation_objects.append(
30 changes: 29 additions & 1 deletion api/app_analytics/track.py
@@ -8,6 +8,7 @@
from six.moves.urllib.parse import quote # python 2/3 compatible urllib import

from environments.models import Environment
from task_processor.decorators import register_task_handler
from util.util import postpone

logger = logging.getLogger(__name__)
@@ -125,7 +126,10 @@ def track_request_influxdb(request):
influxdb.write()


def track_feature_evaluation_influxdb(environment_id, feature_evaluations):
@register_task_handler()
def track_feature_evaluation_influxdb(
environment_id: int, feature_evaluations: dict[str, int]
) -> None:
"""
Sends Feature analytics event data to InfluxDB
@@ -139,3 +143,27 @@ def track_feature_evaluation_influxdb(environment_id, feature_evaluations):
influxdb.add_data_point("request_count", evaluation_count, tags=tags)

influxdb.write()


@register_task_handler()
def track_feature_evaluation_influxdb_v2(
environment_id: int, feature_evaluations: list[dict[str, int | str | bool]]
) -> None:
"""
Sends Feature analytics event data to InfluxDB
:param environment_id: (int) the id of the environment the feature is being evaluated within
:param feature_evaluations: (list) A collection of feature evaluations including feature name / evaluation counts.
"""
influxdb = InfluxDBWrapper("feature_evaluation")

for feature_evaluation in feature_evaluations:
feature_name = feature_evaluation["feature_name"]
evaluation_count = feature_evaluation["count"]

# Note that "feature_id" is misnamed, as it actually holds the name of
# the feature. This was done to match existing behaviour.
tags = {"feature_id": feature_name, "environment_id": environment_id}
influxdb.add_data_point("request_count", evaluation_count, tags=tags)

influxdb.write()
86 changes: 82 additions & 4 deletions api/app_analytics/views.py
@@ -4,15 +4,22 @@
get_total_events_count,
get_usage_data,
)
from app_analytics.tasks import track_feature_evaluation
from app_analytics.track import track_feature_evaluation_influxdb
from app_analytics.tasks import (
track_feature_evaluation,
track_feature_evaluation_v2,
)
from app_analytics.track import (
track_feature_evaluation_influxdb,
track_feature_evaluation_influxdb_v2,
)
from django.conf import settings
from drf_yasg.utils import swagger_auto_schema
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.fields import IntegerField
from rest_framework.generics import CreateAPIView, GenericAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import Serializer
from telemetry.serializers import TelemetrySerializer
@@ -24,6 +31,7 @@

from .permissions import UsageDataPermission
from .serializers import (
SDKAnalyticsFlagsSerializer,
UsageDataQuerySerializer,
UsageDataSerializer,
UsageTotalCountSerializer,
@@ -32,6 +40,61 @@
logger = logging.getLogger(__name__)


class SDKAnalyticsFlagsV2(CreateAPIView):
permission_classes = (EnvironmentKeyPermissions,)
authentication_classes = (EnvironmentKeyAuthentication,)
serializer_class = SDKAnalyticsFlagsSerializer
throttle_classes = []

def create(self, request: Request, *args, **kwargs) -> Response:
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)

self.evaluations = serializer.validated_data["evaluations"]
if not self._is_data_valid():
return Response(
{"detail": "Invalid feature names associated with the project."},
content_type="application/json",
status=status.HTTP_400_BAD_REQUEST,
)
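# Hand the write off to the task processor: Postgres-backed analytics when
# enabled, otherwise InfluxDB.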
if settings.USE_POSTGRES_FOR_ANALYTICS:
track_feature_evaluation_v2.delay(
args=(
request.environment.id,
self.evaluations,
)
)
elif settings.INFLUXDB_TOKEN:
track_feature_evaluation_influxdb_v2.delay(
args=(
request.environment.id,
self.evaluations,
)
)

return Response(status=status.HTTP_204_NO_CONTENT)

def _is_data_valid(self) -> bool:
environment_feature_names = set(
FeatureState.objects.filter(
environment=self.request.environment,
feature_segment=None,
identity=None,
).values_list("feature__name", flat=True)
)

valid = True
for evaluation in self.evaluations:
if evaluation["feature_name"] in environment_feature_names:
continue
logger.warning(
f"Feature {evaluation['feature_name']} does not belong to project"
)
valid = False

return valid


class SDKAnalyticsFlags(GenericAPIView):
"""
Class to handle flag analytics events
@@ -65,6 +128,10 @@ def get_fields(self):
def post(self, request, *args, **kwargs):
"""
Send flag evaluation events from the SDK back to the API for reporting.
TODO: Eventually replace this with the v2 version of
this endpoint once SDKs have been updated.
"""
is_valid = self._is_data_valid()
if not is_valid:
@@ -74,10 +141,21 @@ def post(self, request, *args, **kwargs):
content_type="application/json",
status=status.HTTP_200_OK,
)

if settings.USE_POSTGRES_FOR_ANALYTICS:
track_feature_evaluation.delay(args=(request.environment.id, request.data))
track_feature_evaluation.delay(
args=(
request.environment.id,
request.data,
)
)
elif settings.INFLUXDB_TOKEN:
track_feature_evaluation_influxdb(request.environment.id, request.data)
track_feature_evaluation_influxdb.delay(
args=(
request.environment.id,
request.data,
)
)

return Response(status=status.HTTP_200_OK)

1 change: 1 addition & 0 deletions api/integrations/sentry/samplers.py
@@ -10,6 +10,7 @@
"/api/v1/traits/bulk",
"/api/v1/environment-document",
"/api/v1/analytics/flags",
"/api/v2/analytics/flags",
}


2 changes: 1 addition & 1 deletion api/tests/unit/app_analytics/test_models.py
@@ -10,7 +10,7 @@

if "analytics" not in settings.DATABASES:
pytest.skip(
"Skip test if analytics database is configured", allow_module_level=True
"Skip test if analytics database is not configured", allow_module_level=True
)


(1 of the 13 changed files is not shown in this view.)
