feat: Integrate Grafana
 - Make Grafana integration project-level
 - Add `get_audited_instance_from_audit_log_record` service
 - Add tags, including feature tags
 - Improve docs
khvn26 committed Jun 23, 2024
1 parent 6c185c2 commit e5272ee
Showing 19 changed files with 645 additions and 289 deletions.
4 changes: 4 additions & 0 deletions api/audit/models.py
@@ -90,6 +90,10 @@ def history_record(self) -> typing.Optional[Model]:
         klass = self.get_history_record_model_class(self.history_record_class_path)
         return klass.objects.filter(history_id=self.history_record_id).first()
 
+    @property
+    def project_name(self) -> str:
+        return getattr(self.project, "name", "unknown")
+
     @property
     def environment_name(self) -> str:
         return getattr(self.environment, "name", "unknown")
63 changes: 63 additions & 0 deletions api/audit/services.py
@@ -0,0 +1,63 @@
from core.models import AbstractBaseAuditableModel

from audit.models import AuditLog
from audit.related_object_type import RelatedObjectType
from features.models import Feature, FeatureState
from features.versioning.models import EnvironmentFeatureVersion


def get_audited_instance_from_audit_log_record(
audit_log_record: AuditLog,
) -> AbstractBaseAuditableModel | None:
"""
Given an `AuditLog` model instance, return a model instance that produced the log.
"""
    # There are currently four (sigh) ways an audit log record is created:
# 1. Historical record
# 2. Segment priorities changed
# 3. Change request
# 4. Environment feature version published

# Try getting the historical record first.
if history_record := audit_log_record.history_record:
return history_record.instance

# Try to infer the model class from `AuditLog.related_object_type`.
match audit_log_record.related_object_type:
# Assume segment priorities change.
case RelatedObjectType.FEATURE.name:
return (
Feature.objects.all_with_deleted()
.filter(
pk=audit_log_record.related_object_id,
project=audit_log_record.project,
)
.first()
)

# Assume change request.
case RelatedObjectType.FEATURE_STATE.name:
return (
FeatureState.objects.all_with_deleted()
.filter(
pk=audit_log_record.related_object_id,
environment=audit_log_record.environment,
)
.first()
)

# Assume environment feature version.
case RelatedObjectType.EF_VERSION.name:
return (
EnvironmentFeatureVersion.objects.all_with_deleted()
.filter(
uuid=audit_log_record.related_object_uuid,
environment=audit_log_record.environment,
)
.first()
)

# All known audit log sources exhausted by now.
# Since `RelatedObjectType` is not a 1:1 mapping to a model class,
# generalised heuristics might be dangerous.
return None
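
A minimal usage sketch of the new service (illustrative only; the queryset and variable names below are assumptions, not part of this commit):

```
from audit.models import AuditLog
from audit.services import get_audited_instance_from_audit_log_record

# Resolve the concrete model instances behind the latest audit log records.
for record in AuditLog.objects.order_by("-created_date")[:10]:
    instance = get_audited_instance_from_audit_log_record(record)
    if instance is None:
        # Unknown source: RelatedObjectType is not a 1:1 mapping to a model.
        continue
    print(f"{record.log} -> {type(instance).__name__}")
```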
6 changes: 3 additions & 3 deletions api/core/models.py
@@ -98,7 +98,7 @@ class Meta:
     return BaseHistoricalModel
 
 
-class _AbstractBaseAuditableModel(models.Model):
+class AbstractBaseAuditableModel(models.Model):
     """
     A base Model class that all models we want to be included in the audit log should inherit from.
@@ -196,8 +196,8 @@ def abstract_base_auditable_model_factory(
     historical_records_excluded_fields: typing.List[str] = None,
     change_details_excluded_fields: typing.Sequence[str] = None,
     show_change_details_for_create: bool = False,
-) -> typing.Type[_AbstractBaseAuditableModel]:
-    class Base(_AbstractBaseAuditableModel):
+) -> typing.Type[AbstractBaseAuditableModel]:
+    class Base(AbstractBaseAuditableModel):
         history = HistoricalRecords(
             bases=[
                 base_historical_model_factory(
4 changes: 2 additions & 2 deletions api/core/signals.py
@@ -1,4 +1,4 @@
-from core.models import _AbstractBaseAuditableModel
+from core.models import AbstractBaseAuditableModel
 from django.conf import settings
 from django.utils import timezone
 from simple_history.models import HistoricalRecords
@@ -9,7 +9,7 @@
 
 
 def create_audit_log_from_historical_record(
-    instance: _AbstractBaseAuditableModel,
+    instance: AbstractBaseAuditableModel,
     history_user: FFAdminUser,
     history_instance,
     **kwargs,
35 changes: 15 additions & 20 deletions api/integrations/grafana/grafana.py
@@ -1,46 +1,41 @@
 import json
 import logging
-import time
+from typing import Any
 
 import requests
 
 from audit.models import AuditLog
 from integrations.common.wrapper import AbstractBaseEventIntegrationWrapper
+from integrations.grafana.mappers import (
+    map_audit_log_record_to_grafana_annotation,
+)
 
 logger = logging.getLogger(__name__)
 
-ANNOTATIONS_API_URI = "api/annotations"
+ROUTE_API_ANNOTATIONS = "/api/annotations"
 
 
 class GrafanaWrapper(AbstractBaseEventIntegrationWrapper):
     def __init__(self, base_url: str, api_key: str) -> None:
-        self.url = f"{base_url}{ANNOTATIONS_API_URI}"
+        base_url = base_url[:-1] if base_url.endswith("/") else base_url
+        self.url = f"{base_url}{ROUTE_API_ANNOTATIONS}"
         self.api_key = api_key
 
     @staticmethod
-    def generate_event_data(audit_log_record: AuditLog) -> dict:
-        log = audit_log_record.log
-        email = audit_log_record.author_identifier
-
-        epoch_time_in_milliseconds = round(time.time() * 1000)
-
-        return {
-            "text": f"{log} by user {email}",
-            "dashboardUID": "",
-            "tags": ["Flagsmith Event"],
-            "time": epoch_time_in_milliseconds,
-            "timeEnd": epoch_time_in_milliseconds,
-        }
+    def generate_event_data(audit_log_record: AuditLog) -> dict[str, Any]:
+        return map_audit_log_record_to_grafana_annotation(audit_log_record)
 
-    def _headers(self) -> dict:
+    def _headers(self) -> dict[str, str]:
         return {
             "Content-Type": "application/json",
-            "Authorization": "Bearer %s" % self.api_key,
+            "Authorization": f"Bearer {self.api_key}",
         }
 
-    def _track_event(self, event: dict) -> None:
+    def _track_event(self, event: dict[str, Any]) -> None:
         response = requests.post(
-            self.url, headers=self._headers(), data=json.dumps(event)
+            url=self.url,
+            headers=self._headers(),
+            data=json.dumps(event),
        )
 
         logger.debug(
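
For context, a hedged sketch of how the reworked wrapper is driven end to end; the `grafana_config` lookup mirrors the `related_name` added in this commit, while the direct `_track_event` call stands in for the dispatch normally handled by the common integration wrapper:

```
from integrations.grafana.grafana import GrafanaWrapper

# Assumes `audit_log_record` is an AuditLog instance whose project has a
# GrafanaConfiguration (OneToOne related_name "grafana_config").
config = audit_log_record.project.grafana_config
wrapper = GrafanaWrapper(base_url=config.base_url, api_key=config.api_key)

# generate_event_data() now delegates to the mapper and returns a
# GrafanaAnnotation-shaped dict; posting is shown directly for illustration.
annotation = wrapper.generate_event_data(audit_log_record)
wrapper._track_event(annotation)
```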
72 changes: 72 additions & 0 deletions api/integrations/grafana/mappers.py
@@ -0,0 +1,72 @@
from audit.models import AuditLog
from audit.services import get_audited_instance_from_audit_log_record
from features.models import (
Feature,
FeatureSegment,
FeatureState,
FeatureStateValue,
)
from integrations.grafana.types import GrafanaAnnotation
from segments.models import Segment


def _get_feature_tags(
feature: Feature,
) -> list[str]:
return list(feature.tags.values_list("label", flat=True))


def _get_instance_tags_from_audit_log_record(
audit_log_record: AuditLog,
) -> list[str]:
if instance := get_audited_instance_from_audit_log_record(audit_log_record):
if isinstance(instance, Feature):
return [
f"feature:{instance.name}",
*_get_feature_tags(instance),
]

if isinstance(instance, FeatureState):
return [
f"feature:{(feature := instance.feature).name}",
f'flag:{"enabled" if instance.enabled else "disabled"}',
*_get_feature_tags(feature),
]

if isinstance(instance, FeatureStateValue):
return [
f"feature:{(feature := instance.feature_state.feature).name}",
*_get_feature_tags(feature),
]

if isinstance(instance, Segment):
return [f"segment:{instance.name}"]

if isinstance(instance, FeatureSegment):
return [
f"feature:{(feature := instance.feature).name}",
f"segment:{instance.segment.name}",
*_get_feature_tags(feature),
]

return []


def map_audit_log_record_to_grafana_annotation(
audit_log_record: AuditLog,
) -> GrafanaAnnotation:
tags = [
"flagsmith",
f"project:{audit_log_record.project_name}",
f"environment:{audit_log_record.environment_name}",
f"by:{audit_log_record.author_identifier}",
*_get_instance_tags_from_audit_log_record(audit_log_record),
]
time = int(audit_log_record.created_date.timestamp() * 1000) # ms since epoch

return {
"tags": tags,
"text": audit_log_record.log,
"time": time,
"timeEnd": time,
}
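
As an illustration (all values invented), a feature state change on a feature named `checkout_v2` tagged `beta`, made in the `production` environment, would map to an annotation along these lines:

```
{
    "tags": [
        "flagsmith",
        "project:My Project",
        "environment:production",
        "by:jane@example.com",
        "feature:checkout_v2",
        "flag:enabled",
        "beta",
    ],
    "text": "Flag state updated for feature checkout_v2",  # AuditLog.log text
    "time": 1719180000000,  # ms since epoch, from AuditLog.created_date
    "timeEnd": 1719180000000,
}
```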
4 changes: 2 additions & 2 deletions api/integrations/grafana/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.2.25 on 2024-06-14 13:45
+# Generated by Django 3.2.25 on 2024-06-21 20:31
 
 from django.db import migrations, models
 import django.db.models.deletion
@@ -23,7 +23,7 @@ class Migration(migrations.Migration):
                 ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                 ('api_key', models.CharField(max_length=100)),
                 ('base_url', models.URLField(null=True)),
-                ('environment', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='grafana_config', to='environments.environment')),
+                ('project', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='grafana_config', to='projects.Project')),
             ],
             options={
                 'abstract': False,
39 changes: 34 additions & 5 deletions api/integrations/grafana/models.py
@@ -2,14 +2,43 @@
 
 from django.db import models
 
-from environments.models import Environment
-from integrations.common.models import EnvironmentIntegrationModel
+from integrations.common.models import IntegrationsModel
+from projects.models import Project
 
 logger = logging.getLogger(__name__)
 
 
-class GrafanaConfiguration(EnvironmentIntegrationModel):
+class GrafanaConfiguration(IntegrationsModel):
+    """
+    Example `integration_data` entry:
+    ```
+    "grafana": {
+        "perEnvironment": false,
+        "image": "/static/images/integrations/grafana.svg",
+        "docs": "https://docs.flagsmith.com/integrations/apm/grafana",
+        "fields": [
+            {
+                "key": "base_url",
+                "label": "Base URL",
+                "default": "https://grafana.com"
+            },
+            {
+                "key": "api_key",
+                "label": "Service account token",
+                "hidden": true
+            }
+        ],
+        "tags": [
+            "logging"
+        ],
+        "title": "Grafana",
+        "description": "Receive Flagsmith annotations to your Grafana instance on feature flag and segment changes."
+    },
+    ```
+    """
+
     base_url = models.URLField(blank=False, null=True)
-    environment = models.OneToOneField(
-        Environment, related_name="grafana_config", on_delete=models.CASCADE
+    project = models.OneToOneField(
+        Project, on_delete=models.CASCADE, related_name="grafana_config"
     )
4 changes: 2 additions & 2 deletions api/integrations/grafana/serializers.py
@@ -1,11 +1,11 @@
 from integrations.common.serializers import (
-    BaseEnvironmentIntegrationModelSerializer,
+    BaseProjectIntegrationModelSerializer,
 )
 
 from .models import GrafanaConfiguration
 
 
-class GrafanaConfigurationSerializer(BaseEnvironmentIntegrationModelSerializer):
+class GrafanaConfigurationSerializer(BaseProjectIntegrationModelSerializer):
     class Meta:
         model = GrafanaConfiguration
         fields = ("id", "base_url", "api_key")
8 changes: 8 additions & 0 deletions api/integrations/grafana/types.py
@@ -0,0 +1,8 @@
from typing import TypedDict


class GrafanaAnnotation(TypedDict):
tags: list[str]
text: str
time: int
timeEnd: int
4 changes: 2 additions & 2 deletions api/integrations/grafana/views.py
@@ -1,9 +1,9 @@
-from integrations.common.views import EnvironmentIntegrationCommonViewSet
+from integrations.common.views import ProjectIntegrationBaseViewSet
 from integrations.grafana.models import GrafanaConfiguration
 from integrations.grafana.serializers import GrafanaConfigurationSerializer
 
 
-class GrafanaConfigurationViewSet(EnvironmentIntegrationCommonViewSet):
+class GrafanaConfigurationViewSet(ProjectIntegrationBaseViewSet):
     serializer_class = GrafanaConfigurationSerializer
     pagination_class = None # set here to ensure documentation is correct
     model_class = GrafanaConfiguration