
Commit

Added an example of a Grafana monitoring dashboard
emeli-dral committed Nov 4, 2023
1 parent f4d3004 commit 63a49fd
Showing 5 changed files with 171 additions and 0 deletions.
24 changes: 24 additions & 0 deletions module6/grafana_monitoring_dashboard/config/grafana_dashboards.yaml
@@ -0,0 +1,24 @@
apiVersion: 1

providers:
  # <string> a unique provider name. Required
  - name: 'Evidently Dashboards'
    # <int> Org id. Defaults to 1
    orgId: 1
    # <string> name of the dashboard folder.
    folder: ''
    # <string> folder UID. Will be automatically generated if not specified
    folderUid: ''
    # <string> provider type. Defaults to 'file'
    type: file
    # <bool> disable dashboard deletion
    disableDeletion: false
    # <int> how often Grafana will scan for changed dashboards
    updateIntervalSeconds: 10
    # <bool> allow updating provisioned dashboards from the UI
    allowUiUpdates: false
    options:
      # <string, required> path to dashboard files on disk. Required when using the 'file' type
      path: /opt/grafana/dashboards
      # <bool> use folder names from filesystem to create folders in Grafana
      foldersFromFilesStructure: true
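
This provider only points Grafana at /opt/grafana/dashboards; the dashboard JSON files themselves live in the ./dashboards directory mounted by the compose file. As a rough sketch (the file name drift_dashboard.json, the uid, the panel layout, and the SQL are illustrative assumptions, not part of this commit), a dashboard file that the provider would pick up could be generated like this:

import json
from pathlib import Path

# Sketch: write a minimal dashboard JSON into ./dashboards, the folder mounted to
# /opt/grafana/dashboards in docker-compose.yml. File name, uid, panel layout and
# SQL below are assumptions for illustration only.
dashboard = {
    "title": "Evidently drift metrics",
    "uid": "evidently-drift",
    "schemaVersion": 36,
    "time": {"from": "now-6h", "to": "now"},
    "panels": [
        {
            "type": "timeseries",
            "title": "Share of drifted columns",
            "gridPos": {"h": 8, "w": 12, "x": 0, "y": 0},
            # no explicit datasource here: relies on the provisioned PostgreSQL source
            "targets": [
                {
                    "format": "time_series",
                    "rawSql": "SELECT timestamp AS time, share_drifted_columns FROM drift_metrics ORDER BY 1",
                }
            ],
        }
    ],
}

out = Path("dashboards") / "drift_dashboard.json"
out.parent.mkdir(exist_ok=True)
out.write_text(json.dumps(dashboard, indent=2))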
25 changes: 25 additions & 0 deletions module6/grafana_monitoring_dashboard/config/grafana_datasources.yaml
@@ -0,0 +1,25 @@
# config file version
apiVersion: 1

# list of datasources that should be deleted from the database
deleteDatasources:
  - name: Prometheus
    orgId: 1

# list of datasources to insert/update
# available in the database
datasources:
  # - name: Prometheus
  #   type: prometheus
  #   access: proxy
  #   url: http://prometheus.:9090
  - name: PostgreSQL
    type: postgres
    access: proxy
    url: db.:5432
    database: test
    user: postgres
    secureJsonData:
      password: 'example'
    jsonData:
      sslmode: 'disable'
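
Once the stack is running, a quick way to confirm that Grafana actually loaded this datasource is its HTTP API. A minimal sketch, assuming Grafana's default admin/admin login (nothing in this setup overrides it):

import requests

# List the datasources Grafana knows about; admin/admin is Grafana's default
# login and an assumption here, not something configured in this commit.
resp = requests.get("http://localhost:3000/api/datasources", auth=("admin", "admin"), timeout=5)
resp.raise_for_status()
for ds in resp.json():
    print(ds["name"], ds["type"], ds["url"])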
42 changes: 42 additions & 0 deletions module6/grafana_monitoring_dashboard/docker-compose.yml
@@ -0,0 +1,42 @@
version: '3.7'

volumes:
  grafana_data: {}

networks:
  front-tier:
  back-tier:

services:
  db:
    image: postgres
    restart: always
    environment:
      POSTGRES_PASSWORD: example
    ports:
      - "5432:5432"
    networks:
      - back-tier

  adminer:
    image: adminer
    restart: always
    ports:
      - "8080:8080"
    networks:
      - back-tier
      - front-tier

  grafana:
    image: grafana/grafana:8.5.21
    user: "472"
    ports:
      - "3000:3000"
    volumes:
      - ./config/grafana_datasources.yaml:/etc/grafana/provisioning/datasources/datasource.yaml:ro
      - ./config/grafana_dashboards.yaml:/etc/grafana/provisioning/dashboards/dashboards.yaml:ro
      - ./dashboards:/opt/grafana/dashboards
    networks:
      - back-tier
      - front-tier
    restart: always
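
After bringing the stack up with docker compose, a small smoke test can confirm the mapped ports respond. The Grafana health endpoint below is standard; the rest is plain HTTP reachability (a sketch, assuming the services are already running):

import requests

# Grafana's health endpoint reports whether its own database is reachable.
health = requests.get("http://localhost:3000/api/health", timeout=5)
print("grafana:", health.status_code, health.json())   # expect 200 and "database": "ok"

# Adminer just needs to answer on its mapped port with its login page.
adminer = requests.get("http://localhost:8080", timeout=5)
print("adminer:", adminer.status_code)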
@@ -0,0 +1,70 @@
import datetime
import time
import logging
import psycopg

import pandas as pd

from sklearn import datasets

from evidently.report import Report
from evidently.metrics import ColumnDriftMetric, DatasetDriftMetric

logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s]: %(message)s")


create_table_statement = """
drop table if exists drift_metrics;
create table drift_metrics(
timestamp timestamp,
target_drift float,
share_drifted_columns float
)
"""

bank_marketing = datasets.fetch_openml(name='bank-marketing', as_frame='auto')
bank_marketing_data = bank_marketing.frame

reference_data = bank_marketing_data[5000:5500]
prod_simulation_data = bank_marketing_data[7000:]
mini_batch_size = 50

def prep_db():
    # create the 'test' database if it does not exist yet, then (re)create the metrics table
    with psycopg.connect("host=localhost port=5432 user=postgres password=example", autocommit=True) as conn:
        res = conn.execute("SELECT 1 FROM pg_database WHERE datname='test'")
        if len(res.fetchall()) == 0:
            conn.execute("create database test;")
    with psycopg.connect("host=localhost port=5432 dbname=test user=postgres password=example") as conn:
        conn.execute(create_table_statement)


def calculate_metrics_postgresql(curr, i):
    # dataset-level drift plus drift of the target column "Class" for one mini-batch
    report = Report(
        metrics=[
            DatasetDriftMetric(),
            ColumnDriftMetric(column_name="Class"),
        ])

    report.run(
        reference_data=reference_data,
        current_data=prod_simulation_data[i * mini_batch_size : (i + 1) * mini_batch_size],
    )

    result = report.as_dict()

    target_drift = result['metrics'][1]['result']['drift_score']
    share_drifted_columns = result['metrics'][0]['result']['share_of_drifted_columns']

    curr.execute(
        "insert into drift_metrics(timestamp, target_drift, share_drifted_columns) values (%s, %s, %s)",
        (datetime.datetime.now(), target_drift, share_drifted_columns)
    )


def batch_monitoring_backfill():
    prep_db()
    with psycopg.connect("host=localhost port=5432 dbname=test user=postgres password=example", autocommit=True) as conn:
        for i in range(50):
            with conn.cursor() as curr:
                calculate_metrics_postgresql(curr, i)
            logging.info("data sent")
            time.sleep(3)


if __name__ == '__main__':
    batch_monitoring_backfill()
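
Each loop iteration above inserts one row per mini-batch into drift_metrics, which is the table the Grafana panels on the PostgreSQL datasource read. To check the data outside Grafana, a small read-back sketch using the same connection settings:

import psycopg

# Fetch the most recent drift metrics; this mirrors the kind of query a Grafana
# time-series panel on the provisioned PostgreSQL datasource would run.
with psycopg.connect("host=localhost port=5432 dbname=test user=postgres password=example") as conn:
    rows = conn.execute(
        "SELECT timestamp, target_drift, share_drifted_columns "
        "FROM drift_metrics ORDER BY timestamp DESC LIMIT 5"
    ).fetchall()

for ts, target_drift, share_drifted in rows:
    print(ts, target_drift, share_drifted)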
10 changes: 10 additions & 0 deletions module6/grafana_monitoring_dashboard/requirements.txt
@@ -0,0 +1,10 @@
evidently==0.4.8
pandas>=1.3.5
numpy>=1.19.5
scikit-learn>=0.24.0
jupyter>=1.0.0

requests
pyarrow
psycopg
psycopg_binary
