Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[DPE-2804] Tweak tests #160

Merged
merged 8 commits into from
Oct 31, 2023
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 0 additions & 16 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -46,21 +46,6 @@ jobs:
- name: Run tests
run: tox run -e unit

# NOTE(review): this hunk is the job REMOVED from .github/workflows/ci.yaml by
# this PR; an equivalent standalone workflow is added in
# .github/workflows/lib-check.yaml (see below in the same PR).
lib-check:
name: Check libraries
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Checkout
uses: actions/checkout@v3
with:
# fetch-depth: 0 fetches the full history (all branches and tags).
fetch-depth: 0
- name: Check libs
uses: canonical/charming-actions/check-libraries@2.4.0
with:
credentials: "${{ secrets.CHARMHUB_TOKEN }}"
github-token: "${{ secrets.GITHUB_TOKEN }}"

build:
name: Build charms
uses: canonical/data-platform-workflows/.github/workflows/build_charms_with_cache.yaml@v2
Expand All @@ -83,7 +68,6 @@ jobs:
- "3.1.6" # renovate: latest juju 3
name: ${{ matrix.tox-environments }} | ${{ matrix.agent-versions }}
needs:
- lib-check
- lint
- unit-test
- build
Expand Down
34 changes: 34 additions & 0 deletions .github/workflows/lib-check.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Copyright 2022 Canonical Ltd.
# See LICENSE file for licensing details.
name: Check libs

# Cancel an in-flight run when a newer commit is pushed to the same ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

on:
  pull_request:
    # Skip the check for changes that cannot affect charm libraries.
    paths-ignore:
      - '.gitignore'
      - '.jujuignore'
      - 'LICENSE'
      - '**.md'
      - 'renovate.json'

jobs:
  lib-check:
    name: Check libraries
    runs-on: ubuntu-latest
    timeout-minutes: 5
    # Only run for PRs from the upstream repo, where the Charmhub
    # credentials secret is available (forks cannot read secrets).
    if: ${{ github.event.pull_request.head.repo.full_name == 'canonical/pgbouncer-k8s-operator' }}
    steps:
      - name: Checkout
        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
        with:
          # Full history — fetch-depth 0 disables the shallow clone.
          fetch-depth: 0
      - name: Check libs
        uses: canonical/charming-actions/check-libraries@2.4.0
        with:
          credentials: "${{ secrets.CHARMHUB_TOKEN }}"
          github-token: "${{ secrets.GITHUB_TOKEN }}"

1 change: 1 addition & 0 deletions .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ on:
- poetry.lock
- pyproject.toml
- '.github/workflows/ci.yaml'
- '.github/workflows/lib-check.yaml'

jobs:
ci-tests:
Expand Down
6 changes: 3 additions & 3 deletions lib/charms/data_platform_libs/v0/upgrade.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def restart(self, event) -> None:
import json
import logging
from abc import ABC, abstractmethod
from typing import List, Literal, Optional, Set, Tuple
from typing import Dict, List, Literal, Optional, Set, Tuple

import poetry.core.constraints.version as poetry_version
from ops.charm import (
Expand All @@ -285,7 +285,7 @@ def restart(self, event) -> None:

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 14
LIBPATCH = 15

PYDEPS = ["pydantic>=1.10,<2", "poetry-core"]

Expand Down Expand Up @@ -346,7 +346,7 @@ class KafkaDependenciesModel(BaseModel):
print(model.dict()) # exporting back validated deps
"""

dependencies: dict[str, str]
dependencies: Dict[str, str]
name: str
upgrade_supported: str
version: str
Expand Down
91 changes: 79 additions & 12 deletions lib/charms/postgresql_k8s/v0/postgresql.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 17
LIBPATCH = 18

INVALID_EXTRA_USER_ROLE_BLOCKING_MESSAGE = "invalid role(s) for extra user roles"

Expand Down Expand Up @@ -310,6 +310,32 @@ def enable_disable_extension(self, extension: str, enable: bool, database: str =
if connection is not None:
connection.close()

def get_postgresql_text_search_configs(self) -> Set[str]:
    """Return the text search configurations available in PostgreSQL.

    Returns:
        Set of text search configuration names, each prefixed with
        ``pg_catalog.``.
    """
    query = "SELECT CONCAT('pg_catalog.', cfgname) FROM pg_ts_config;"
    with self._connect_to_database(connect_to_current_host=True) as connection:
        with connection.cursor() as cursor:
            cursor.execute(query)
            rows = cursor.fetchall()
    return {row[0] for row in rows}

def get_postgresql_timezones(self) -> Set[str]:
    """Return the timezones known to PostgreSQL.

    Returns:
        Set of timezone names from ``pg_timezone_names``.
    """
    query = "SELECT name FROM pg_timezone_names;"
    with self._connect_to_database(connect_to_current_host=True) as connection:
        with connection.cursor() as cursor:
            cursor.execute(query)
            rows = cursor.fetchall()
    return {row[0] for row in rows}

def get_postgresql_version(self) -> str:
"""Returns the PostgreSQL version.

Expand Down Expand Up @@ -445,12 +471,12 @@ def is_restart_pending(self) -> bool:

@staticmethod
def build_postgresql_parameters(
    config_options: Dict, available_memory: int, limit_memory: Optional[int] = None
) -> Optional[Dict]:
    """Builds the PostgreSQL parameters.

    Args:
        config_options: charm config options containing profile and PostgreSQL parameters.
        available_memory: available memory to use in calculation in bytes.
        limit_memory: (optional) limit memory to use in calculation in bytes.

    Returns:
        Dictionary with the PostgreSQL parameters.

    Raises:
        Exception: if the configured shared_buffers exceeds 40% of the
            available memory.
    """
    if limit_memory:
        available_memory = min(available_memory, limit_memory)
    profile = config_options["profile"]
    logger.debug(f"Building PostgreSQL parameters for {profile=} and {available_memory=}")
    parameters = {}
    for config, value in config_options.items():
        # Filter config option not related to PostgreSQL parameters.
        # Only options whose prefix names a parameter group are forwarded.
        if not config.startswith(
            (
                "durability",
                "instance",
                "logging",
                "memory",
                "optimizer",
                "request",
                "response",
                "vacuum",
            )
        ):
            continue
        # Strip the group prefix: e.g. "logging_log_connections" -> "log_connections".
        parameter = "_".join(config.split("_")[1:])
        # These two PostgreSQL settings are CamelCase ("DateStyle", "TimeZone").
        if parameter in ["date_style", "time_zone"]:
            parameter = "".join(x.capitalize() for x in parameter.split("_"))
        parameters[parameter] = value
    # shared_buffers may use at most 40% of the available memory (value in MB).
    # NOTE(review): this compares the raw config value against an int — assumes
    # the shared_buffers option is delivered as a number; confirm with config schema.
    shared_buffers_max_value = int(int(available_memory * 0.4) / 10**6)
    if parameters.get("shared_buffers", 0) > shared_buffers_max_value:
        raise Exception(
            f"Shared buffers config option should be at most 40% of the available memory, which is {shared_buffers_max_value}MB"
        )
    if profile == "production":
        # Use 25% of the available memory for shared_buffers
        # and the remainder as cache memory.
        shared_buffers = int(available_memory * 0.25)
        effective_cache_size = int(available_memory - shared_buffers)
        # setdefault: an explicit user-provided shared_buffers wins over the profile value.
        parameters.setdefault("shared_buffers", f"{int(shared_buffers/10**6)}MB")
        parameters.update({"effective_cache_size": f"{int(effective_cache_size/10**6)}MB"})
    else:
        # Non-production profiles fall back to the PostgreSQL default.
        parameters.setdefault("shared_buffers", "128MB")
    return parameters

def validate_date_style(self, date_style: str) -> bool:
    """Validate a date style against PostgreSQL.

    Attempts ``SET DateStyle`` on a live connection; any psycopg2 error
    means the value was rejected.
    NOTE(review): the value is interpolated with sql.Identifier — confirm
    PostgreSQL accepts a quoted identifier for multi-part styles like
    "ISO, MDY".

    Returns:
        Whether the date style is valid.
    """
    statement = sql.SQL(
        "SET DateStyle to {};",
    ).format(sql.Identifier(date_style))
    try:
        with self._connect_to_database(connect_to_current_host=True) as connection:
            with connection.cursor() as cursor:
                cursor.execute(statement)
    except psycopg2.Error:
        return False
    return True
16 changes: 0 additions & 16 deletions tests/integration/helpers/postgresql_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from typing import List

import psycopg2
import requests as requests
import yaml
from pytest_operator.plugin import OpsTest

Expand Down Expand Up @@ -91,21 +90,6 @@ async def check_database_creation(
assert len(output)


async def enable_connections_logging(ops_test: OpsTest, unit_name: str) -> None:
    """Turn on the log of all connections made to a PostgreSQL instance.

    Args:
        ops_test: The ops test framework instance
        unit_name: The name of the unit to turn on the connection logs
    """
    address = await get_unit_address(ops_test, unit_name)
    payload = {"postgresql": {"parameters": {"log_connections": True}}}
    # Patroni config endpoint; self-signed cert, so TLS verification is off.
    requests.patch(f"https://{address}:8008/config", json=payload, verify=False)


async def execute_query_on_unit(
unit_address: str,
user: str,
Expand Down
6 changes: 2 additions & 4 deletions tests/integration/relations/test_backend_database.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
)
from ..helpers.postgresql_helpers import (
check_database_users_existence,
enable_connections_logging,
get_postgres_primary,
run_command_on_unit,
)
Expand Down Expand Up @@ -122,9 +121,8 @@ async def test_tls_encrypted_connection_to_postgres(ops_test: OpsTest):
await ops_test.model.relate(PG, TLS)
await ops_test.model.wait_for_idle(status="active", timeout=1000)

# Enable additional logs on the PostgreSQL instance to check TLS
# being used in a later step.
await enable_connections_logging(ops_test, f"{PG}/0")
await ops_test.model.applications[PG].set_config({"logging_log_connections": "True"})
await ops_test.model.wait_for_idle(apps=[PG], status="active", idle_period=30)

# Deploy an app and relate it to PgBouncer to open a connection
# between PgBouncer and PostgreSQL.
Expand Down
1 change: 1 addition & 0 deletions tests/integration/relations/test_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,7 @@ async def test_create_db_legacy_relation(ops_test: OpsTest, pgb_charm):
assert "waltz_standby" not in cfg["databases"].keys()


@pytest.mark.skip(reason="Should be ported and moved to the new relation tests")
async def test_relation_with_indico(ops_test: OpsTest):
"""Test the relation with Indico charm."""
logger.info("Deploying indico")
Expand Down