✅ Added Elastic client test cases
nikhilbadyal committed Sep 24, 2023
1 parent c7187ac commit 9ba6285
Showing 7 changed files with 334 additions and 3 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/pytest.yml
@@ -40,6 +40,10 @@ jobs:
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"

      - name: Setup ElasticSearch
        run: |
          bash test/es_bootstrap.sh
      - name: Install Requirements
        run: |
          python -m pip install --upgrade pip
3 changes: 3 additions & 0 deletions requirements.txt
@@ -2,15 +2,18 @@ click==8.1.7
click-params==0.4.1
elasticsearch==8.9.0
Faker==19.6.2
filelock==3.12.4
loguru==0.7.2
pytest==7.4.2
pytest-click==1.1.0
pytest-cov==4.1.0
pytest-elasticsearch @ git+https://github.com/nikhilbadyal/pytest-elasticsearch/@main
pytest-emoji==0.2.0
pytest-loguru==0.2.0
pytest-md==0.2.0
pytest-mock==3.11.1
pytest-xdist==3.3.1
python-dotenv==1.0.0
tenacity==8.2.3
tqdm==4.66.1
typing-extensions==4.8.0
6 changes: 6 additions & 0 deletions test/.env
@@ -0,0 +1,6 @@
STACK_VERSION=8.9.0
NODES=1
PORT=9200
SECURITY_ENABLED=true
ELASTICSEARCH_PASSWORD=verysecure
PLUGINS=""
140 changes: 137 additions & 3 deletions test/conftest.py
@@ -1,15 +1,41 @@
"""Conftest for Pytest."""
from __future__ import annotations

import csv
import json
import os
import sys
from typing import Any
from unittest.mock import Mock
from pathlib import Path
from test.esxport._prepare_search_query_test import TestSearchQuery
from typing import TYPE_CHECKING, Any, Iterator
from unittest.mock import Mock, patch

import pytest
from dotenv import load_dotenv
from elasticsearch.helpers import bulk
from faker import Faker
from filelock import FileLock
from pytest_elasticsearch import factories

from esxport.click_opt.cli_options import CliOptions
from esxport.elastic import ElasticsearchClient
from esxport.esxport import EsXport

if TYPE_CHECKING:
from _pytest.tmpdir import TempPathFactory
from elasticsearch import Elasticsearch

load_dotenv(Path(Path(__file__).resolve().parent, ".env"))

elasticsearch_nooproc = factories.elasticsearch_noproc(
    port=9200,
    scheme="https",
    host="localhost",
    user="elastic",
    password=os.getenv("ELASTICSEARCH_PASSWORD"),
)
elasticsearch_proc = factories.elasticsearch("elasticsearch_nooproc")


@pytest.fixture()
def cli_options() -> CliOptions:
@@ -19,7 +45,7 @@ def cli_options() -> CliOptions:
        {
            "query": query,
            "output_file": "output.csv",
            "url": "http://localhost:9200",
            "url": "https://localhost:9200",
            "user": "admin",
            "password": "password",
            "index_prefixes": ["index1", "index2"],
@@ -116,3 +142,111 @@ def _capture_wrap() -> None:
"""Avoid https://github.com/pytest-dev/pytest/issues/5502."""
sys.stderr.close = lambda *args: None # type: ignore[method-assign] #noqa: ARG005
sys.stdout.close = lambda *args: None # type: ignore[method-assign] #noqa: ARG005


@pytest.fixture(scope="session")
def index_name() -> str:
    """Index name."""
    return TestSearchQuery.random_string(10).lower()


@pytest.fixture()
def es_index(index_name: str, elasticsearch_proc: Elasticsearch) -> Any:
    """Create index."""
    elasticsearch_proc.indices.create(index=index_name)
    return index_name


# noinspection PyTypeChecker
def generate_actions(dataset_path: str) -> Iterator[dict[str, Any]]:
    """Reads the file through csv.DictReader() and for each row yields a single document.

    This function is passed into the bulk() helper to create many documents in sequence.
    """
    with Path(dataset_path).open() as f:
        reader = csv.DictReader(f)

        for row in reader:
            yield {
                "id": row["id"],
                "name": row["name"],
                "email": row["email"],
                "phone": row["phone"],
                "address": row["address"] or None,
            }


@pytest.fixture()
def populate_data(es_index: str, elasticsearch_proc: Elasticsearch) -> Elasticsearch:
    """Populates the data in elastic instances."""
    bulk(
        client=elasticsearch_proc,
        index=es_index,
        actions=generate_actions(f"{es_index}.csv"),
    )
    return elasticsearch_proc


@pytest.fixture()
def elastic_client(
    cli_options: CliOptions,
    populate_data: Elasticsearch,
    generate_test_csv: str,  # noqa: ARG001
) -> Iterator[ElasticsearchClient]:
    """Patches Elasticsearch client."""
    es_client = ElasticsearchClient(cli_options)
    with patch.object(es_client, "client", populate_data):
        yield es_client


def _create_csv(csv_file_name: str) -> None:
    # Create a Faker instance
    fake = Faker()

    # Define the number of rows you want in your CSV
    num_rows = TestSearchQuery.random_number(10, 20)

    # Define the CSV header
    csv_header = ["id", "name", "email", "phone", "address"]

    # Generate random data and write it to the CSV file
    with Path(csv_file_name).open("w", newline="") as csvfile:
        writer = csv.writer(csvfile)

        # Write the header row
        writer.writerow(csv_header)

        # Generate and write random data rows
        for i in range(1, num_rows + 1):
            name = fake.name()
            email = fake.email()
            phone = fake.phone_number()
            address = fake.address()

            # Write the data to the CSV file
            writer.writerow([i, name, email, phone, address])


# https://github.com/pytest-dev/pytest-xdist/issues/271
@pytest.fixture(scope="session")
def generate_test_csv(index_name: str, tmp_path_factory: TempPathFactory, worker_id: str) -> Iterator[str]:
    """Generate random csv for testing."""
    csv_file_name = f"{index_name}.csv"

    if worker_id == "master":
        yield csv_file_name

    # get the temp directory shared by all workers
    root_tmp_dir = tmp_path_factory.getbasetemp().parent

    fn = root_tmp_dir / "data.json"
    with FileLock(f"{fn!s}.lock"):
        if fn.is_file():
            data = json.loads(fn.read_text())
        else:
            _create_csv(csv_file_name)
            data = csv_file_name
            fn.write_text(json.dumps(data))

    yield data
    Path(csv_file_name).unlink(missing_ok=True)
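
Taken together, these fixtures form a chain: generate_test_csv writes one fake CSV per session (the FileLock/data.json dance keeps pytest-xdist workers from generating it twice), populate_data bulk-loads that CSV into the index created by es_index, and elastic_client hands tests an ElasticsearchClient whose underlying connection is patched to point at the bootstrapped cluster. A hypothetical test (not part of this commit) that exercises the whole chain could look like the sketch below; it assumes the patched client attribute exposes the regular elasticsearch-py API.

from esxport.elastic import ElasticsearchClient


def test_bulk_rows_are_indexed(es_index: str, elastic_client: ElasticsearchClient) -> None:
    """Hypothetical check that the CSV rows produced by the fixtures were indexed."""
    # elastic_client.client is the patched Elasticsearch connection from populate_data,
    # so a refresh followed by a count should reflect the bulk-loaded documents.
    elastic_client.client.indices.refresh(index=es_index)
    assert elastic_client.client.count(index=es_index)["count"] > 0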
1 change: 1 addition & 0 deletions test/elastic/__init__.py
@@ -0,0 +1 @@
"""Elasticsearch test cases."""
17 changes: 17 additions & 0 deletions test/elastic/client_test.py
@@ -0,0 +1,17 @@
"""Client Test cases."""
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing_extensions import Self

    from esxport.elastic import ElasticsearchClient


class TestElasticsearchClient:
    """Elastic Client Test cases."""

    def test_index_exists(self: Self, es_index: str, elastic_client: ElasticsearchClient) -> None:
        """Test client returns true when index exists."""
        assert elastic_client.indices_exists(index=es_index) is True
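
A complementary negative-path case could sit alongside this as a further method on TestElasticsearchClient. It is hypothetical (not part of this commit) and assumes indices_exists simply wraps Elasticsearch's indices.exists and returns a falsy value for an unknown index:

    def test_index_does_not_exist(self: Self, elastic_client: ElasticsearchClient) -> None:
        """Hypothetical: an index that was never created should not be reported as existing."""
        # Assumes indices_exists mirrors indices.exists and is falsy for a missing index.
        assert not elastic_client.indices_exists(index="no-such-index")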
166 changes: 166 additions & 0 deletions test/es_bootstrap.sh
@@ -0,0 +1,166 @@
#!/bin/bash

set -o allexport
source test/.env
set +o allexport
set -euxo pipefail

if [[ -z $STACK_VERSION ]]; then
echo -e "\033[31;1mERROR:\033[0m Required environment variable [STACK_VERSION] not set\033[0m"
exit 1
fi

MAJOR_VERSION="$(echo "${STACK_VERSION}" | cut -c 1)"
network_name=elastic
if ! docker network inspect "$network_name" &>/dev/null; then
docker network create "$network_name"
echo "Created network: $network_name"
else
echo "Network $network_name already exists."
fi

mkdir -p "$(pwd)"/es/plugins

if [[ ! -z $PLUGINS ]]; then
docker run --rm \
--network=elastic \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
--entrypoint=/usr/share/elasticsearch/bin/elasticsearch-plugin \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}" \
install "${PLUGINS/\\n/ }" --batch
fi

for (( node=1; node<=${NODES-1}; node++ ))
do
port_com=$((9300 + node - 1))
UNICAST_HOSTS+="es$node:${port_com},"
done

for (( node=1; node<=${NODES-1}; node++ ))
do
port=$((${PORT:-9200} + node - 1))
port_com=$((9300 + node - 1))
if [ "x${MAJOR_VERSION}" == 'x6' ]; then
docker run \
--rm \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "xpack.security.enabled=false" \
--env "xpack.license.self_generated.type=basic" \
--env "discovery.zen.ping.unicast.hosts=${UNICAST_HOSTS}" \
--env "discovery.zen.minimum_master_nodes=${NODES}" \
--env "http.port=${port}" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--publish "${port_com}:${port_com}" \
--detach \
--network=elastic \
--name="es${node}" \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
elif [ "x${MAJOR_VERSION}" == 'x7' ]; then
docker run \
--rm \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.initial_master_nodes=es1" \
--env "discovery.seed_hosts=es1" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "xpack.security.enabled=false" \
--env "xpack.license.self_generated.type=basic" \
--env "http.port=${port}" \
--env "action.destructive_requires_name=false" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--detach \
--network=elastic \
--name="es${node}" \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
elif [ "x${MAJOR_VERSION}" == 'x8' ]; then
if [ "${SECURITY_ENABLED}" == 'true' ]; then
elasticsearch_password=${ELASTICSEARCH_PASSWORD-'changeme'}
docker run \
--rm \
--env "ELASTIC_PASSWORD=${elasticsearch_password}" \
--env "xpack.license.self_generated.type=basic" \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.initial_master_nodes=es1" \
--env "discovery.seed_hosts=es1" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "http.port=${port}" \
--env "action.destructive_requires_name=false" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--network=elastic \
--name="es${node}" \
--detach \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
else
docker run \
--rm \
--env "xpack.security.enabled=false" \
--env "node.name=es${node}" \
--env "cluster.name=docker-elasticsearch" \
--env "cluster.initial_master_nodes=es1" \
--env "discovery.seed_hosts=es1" \
--env "cluster.routing.allocation.disk.threshold_enabled=false" \
--env "bootstrap.memory_lock=true" \
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
--env "xpack.license.self_generated.type=basic" \
--env "http.port=${port}" \
--env "action.destructive_requires_name=false" \
--ulimit nofile=65536:65536 \
--ulimit memlock=-1:-1 \
--publish "${port}:${port}" \
--network=elastic \
--name="es${node}" \
--detach \
-v "$(pwd)"/es/plugins/:/usr/share/elasticsearch/plugins/ \
docker.elastic.co/elasticsearch/elasticsearch:"${STACK_VERSION}"
fi
fi
done

if [ "x${MAJOR_VERSION}" == 'x8' ] && [ "${SECURITY_ENABLED}" == 'true' ]; then
docker run \
--network elastic \
--rm \
alpine/curl \
--max-time 120 \
--retry 120 \
--retry-delay 1 \
--retry-connrefused \
--show-error \
--silent \
-k \
-u elastic:"${ELASTICSEARCH_PASSWORD-'changeme'}" \
https://es1:"$PORT"
else
docker run \
--network elastic \
--rm \
alpine/curl \
--max-time 120 \
--retry 120 \
--retry-delay 1 \
--retry-connrefused \
--show-error \
--silent \
http://es1:"$PORT"
fi


echo "Elasticsearch up and running"

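The alpine/curl container at the end of the script is only a readiness probe. For running the suite outside the workflow, a rough Python equivalent of that wait loop (an assumption, not something this commit ships) could be:

import os
import time

from elasticsearch import Elasticsearch

# Mirrors the curl retry loop above: poll the secured node until it answers.
es = Elasticsearch(
    "https://localhost:9200",
    basic_auth=("elastic", os.environ.get("ELASTICSEARCH_PASSWORD", "changeme")),
    verify_certs=False,
)
for _ in range(120):
    if es.ping():
        break
    time.sleep(1)
else:
    msg = "Elasticsearch did not come up within 120 seconds"
    raise RuntimeError(msg)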