fix: run localnet on goal command (#380)
* feat: run the localnet on goal command if not running

* test: update and re-gen tests

* chore: fix pip audit for pycryptodomex

* chore: fix ruff

* Update src/algokit/cli/goal.py

Co-authored-by: Neil Campbell <neil.campbell@makerx.com.au>

* test: fix a test

* test: fix a test

---------

Co-authored-by: Neil Campbell <neil.campbell@makerx.com.au>
negar-abbasi and neilcampbell authored Jan 9, 2024
1 parent ee35867 commit 5a06ddc
Showing 17 changed files with 199 additions and 122 deletions.
88 changes: 34 additions & 54 deletions poetry.lock

Large diffs are not rendered by default.

18 changes: 8 additions & 10 deletions src/algokit/cli/goal.py
@@ -58,6 +58,13 @@ def goal_command(*, console: bool, goal_args: list[str]) -> None:
compose_file_status = sandbox.compose_file_status()
if compose_file_status is not ComposeFileStatus.UP_TO_DATE and sandbox.name == DEFAULT_NAME:
raise click.ClickException("LocalNet definition is out of date; please run `algokit localnet reset` first!")
ps_result = sandbox.ps("algod")
match ps_result:
case [{"State": "running"}]:
pass
case _:
logger.info("LocalNet isn't running")
sandbox.up()

if console:
if goal_args:
@@ -79,13 +86,4 @@ def goal_command(*, console: bool, goal_args: list[str]) -> None:
post_process(input_files, output_files, volume_mount_path_local)

if result.exit_code != 0:
ps_result = sandbox.ps("algod")
match ps_result:
case [{"State": "running"}]:
pass # container is running, failure must have been with command
case _:
logger.warning(
"algod container does not appear to be running, "
"ensure localnet is started by executing `algokit localnet start`"
)
raise click.exceptions.Exit(result.exit_code) # pass on the exit code
raise click.exceptions.Exit(result.exit_code)
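
In short, the goal command now does its own pre-flight check rather than only diagnosing a stopped algod after a failure: it queries the compose state of the algod service and brings LocalNet up when the container isn't running. A minimal standalone sketch of that flow follows; SandboxStub is a hypothetical stand-in for the real sandbox object, which exposes ps() and up() as shown in the diff, and the JSON-list output format mirrors the mocked `docker compose ps` payloads used in the tests below.

# Sketch only: mirrors the new pre-flight check in goal.py, not the actual algokit implementation.
import json
import logging
import subprocess

logger = logging.getLogger(__name__)


class SandboxStub:
    """Hypothetical stand-in for the sandbox object used by the goal command."""

    def ps(self, service: str) -> list[dict]:
        # `docker compose ps <service> --format json`; the tests above mock this as a JSON list.
        result = subprocess.run(
            ["docker", "compose", "ps", service, "--format", "json"],
            capture_output=True, text=True, check=False,
        )
        return json.loads(result.stdout or "[]")

    def up(self) -> None:
        subprocess.run(["docker", "compose", "up", "--detach"], check=False)


def ensure_algod_running(sandbox: SandboxStub) -> None:
    match sandbox.ps("algod"):
        case [{"State": "running"}]:
            pass  # algod container is already running, nothing to do
        case _:
            logger.info("LocalNet isn't running")
            sandbox.up()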
168 changes: 116 additions & 52 deletions tests/goal/test_goal.py
@@ -3,7 +3,8 @@
from subprocess import CompletedProcess

import pytest
from algokit.core.sandbox import get_algod_network_template, get_config_json, get_docker_compose_yml
from algokit.core.sandbox import ALGOD_HEALTH_URL, get_algod_network_template, get_config_json, get_docker_compose_yml
from pytest_httpx import HTTPXMock
from pytest_mock import MockerFixture

from tests.utils.app_dir_mock import AppDirs
@@ -18,6 +19,11 @@ def _normalize_output(output: str) -> str:
return output.replace("\\", "/")


@pytest.fixture()
def _health_success(httpx_mock: HTTPXMock) -> None:
httpx_mock.add_response(url=ALGOD_HEALTH_URL)


@pytest.fixture()
def cwd(tmp_path_factory: pytest.TempPathFactory) -> Path:
return tmp_path_factory.mktemp("cwd")
@@ -57,6 +63,14 @@ def _mock_proc_with_running_localnet(proc_mock: ProcMock) -> None:
proc_mock.set_output("docker compose ls --format json --filter name=algokit_sandbox*", [json.dumps([])])


@pytest.fixture()
def _mock_proc_with_algod_running_state(proc_mock: ProcMock) -> None:
proc_mock.set_output(
cmd=["docker", "compose", "ps", "algod", "--format", "json"],
output=[json.dumps([{"Name": "algokit_sandbox_algod", "State": "running"}])],
)


def dump_file(cwd: Path) -> None:
(cwd / "approval.compiled").write_text(
"""
@@ -83,78 +97,93 @@ def test_goal_help() -> None:


@pytest.mark.usefixtures(
"proc_mock", "_setup_latest_dummy_compose", "mocked_goal_mount_path", "_mock_proc_with_running_localnet"
"proc_mock",
"_setup_latest_dummy_compose",
"mocked_goal_mount_path",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_no_args() -> None:
def test_goal_no_args(app_dir_mock: AppDirs) -> None:
result = invoke("goal")

assert result.exit_code == 0
verify(result.output)
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures("proc_mock", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet")
def test_goal_console(mocker: MockerFixture) -> None:
@pytest.mark.usefixtures(
"proc_mock",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_console(mocker: MockerFixture, app_dir_mock: AppDirs) -> None:
mocker.patch("algokit.core.proc.subprocess_run").return_value = CompletedProcess(
["docker", "exec"], 0, "STDOUT+STDERR"
)

result = invoke("goal --console")

assert result.exit_code == 0
verify(result.output)
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures("_setup_latest_dummy_compose", "_mock_proc_with_running_localnet")
def test_goal_console_failed(app_dir_mock: AppDirs, proc_mock: ProcMock, mocker: MockerFixture) -> None:
mocker.patch("algokit.core.proc.subprocess_run").return_value = CompletedProcess(
["docker", "exec"], 1, "STDOUT+STDERR"
)
@pytest.mark.usefixtures("_setup_latest_dummy_compose", "_mock_proc_with_running_localnet", "_health_success")
def test_goal_console_algod_not_created(app_dir_mock: AppDirs, proc_mock: ProcMock, mocker: MockerFixture) -> None:
proc_mock.set_output(["docker", "compose", "ps", "algod", "--format", "json"], output=[json.dumps([])])

proc_mock.set_output(
["docker", "compose", "ps", "algod", "--format", "json"],
output=[json.dumps([{"Name": "algokit_sandbox_algod", "State": "running"}])],
mocker.patch("algokit.core.proc.subprocess_run").return_value = CompletedProcess(
["docker", "exec"], 0, "STDOUT+STDERR"
)

result = invoke("goal --console")

assert result.exit_code == 1
assert result.exit_code == 0
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures("_setup_latest_dummy_compose", "_mock_proc_with_running_localnet")
def test_goal_console_failed_algod_not_created(
app_dir_mock: AppDirs, proc_mock: ProcMock, mocker: MockerFixture
) -> None:
@pytest.mark.usefixtures(
"proc_mock",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_console_failed(app_dir_mock: AppDirs, mocker: MockerFixture) -> None:
mocker.patch("algokit.core.proc.subprocess_run").return_value = CompletedProcess(
["docker", "exec"], 1, "bad args to goal"
["docker", "exec"], 1, "STDOUT+STDERR"
)

proc_mock.set_output(["docker", "compose", "ps", "algod", "--format", "json"], output=[json.dumps([])])

result = invoke("goal --console")

assert result.exit_code == 1
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures(
"proc_mock", "_setup_latest_dummy_compose", "mocked_goal_mount_path", "_mock_proc_with_running_localnet"
"proc_mock",
"_setup_latest_dummy_compose",
"mocked_goal_mount_path",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_simple_args() -> None:
def test_goal_simple_args(app_dir_mock: AppDirs) -> None:
result = invoke("goal account list")

assert result.exit_code == 0
verify(result.output)
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures(
"proc_mock", "_setup_latest_dummy_compose", "mocked_goal_mount_path", "_mock_proc_with_running_localnet"
"proc_mock",
"_setup_latest_dummy_compose",
"mocked_goal_mount_path",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_complex_args() -> None:
def test_goal_complex_args(app_dir_mock: AppDirs) -> None:
result = invoke("goal account export -a RKTAZY2ZLKUJBHDVVA3KKHEDK7PRVGIGOZAUUIZBNK2OEP6KQGEXKKUYUY")

assert result.exit_code == 0
verify(result.output)
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


def test_goal_start_without_docker(proc_mock: ProcMock) -> None:
@@ -176,12 +205,17 @@ def test_goal_start_without_docker_engine_running(proc_mock: ProcMock) -> None:


@pytest.mark.usefixtures(
"_setup_input_files", "_setup_latest_dummy_compose", "mocked_goal_mount_path", "_mock_proc_with_running_localnet"
"_setup_input_files",
"_setup_latest_dummy_compose",
"mocked_goal_mount_path",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
@pytest.mark.parametrize("_setup_input_files", [[{"name": "transactions.txt"}]], indirect=True)
def test_goal_simple_args_with_input_file(
proc_mock: ProcMock,
cwd: Path,
app_dir_mock: AppDirs,
) -> None:
expected_arguments = [
"docker",
@@ -199,16 +233,21 @@ def test_goal_simple_args_with_input_file(
result = invoke("goal clerk group transactions.txt", cwd=cwd)

# Check if the path in command has changed in preprocess step
assert _normalize_output(proc_mock.called[2].command[9]) == "/root/goal_mount/transactions.txt"
assert _normalize_output(proc_mock.called[3].command[9]) == "/root/goal_mount/transactions.txt"

# Check for the result status
assert result.exit_code == 0

verify(_normalize_output(result.output))
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures("mocked_goal_mount_path", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet")
def test_goal_simple_args_with_output_file(proc_mock: ProcMock, cwd: Path) -> None:
@pytest.mark.usefixtures(
"mocked_goal_mount_path",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_simple_args_with_output_file(proc_mock: ProcMock, cwd: Path, app_dir_mock: AppDirs) -> None:
expected_arguments = [
"docker",
"exec",
@@ -230,26 +269,31 @@ def test_goal_simple_args_with_output_file(proc_mock: ProcMock, cwd: Path) -> None:
result = invoke("goal account dump -o balance_record.json")

# Check if the path in command has changed in preprocess step
assert _normalize_output(proc_mock.called[2].command[10]) == "/root/goal_mount/balance_record.json"
assert _normalize_output(proc_mock.called[3].command[10]) == "/root/goal_mount/balance_record.json"

# Check for the result status
assert result.exit_code == 0

# Check if the output file is actually created and copied in cwd in postprocess step
assert (cwd / "balance_record.json").exists()

verify(_normalize_output(result.output))
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures(
"mocked_goal_mount_path", "_setup_input_files", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet"
"mocked_goal_mount_path",
"_setup_input_files",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
@pytest.mark.parametrize(
"_setup_input_files", [[{"name": "approval.teal", "content": DUMMY_CONTRACT_TEAL}]], indirect=True
)
def test_goal_simple_args_with_input_output_files(
proc_mock: ProcMock,
cwd: Path,
app_dir_mock: AppDirs,
) -> None:
expected_arguments = [
"docker",
@@ -270,19 +314,23 @@ def test_goal_simple_args_with_input_output_files(
result = invoke("goal clerk compile approval.teal -o approval.compiled", cwd=cwd)

# Check if the paths in command have changed in preprocess step
assert _normalize_output(proc_mock.called[2].command[9]) == "/root/goal_mount/approval.teal"
assert _normalize_output(proc_mock.called[2].command[11]) == "/root/goal_mount/approval.compiled"
assert _normalize_output(proc_mock.called[3].command[9]) == "/root/goal_mount/approval.teal"
assert _normalize_output(proc_mock.called[3].command[11]) == "/root/goal_mount/approval.compiled"

# Check for the result status
assert result.exit_code == 0

# Check if the output file is created and copied in cwd in postprocess step
assert (cwd / "approval.compiled").exists()
verify(_normalize_output(result.output))
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures(
"mocked_goal_mount_path", "_setup_input_files", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet"
"mocked_goal_mount_path",
"_setup_input_files",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
@pytest.mark.parametrize(
"_setup_input_files",
@@ -297,6 +345,7 @@ def test_goal_simple_args_with_input_output_files(
def test_goal_simple_args_with_multiple_input_output_files(
proc_mock: ProcMock,
cwd: Path,
app_dir_mock: AppDirs,
) -> None:
expected_arguments = [
"docker",
@@ -316,32 +365,42 @@ def test_goal_simple_args_with_multiple_input_output_files(
result = invoke("goal clerk compile approval1.teal approval2.teal -o approval.compiled", cwd=cwd)

# Check if the paths in command have changed in preprocess step
assert _normalize_output(proc_mock.called[2].command[9]) == "/root/goal_mount/approval1.teal"
assert _normalize_output(proc_mock.called[2].command[10]) == "/root/goal_mount/approval2.teal"
assert _normalize_output(proc_mock.called[2].command[12]) == "/root/goal_mount/approval.compiled"
assert _normalize_output(proc_mock.called[3].command[9]) == "/root/goal_mount/approval1.teal"
assert _normalize_output(proc_mock.called[3].command[10]) == "/root/goal_mount/approval2.teal"
assert _normalize_output(proc_mock.called[3].command[12]) == "/root/goal_mount/approval.compiled"

# Check for the result
assert result.exit_code == 0

# Check if the output file is actually created and copied in cwd in postprocess step
assert (cwd / "approval.compiled").exists()
verify(_normalize_output(result.output))
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures(
"proc_mock", "mocked_goal_mount_path", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet"
"proc_mock",
"mocked_goal_mount_path",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
def test_goal_simple_args_without_file_error(
cwd: Path,
app_dir_mock: AppDirs,
) -> None:
assert not (cwd / "approval.teal").exists()
result = invoke("goal clerk compile approval.teal -o approval.compiled", cwd=cwd)

assert result.exit_code == 1
verify(_normalize_output(result.output))
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))


@pytest.mark.usefixtures("_setup_input_files", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet")
@pytest.mark.usefixtures(
"_setup_input_files",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
@pytest.mark.parametrize(
"_setup_input_files", [[{"name": "approval.teal", "content": DUMMY_CONTRACT_TEAL}]], indirect=True
)
@@ -389,7 +448,12 @@ def test_goal_postprocess_of_command_args(
assert (mocked_goal_mount_path / "approval.group.sig.out").exists()


@pytest.mark.usefixtures("_setup_input_files", "_setup_latest_dummy_compose", "_mock_proc_with_running_localnet")
@pytest.mark.usefixtures(
"_setup_input_files",
"_setup_latest_dummy_compose",
"_mock_proc_with_running_localnet",
"_mock_proc_with_algod_running_state",
)
@pytest.mark.parametrize("_setup_input_files", [[{"name": "group.gtxn", "content": ""}]], indirect=True)
def test_goal_postprocess_of_single_output_arg_resulting_in_multiple_output_files(
proc_mock: ProcMock,
@@ -448,8 +512,8 @@ def test_goal_compose_outdated(
verify(_normalize_output(result.output))


@pytest.mark.usefixtures("_setup_latest_dummy_compose", "mocked_goal_mount_path")
def test_goal_simple_args_on_named_localnet(proc_mock: ProcMock) -> None:
@pytest.mark.usefixtures("_setup_latest_dummy_compose", "mocked_goal_mount_path", "_mock_proc_with_algod_running_state")
def test_goal_simple_args_on_named_localnet(proc_mock: ProcMock, app_dir_mock: AppDirs) -> None:
proc_mock.set_output(
"docker compose ls --format json --filter name=algokit_sandbox*",
[json.dumps([{"Name": "algokit_test", "Status": "running", "ConfigFiles": "to/test/docker-compose.yml"}])],
@@ -458,4 +522,4 @@ def test_goal_simple_args_on_named_localnet(proc_mock: ProcMock) -> None:
result = invoke("goal account list")

assert result.exit_code == 0
verify(result.output)
verify(_normalize_output(result.output.replace(str(app_dir_mock.app_config_dir), "{app_config}")))
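
The new `_mock_proc_with_algod_running_state` fixture feeds the happy path of the match statement, while tests such as test_goal_console_algod_not_created override it with an empty list so the command falls through to starting LocalNet. A small standalone illustration of how those two mocked `docker compose ps algod --format json` payloads are interpreted (uses only the JSON shapes from the tests above, no algokit imports):

# Illustration only: why the two mocked payloads take different branches.
import json

running = json.loads('[{"Name": "algokit_sandbox_algod", "State": "running"}]')
not_created = json.loads("[]")

for payload in (running, not_created):
    match payload:
        case [{"State": "running"}]:
            print("algod running -> goal command proceeds")
        case _:
            print("algod not running -> LocalNet is started first")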
2 changes: 2 additions & 0 deletions tests/goal/test_goal.test_goal_complex_args.approved.txt
@@ -3,6 +3,8 @@ DEBUG: docker: STDOUT
DEBUG: docker: STDERR
DEBUG: Running 'docker compose ls --format json --filter name=algokit_sandbox*' in '{current_working_directory}'
DEBUG: docker: []
DEBUG: Running 'docker compose ps algod --format json' in '{app_config}/sandbox'
DEBUG: docker: [{"Name": "algokit_sandbox_algod", "State": "running"}]
DEBUG: Running 'docker exec --interactive --workdir /root algokit_sandbox_algod goal account export -a RKTAZY2ZLKUJBHDVVA3KKHEDK7PRVGIGOZAUUIZBNK2OEP6KQGEXKKUYUY' in '{current_working_directory}'
STDOUT
STDERR

1 comment on commit 5a06ddc

@github-actions

Coverage

Coverage Report
File                            Stmts  Miss  Cover  Missing
src/algokit
  __init__.py                      15     7    53%  6–13, 17–24, 32–34
  __main__.py                       2     2     0%  1–3
src/algokit/cli
  completions.py                  108     2    98%  83, 98
  deploy.py                        72     7    90%  44, 46, 92–94, 158, 182
  dispenser.py                    121     1    99%  77
  doctor.py                        48     3    94%  142–144
  explore.py                       50    12    76%  34–39, 41–46
  generate.py                      67     3    96%  74–75, 140
  goal.py                          44     1    98%  71
  init.py                         190    16    92%  277–278, 328, 331–333, 344, 388, 414, 454, 463–465, 468–473, 486
  localnet.py                     119    15    87%  74–78, 111, 123, 138–148, 161, 206, 227–228
src/algokit/cli/common
  utils.py                         26     2    92%  120, 123
src/algokit/cli/tasks
  assets.py                        82    13    84%  65–66, 72, 74–75, 105, 119, 125–126, 132, 134, 136–137
  ipfs.py                          51     8    84%  52, 80, 92, 94–95, 105–107
  mint.py                          66     4    94%  48, 70, 91, 250
  send_transaction.py              65    10    85%  52–53, 57, 89, 158, 170–174
  sign_transaction.py              59     8    86%  21, 28–30, 71–72, 109, 123
  transfer.py                      39     3    92%  26, 90, 117
  utils.py                         99    45    55%  26–34, 40–43, 75–76, 100–101, 125–133, 152–162, 209, 258–259, 279–290, 297–299
  vanity_address.py                56    10    82%  41, 45–48, 112, 114, 121–123
  wallet.py                        79     4    95%  21, 66, 136, 162
src/algokit/core
  bootstrap.py                    161    24    85%  103–104, 126, 149, 214, 217, 223–237, 246–251
  conf.py                          54     8    85%  10, 24, 28, 36, 38, 71–73
  deploy.py                        69    11    84%  61–64, 73–75, 79, 84, 91–93
  dispenser.py                    202    26    87%  91, 123–124, 141–149, 191–192, 198–200, 218–219, 259–260, 318, 332–334, 345–346, 356, 369, 384
  doctor.py                        65     7    89%  67–69, 92–94, 134
  generate.py                      41     2    95%  69, 87
  goal.py                          60     3    95%  30–31, 41
  log_handlers.py                  68     7    90%  50–51, 63, 112–116, 125
  proc.py                          45     1    98%  98
  sandbox.py                      218    18    92%  62, 73–75, 96, 142–149, 160, 456, 472, 497, 505
  typed_client_generation.py       80     5    94%  55–57, 70, 75
  utils.py                         56     2    96%  36–37
  version_prompt.py                72     8    89%  26–27, 39, 58–61, 79, 108
src/algokit/core/tasks
  ipfs.py                          63     7    89%  58–64, 140, 144, 146, 152
  nfd.py                           49    13    73%  25, 31, 34–41, 70–72, 99–101
  vanity_address.py                90    34    62%  49–50, 54, 59–75, 92–108, 128–131
  wallet.py                        71     5    93%  37, 129, 155–157
src/algokit/core/tasks/mint
  mint.py                          78    10    87%  123–133, 187
  models.py                        90    11    88%  50, 52, 57, 71–74, 85–88
TOTAL                            3284   378    88%

Tests: 388 | Skipped: 0 💤 | Failures: 0 ❌ | Errors: 0 🔥 | Time: 13.447s ⏱️
