Use fake HTTP server for the Patroni API in tests
We introduce a patroni_api fixture, defined in tests/conftest.py, which sets up an HTTP server serving files in a temporary directory. The server is itself defined by the PatroniAPI class; it has a 'routes()' context manager method to be used in actual tests to set up expected responses based on specified JSON files. We set up some logging in order to improve debugging. The direct advantage of this is that the PatroniResource.rest_api() method is now covered by the test suite. Coverage before this commit: Name Stmts Miss Cover ----------------------------------------------- check_patroni/__init__.py 3 0 100% check_patroni/cli.py 193 18 91% check_patroni/cluster.py 113 0 100% check_patroni/convert.py 23 5 78% check_patroni/node.py 146 1 99% check_patroni/types.py 50 23 54% ----------------------------------------------- TOTAL 528 47 91% and after this commit: Name Stmts Miss Cover ----------------------------------------------- check_patroni/__init__.py 3 0 100% check_patroni/cli.py 193 18 91% check_patroni/cluster.py 113 0 100% check_patroni/convert.py 23 5 78% check_patroni/node.py 146 1 99% check_patroni/types.py 50 9 82% ----------------------------------------------- TOTAL 528 33 94% In actual test functions, we either invoke patroni_api.routes() to configure which JSON file(s) should be served for each endpoint, or we define dedicated fixtures (e.g. cluster_config_has_changed()) to configure this for several test functions or the whole module. The 'old_replica_state' parametrized fixture is used when needed to adjust such fixtures, e.g. in cluster_has_replica_ok(), to modify the JSON content using cluster_api_set_replica_running() (previously in tests/tools.py, now in tests/__init__.py). The dependency on pytest-mock is no longer needed.
This commit is contained in:
parent
32e06f7051
commit
903b83e211
|
@ -8,6 +8,9 @@
|
|||
|
||||
### Misc
|
||||
|
||||
* Improve test coverage by running an HTTP server to fake the Patroni API (#55
|
||||
by @dlax).
|
||||
|
||||
## check_patroni 1.0.0 - 2023-08-28
|
||||
|
||||
Check patroni is now tagged as Production/Stable.
|
||||
|
|
|
@ -48,10 +48,9 @@ The `README.md` can be generated with `./docs/make_readme.sh`.
|
|||
## Executing Tests
|
||||
|
||||
Crafting repeatable tests using a live Patroni cluster can be intricate. To
|
||||
simplify the development process, interactions with Patroni's API are
|
||||
substituted with a mock function that yields an HTTP return code and a JSON
|
||||
object outlining the cluster's status. The JSON files containing this
|
||||
information are housed in the `./tests/json` directory.
|
||||
simplify the development process, a fake HTTP server is set up as a test
|
||||
fixture and serves static files (either from `tests/json` directory or from
|
||||
in-memory data).
|
||||
|
||||
An important consideration is that there is a potential drawback: if the JSON
|
||||
data is incorrect or if modifications have been made to Patroni without
|
||||
|
|
|
@ -5,7 +5,6 @@ flake8
|
|||
mypy==0.961
|
||||
pytest
|
||||
pytest-cov
|
||||
pytest-mock
|
||||
types-requests
|
||||
setuptools
|
||||
tox
|
||||
|
|
1
setup.py
1
setup.py
|
@ -46,7 +46,6 @@ setup(
|
|||
extras_require={
|
||||
"test": [
|
||||
"pytest",
|
||||
"pytest-mock",
|
||||
],
|
||||
},
|
||||
entry_points={
|
||||
|
|
|
@ -0,0 +1,64 @@
|
|||
import json
|
||||
import logging
|
||||
import shutil
|
||||
from contextlib import contextmanager
|
||||
from functools import partial
|
||||
from http.server import HTTPServer, SimpleHTTPRequestHandler
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterator, Mapping, Union
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PatroniAPI(HTTPServer):
    """Fake Patroni REST API: an HTTP server serving static files.

    Files are served from 'directory'; 'datadir' is the location of the
    canned JSON payloads that routes() installs on demand.
    """

    def __init__(self, directory: Path, *, datadir: Path) -> None:
        self.directory = directory
        self.datadir = datadir
        # SimpleHTTPRequestHandler serves files relative to 'directory'.
        handler_cls = partial(SimpleHTTPRequestHandler, directory=str(directory))
        # Port 0 lets the OS pick a free port; see self.endpoint.
        super().__init__(("", 0), handler_cls)

    def serve_forever(self, *args: Any) -> None:
        logger.info(
            "starting fake Patroni API at %s (directory=%s)",
            self.endpoint,
            self.directory,
        )
        return super().serve_forever(*args)

    @property
    def endpoint(self) -> str:
        """Base URL of the running server (host and OS-assigned port)."""
        return f"http://{self.server_name}:{self.server_port}"

    @contextmanager
    def routes(self, mapping: Mapping[str, Union[Path, str]]) -> Iterator[None]:
        """Temporarily install specified files in served directory, thus
        building "routes" from given mapping.

        The 'mapping' defines target route paths as keys and files to be
        installed in served directory as values. Mapping values of type 'str'
        are assumed to be relative file paths to the 'datadir'.
        """
        for route_path, fpath in mapping.items():
            if isinstance(fpath, str):
                fpath = self.datadir / fpath
            shutil.copy(fpath, self.directory / route_path)
        try:
            yield None
        finally:
            for fname in mapping:
                # Guard against a partially-installed mapping (e.g. if a
                # copy above failed): only remove files actually present,
                # so cleanup does not mask the original error.
                target = self.directory / fname
                if target.exists():
                    target.unlink()
|
||||
|
||||
|
||||
def cluster_api_set_replica_running(in_json: Path, target_dir: Path) -> Path:
    """Rewrite a cluster-endpoint JSON file so replicas report the pre-3.0.4
    state.

    Patroni 3.0.4 changed the state of replicas from "running" to
    "streaming"; this converts 'in_json' back to the old convention and
    writes the result into 'target_dir', returning the new file's path.
    """
    data = json.loads(in_json.read_text())
    for member in data["members"]:
        # Only replica-ish members are affected; leaders keep their state.
        if member["role"] in ("replica", "sync_standby") and member["state"] == "streaming":
            member["state"] = "running"
    assert target_dir.is_dir()
    out_json = target_dir / in_json.name
    out_json.write_text(json.dumps(data))
    return out_json
|
|
@ -1,11 +1,11 @@
|
|||
from functools import partial
|
||||
from typing import Any, Callable
|
||||
from pathlib import Path
|
||||
from threading import Thread
|
||||
from typing import Any, Iterator
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
from .tools import my_mock
|
||||
from . import PatroniAPI
|
||||
|
||||
|
||||
@pytest.fixture(
|
||||
|
@ -16,9 +16,24 @@ def old_replica_state(request: Any) -> Any:
|
|||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fake_restapi(mocker: MockerFixture) -> Callable[..., Any]:
|
||||
return partial(my_mock, mocker)
|
||||
@pytest.fixture(scope="session")
|
||||
def datadir() -> Path:
|
||||
return Path(__file__).parent / "json"
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def patroni_api(
|
||||
tmp_path_factory: pytest.TempPathFactory, datadir: Path
|
||||
) -> Iterator[PatroniAPI]:
|
||||
"""A fake HTTP server for the Patroni API serving files from a temporary
|
||||
directory.
|
||||
"""
|
||||
httpd = PatroniAPI(tmp_path_factory.mktemp("api"), datadir=datadir)
|
||||
t = Thread(target=httpd.serve_forever)
|
||||
t.start()
|
||||
yield httpd
|
||||
httpd.shutdown()
|
||||
t.join()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
|
|
@ -2,18 +2,19 @@ from click.testing import CliRunner
|
|||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_api_status_code_200(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_pending_restart_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_pending_restart"]
|
||||
)
|
||||
|
||||
def test_api_status_code_200(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
with patroni_api.routes({"patroni": "node_is_pending_restart_ok.json"}):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "node_is_pending_restart"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_api_status_code_404(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("Fake test", status=404)
|
||||
def test_api_status_code_404(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_pending_restart"]
|
||||
main, ["-e", patroni_api.endpoint, "node_is_pending_restart"]
|
||||
)
|
||||
assert result.exit_code == 3
|
||||
|
|
|
@ -1,20 +1,29 @@
|
|||
from pathlib import Path
|
||||
from typing import Iterator
|
||||
|
||||
import nagiosplugin
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def cluster_config_has_changed(patroni_api: PatroniAPI) -> Iterator[None]:
|
||||
with patroni_api.routes({"config": "cluster_config_has_changed.json"}):
|
||||
yield None
|
||||
|
||||
|
||||
def test_cluster_config_has_changed_ok_with_hash(
|
||||
runner: CliRunner, fake_restapi
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_config_has_changed")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_config_has_changed",
|
||||
"--hash",
|
||||
"96b12d82571473d13e890b893734e731",
|
||||
|
@ -28,18 +37,17 @@ def test_cluster_config_has_changed_ok_with_hash(
|
|||
|
||||
|
||||
def test_cluster_config_has_changed_ok_with_state_file(
|
||||
runner: CliRunner, fake_restapi, tmp_path: Path
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
state_file = tmp_path / "cluster_config_has_changed.state_file"
|
||||
with state_file.open("w") as f:
|
||||
f.write('{"hash": "96b12d82571473d13e890b893734e731"}')
|
||||
|
||||
fake_restapi("cluster_config_has_changed")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_config_has_changed",
|
||||
"--state-file",
|
||||
str(state_file),
|
||||
|
@ -53,14 +61,13 @@ def test_cluster_config_has_changed_ok_with_state_file(
|
|||
|
||||
|
||||
def test_cluster_config_has_changed_ko_with_hash(
|
||||
runner: CliRunner, fake_restapi
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_config_has_changed")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_config_has_changed",
|
||||
"--hash",
|
||||
"96b12d82571473d13e890b8937ffffff",
|
||||
|
@ -74,19 +81,18 @@ def test_cluster_config_has_changed_ko_with_hash(
|
|||
|
||||
|
||||
def test_cluster_config_has_changed_ko_with_state_file_and_save(
|
||||
runner: CliRunner, fake_restapi, tmp_path: Path
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
state_file = tmp_path / "cluster_config_has_changed.state_file"
|
||||
with state_file.open("w") as f:
|
||||
f.write('{"hash": "96b12d82571473d13e890b8937ffffff"}')
|
||||
|
||||
fake_restapi("cluster_config_has_changed")
|
||||
# test without saving the new hash
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_config_has_changed",
|
||||
"--state-file",
|
||||
str(state_file),
|
||||
|
@ -111,7 +117,7 @@ def test_cluster_config_has_changed_ko_with_state_file_and_save(
|
|||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_config_has_changed",
|
||||
"--state-file",
|
||||
str(state_file),
|
||||
|
@ -133,16 +139,15 @@ def test_cluster_config_has_changed_ko_with_state_file_and_save(
|
|||
|
||||
|
||||
def test_cluster_config_has_changed_params(
|
||||
runner: CliRunner, fake_restapi, tmp_path: Path
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
# This one is placed last because it seems like the exceptions are not flushed from stderr for the next tests.
|
||||
fake_state_file = tmp_path / "fake_file_name.state_file"
|
||||
fake_restapi("cluster_config_has_changed")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_config_has_changed",
|
||||
"--hash",
|
||||
"640df9f0211c791723f18fc3ed9dbb95",
|
||||
|
|
|
@ -2,12 +2,12 @@ from click.testing import CliRunner
|
|||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_cluster_has_leader_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("cluster_has_leader_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_leader"]
|
||||
)
|
||||
|
||||
def test_cluster_has_leader_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
with patroni_api.routes({"cluster": "cluster_has_leader_ok.json"}):
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "cluster_has_leader"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -15,11 +15,11 @@ def test_cluster_has_leader_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_cluster_has_leader_ok_standby_leader(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("cluster_has_leader_ok_standby_leader")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_leader"]
|
||||
)
|
||||
def test_cluster_has_leader_ok_standby_leader(
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
with patroni_api.routes({"cluster": "cluster_has_leader_ok_standby_leader.json"}):
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "cluster_has_leader"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -27,11 +27,9 @@ def test_cluster_has_leader_ok_standby_leader(runner: CliRunner, fake_restapi) -
|
|||
)
|
||||
|
||||
|
||||
def test_cluster_has_leader_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("cluster_has_leader_ko")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_leader"]
|
||||
)
|
||||
def test_cluster_has_leader_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
with patroni_api.routes({"cluster": "cluster_has_leader_ko.json"}):
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "cluster_has_leader"])
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -1,16 +1,29 @@
|
|||
from pathlib import Path
|
||||
from typing import Iterator, Union
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI, cluster_api_set_replica_running
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_has_replica_ok(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_has_replica_ok.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
# TODO Lag threshold tests
|
||||
def test_cluster_has_relica_ok(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ok", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_replica"]
|
||||
)
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ok")
|
||||
def test_cluster_has_relica_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "cluster_has_replica"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -18,15 +31,15 @@ def test_cluster_has_relica_ok(
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ok")
|
||||
def test_cluster_has_replica_ok_with_count_thresholds(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ok", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_has_replica",
|
||||
"--warning",
|
||||
"@1",
|
||||
|
@ -41,15 +54,15 @@ def test_cluster_has_replica_ok_with_count_thresholds(
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ok")
|
||||
def test_cluster_has_replica_ok_with_sync_count_thresholds(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ok", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_has_replica",
|
||||
"--sync-warning",
|
||||
"1:",
|
||||
|
@ -62,15 +75,26 @@ def test_cluster_has_replica_ok_with_sync_count_thresholds(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_has_replica_ok_lag(
|
||||
patroni_api: PatroniAPI, datadir: Path, tmp_path: Path, old_replica_state: bool
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_has_replica_ok_lag.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ok_lag")
|
||||
def test_cluster_has_replica_ok_with_count_thresholds_lag(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ok_lag", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_has_replica",
|
||||
"--warning",
|
||||
"@1",
|
||||
|
@ -87,15 +111,26 @@ def test_cluster_has_replica_ok_with_count_thresholds_lag(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_has_replica_ko(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_has_replica_ko.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ko")
|
||||
def test_cluster_has_replica_ko_with_count_thresholds(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ko", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_has_replica",
|
||||
"--warning",
|
||||
"@1",
|
||||
|
@ -110,15 +145,15 @@ def test_cluster_has_replica_ko_with_count_thresholds(
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ko")
|
||||
def test_cluster_has_replica_ko_with_sync_count_thresholds(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ko", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_has_replica",
|
||||
"--sync-warning",
|
||||
"2:",
|
||||
|
@ -133,15 +168,26 @@ def test_cluster_has_replica_ko_with_sync_count_thresholds(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_has_replica_ko_lag(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_has_replica_ko_lag.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_has_replica_ko_lag")
|
||||
def test_cluster_has_replica_ko_with_count_thresholds_and_lag(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_replica_ko_lag", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_has_replica",
|
||||
"--warning",
|
||||
"@1",
|
||||
|
|
|
@ -2,12 +2,16 @@ from click.testing import CliRunner
|
|||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_cluster_has_scheduled_action_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("cluster_has_scheduled_action_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_scheduled_action"]
|
||||
)
|
||||
|
||||
def test_cluster_has_scheduled_action_ok(
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
with patroni_api.routes({"cluster": "cluster_has_scheduled_action_ok.json"}):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "cluster_has_scheduled_action"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -16,12 +20,14 @@ def test_cluster_has_scheduled_action_ok(runner: CliRunner, fake_restapi) -> Non
|
|||
|
||||
|
||||
def test_cluster_has_scheduled_action_ko_switchover(
|
||||
runner: CliRunner, fake_restapi
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_scheduled_action_ko_switchover")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_scheduled_action"]
|
||||
)
|
||||
with patroni_api.routes(
|
||||
{"cluster": "cluster_has_scheduled_action_ko_switchover.json"}
|
||||
):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "cluster_has_scheduled_action"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -30,12 +36,14 @@ def test_cluster_has_scheduled_action_ko_switchover(
|
|||
|
||||
|
||||
def test_cluster_has_scheduled_action_ko_restart(
|
||||
runner: CliRunner, fake_restapi
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_has_scheduled_action_ko_restart")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_has_scheduled_action"]
|
||||
)
|
||||
with patroni_api.routes(
|
||||
{"cluster": "cluster_has_scheduled_action_ko_restart.json"}
|
||||
):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "cluster_has_scheduled_action"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -2,12 +2,16 @@ from click.testing import CliRunner
|
|||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_cluster_is_in_maintenance_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("cluster_is_in_maintenance_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_is_in_maintenance"]
|
||||
)
|
||||
|
||||
def test_cluster_is_in_maintenance_ok(
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
with patroni_api.routes({"cluster": "cluster_is_in_maintenance_ok.json"}):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "cluster_is_in_maintenance"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -15,11 +19,13 @@ def test_cluster_is_in_maintenance_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_cluster_is_in_maintenance_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("cluster_is_in_maintenance_ko")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_is_in_maintenance"]
|
||||
)
|
||||
def test_cluster_is_in_maintenance_ko(
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
with patroni_api.routes({"cluster": "cluster_is_in_maintenance_ko.json"}):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "cluster_is_in_maintenance"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -28,12 +34,14 @@ def test_cluster_is_in_maintenance_ko(runner: CliRunner, fake_restapi) -> None:
|
|||
|
||||
|
||||
def test_cluster_is_in_maintenance_ok_pause_false(
|
||||
runner: CliRunner, fake_restapi
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_is_in_maintenance_ok_pause_false")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_is_in_maintenance"]
|
||||
)
|
||||
with patroni_api.routes(
|
||||
{"cluster": "cluster_is_in_maintenance_ok_pause_false.json"}
|
||||
):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "cluster_is_in_maintenance"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -1,15 +1,30 @@
|
|||
from pathlib import Path
|
||||
from typing import Iterator, Union
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI, cluster_api_set_replica_running
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_node_count_ok(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_node_count_ok.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_node_count_ok")
|
||||
def test_cluster_node_count_ok(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI, old_replica_state: bool
|
||||
) -> None:
|
||||
fake_restapi("cluster_node_count_ok", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "cluster_node_count"]
|
||||
)
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "cluster_node_count"])
|
||||
assert result.exit_code == 0
|
||||
if old_replica_state:
|
||||
assert (
|
||||
|
@ -23,15 +38,15 @@ def test_cluster_node_count_ok(
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_node_count_ok")
|
||||
def test_cluster_node_count_ok_with_thresholds(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI, old_replica_state: bool
|
||||
) -> None:
|
||||
fake_restapi("cluster_node_count_ok", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_node_count",
|
||||
"--warning",
|
||||
"@0:1",
|
||||
|
@ -56,17 +71,26 @@ def test_cluster_node_count_ok_with_thresholds(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_node_count_healthy_warning(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_node_count_healthy_warning.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_node_count_healthy_warning")
|
||||
def test_cluster_node_count_healthy_warning(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI, old_replica_state: bool
|
||||
) -> None:
|
||||
fake_restapi(
|
||||
"cluster_node_count_healthy_warning", use_old_replica_state=old_replica_state
|
||||
)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_node_count",
|
||||
"--healthy-warning",
|
||||
"@2",
|
||||
|
@ -87,17 +111,26 @@ def test_cluster_node_count_healthy_warning(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_node_count_healthy_critical(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_node_count_healthy_critical.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_node_count_healthy_critical")
|
||||
def test_cluster_node_count_healthy_critical(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi(
|
||||
"cluster_node_count_healthy_critical", use_old_replica_state=old_replica_state
|
||||
)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_node_count",
|
||||
"--healthy-warning",
|
||||
"@2",
|
||||
|
@ -112,15 +145,26 @@ def test_cluster_node_count_healthy_critical(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_node_count_warning(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_node_count_warning.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_node_count_warning")
|
||||
def test_cluster_node_count_warning(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI, old_replica_state: bool
|
||||
) -> None:
|
||||
fake_restapi("cluster_node_count_warning", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_node_count",
|
||||
"--warning",
|
||||
"@2",
|
||||
|
@ -141,15 +185,26 @@ def test_cluster_node_count_warning(
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cluster_node_count_critical(
|
||||
patroni_api: PatroniAPI, old_replica_state: bool, datadir: Path, tmp_path: Path
|
||||
) -> Iterator[None]:
|
||||
path: Union[str, Path] = "cluster_node_count_critical.json"
|
||||
if old_replica_state:
|
||||
path = cluster_api_set_replica_running(datadir / path, tmp_path)
|
||||
with patroni_api.routes({"cluster": path}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cluster_node_count_critical")
|
||||
def test_cluster_node_count_critical(
|
||||
runner: CliRunner, fake_restapi, old_replica_state: bool
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
fake_restapi("cluster_node_count_critical", use_old_replica_state=old_replica_state)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"cluster_node_count",
|
||||
"--warning",
|
||||
"@2",
|
||||
|
|
|
@ -1,11 +1,19 @@
|
|||
from pathlib import Path
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_is_alive_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi(None)
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_alive"])
|
||||
|
||||
def test_node_is_alive_ok(
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
liveness = tmp_path / "liveness"
|
||||
liveness.touch()
|
||||
with patroni_api.routes({"liveness": liveness}):
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_alive"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -13,9 +21,8 @@ def test_node_is_alive_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_alive_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi(None, status=404)
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_alive"])
|
||||
def test_node_is_alive_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_alive"])
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -1,23 +1,37 @@
|
|||
from typing import Iterator
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_is_leader_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_leader_ok")
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_leader"])
|
||||
|
||||
@pytest.fixture
|
||||
def node_is_leader_ok(patroni_api: PatroniAPI) -> Iterator[None]:
|
||||
with patroni_api.routes(
|
||||
{
|
||||
"leader": "node_is_leader_ok.json",
|
||||
"standby-leader": "node_is_leader_ok_standby_leader.json",
|
||||
}
|
||||
):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("node_is_leader_ok")
|
||||
def test_node_is_leader_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_leader"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
== "NODEISLEADER OK - This node is a leader node. | is_leader=1;;@0\n"
|
||||
)
|
||||
|
||||
fake_restapi("node_is_leader_ok_standby_leader")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
["-e", "https://10.20.199.3:8008", "node_is_leader", "--is-standby-leader"],
|
||||
["-e", patroni_api.endpoint, "node_is_leader", "--is-standby-leader"],
|
||||
)
|
||||
print(result.stdout)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -25,19 +39,17 @@ def test_node_is_leader_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_leader_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_leader_ko", status=503)
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_leader"])
|
||||
def test_node_is_leader_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_leader"])
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
== "NODEISLEADER CRITICAL - This node is not a leader node. | is_leader=0;;@0\n"
|
||||
)
|
||||
|
||||
fake_restapi("node_is_leader_ko_standby_leader", status=503)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
["-e", "https://10.20.199.3:8008", "node_is_leader", "--is-standby-leader"],
|
||||
["-e", patroni_api.endpoint, "node_is_leader", "--is-standby-leader"],
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
|
|
|
@ -2,12 +2,14 @@ from click.testing import CliRunner
|
|||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_is_pending_restart_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_pending_restart_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_pending_restart"]
|
||||
)
|
||||
|
||||
def test_node_is_pending_restart_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
with patroni_api.routes({"patroni": "node_is_pending_restart_ok.json"}):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "node_is_pending_restart"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -15,11 +17,11 @@ def test_node_is_pending_restart_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_pending_restart_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_pending_restart_ko")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_pending_restart"]
|
||||
)
|
||||
def test_node_is_pending_restart_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
with patroni_api.routes({"patroni": "node_is_pending_restart_ko.json"}):
|
||||
result = runner.invoke(
|
||||
main, ["-e", patroni_api.endpoint, "node_is_pending_restart"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -2,10 +2,12 @@ from click.testing import CliRunner
|
|||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_is_primary_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_primary_ok")
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_primary"])
|
||||
|
||||
def test_node_is_primary_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
with patroni_api.routes({"primary": "node_is_primary_ok.json"}):
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_primary"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -13,9 +15,8 @@ def test_node_is_primary_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_primary_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_primary_ko", status=503)
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_primary"])
|
||||
def test_node_is_primary_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_primary"])
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -1,11 +1,27 @@
|
|||
from typing import Iterator
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_is_replica_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_replica_ok")
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_replica"])
|
||||
|
||||
@pytest.fixture
|
||||
def node_is_replica_ok(patroni_api: PatroniAPI) -> Iterator[None]:
|
||||
with patroni_api.routes(
|
||||
{
|
||||
k: "node_is_replica_ok.json"
|
||||
for k in ("replica", "synchronous", "asynchronous")
|
||||
}
|
||||
):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("node_is_replica_ok")
|
||||
def test_node_is_replica_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_replica"])
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -13,9 +29,8 @@ def test_node_is_replica_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_is_replica_ko", status=503)
|
||||
result = runner.invoke(main, ["-e", "https://10.20.199.3:8008", "node_is_replica"])
|
||||
def test_node_is_replica_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_is_replica"])
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
result.stdout
|
||||
|
@ -23,11 +38,10 @@ def test_node_is_replica_ko(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_ko_lag(runner: CliRunner, fake_restapi) -> None:
|
||||
def test_node_is_replica_ko_lag(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok", status=503)
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_replica", "--max-lag", "100"]
|
||||
main, ["-e", patroni_api.endpoint, "node_is_replica", "--max-lag", "100"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
|
@ -35,12 +49,11 @@ def test_node_is_replica_ko_lag(runner: CliRunner, fake_restapi) -> None:
|
|||
== "NODEISREPLICA CRITICAL - This node is not a running replica with no noloadbalance tag and a lag under 100. | is_replica=0;;@0\n"
|
||||
)
|
||||
|
||||
fake_restapi("node_is_replica_ok", status=503)
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_is_replica",
|
||||
"--is-async",
|
||||
"--max-lag",
|
||||
|
@ -54,11 +67,11 @@ def test_node_is_replica_ko_lag(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_sync_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
@pytest.mark.usefixtures("node_is_replica_ok")
|
||||
def test_node_is_replica_sync_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_replica", "--is-sync"]
|
||||
main, ["-e", patroni_api.endpoint, "node_is_replica", "--is-sync"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
|
@ -67,11 +80,10 @@ def test_node_is_replica_sync_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_sync_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
def test_node_is_replica_sync_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok", status=503)
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_replica", "--is-sync"]
|
||||
main, ["-e", patroni_api.endpoint, "node_is_replica", "--is-sync"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
|
@ -80,11 +92,11 @@ def test_node_is_replica_sync_ko(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_async_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
@pytest.mark.usefixtures("node_is_replica_ok")
|
||||
def test_node_is_replica_async_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok")
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_replica", "--is-async"]
|
||||
main, ["-e", patroni_api.endpoint, "node_is_replica", "--is-async"]
|
||||
)
|
||||
assert result.exit_code == 0
|
||||
assert (
|
||||
|
@ -93,11 +105,10 @@ def test_node_is_replica_async_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_async_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
def test_node_is_replica_async_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok", status=503)
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_is_replica", "--is-async"]
|
||||
main, ["-e", patroni_api.endpoint, "node_is_replica", "--is-async"]
|
||||
)
|
||||
assert result.exit_code == 2
|
||||
assert (
|
||||
|
@ -106,14 +117,14 @@ def test_node_is_replica_async_ko(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_is_replica_params(runner: CliRunner, fake_restapi) -> None:
|
||||
@pytest.mark.usefixtures("node_is_replica_ok")
|
||||
def test_node_is_replica_params(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_is_replica",
|
||||
"--is-async",
|
||||
"--is-sync",
|
||||
|
@ -126,12 +137,11 @@ def test_node_is_replica_params(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
# We don't do the check ourselves, patroni does it and changes the return code
|
||||
fake_restapi("node_is_replica_ok")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_is_replica",
|
||||
"--is-sync",
|
||||
"--max-lag",
|
||||
|
|
|
@ -1,15 +1,25 @@
|
|||
from typing import Iterator
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_patroni_version_ok(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_patroni_version")
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def node_patroni_version(patroni_api: PatroniAPI) -> Iterator[None]:
|
||||
with patroni_api.routes({"patroni": "node_patroni_version.json"}):
|
||||
yield None
|
||||
|
||||
|
||||
def test_node_patroni_version_ok(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_patroni_version",
|
||||
"--patroni-version",
|
||||
"2.0.2",
|
||||
|
@ -22,13 +32,12 @@ def test_node_patroni_version_ok(runner: CliRunner, fake_restapi) -> None:
|
|||
)
|
||||
|
||||
|
||||
def test_node_patroni_version_ko(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_patroni_version")
|
||||
def test_node_patroni_version_ko(runner: CliRunner, patroni_api: PatroniAPI) -> None:
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_patroni_version",
|
||||
"--patroni-version",
|
||||
"1.0.0",
|
||||
|
|
|
@ -1,18 +1,30 @@
|
|||
from pathlib import Path
|
||||
from typing import Iterator
|
||||
|
||||
import nagiosplugin
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
from check_patroni.cli import main
|
||||
|
||||
from . import PatroniAPI
|
||||
|
||||
def test_node_tl_has_changed_ok_with_timeline(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_tl_has_changed")
|
||||
|
||||
@pytest.fixture
|
||||
def node_tl_has_changed(patroni_api: PatroniAPI) -> Iterator[None]:
|
||||
with patroni_api.routes({"patroni": "node_tl_has_changed.json"}):
|
||||
yield None
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("node_tl_has_changed")
|
||||
def test_node_tl_has_changed_ok_with_timeline(
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_tl_has_changed",
|
||||
"--timeline",
|
||||
"58",
|
||||
|
@ -25,19 +37,19 @@ def test_node_tl_has_changed_ok_with_timeline(runner: CliRunner, fake_restapi) -
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("node_tl_has_changed")
|
||||
def test_node_tl_has_changed_ok_with_state_file(
|
||||
runner: CliRunner, fake_restapi, tmp_path: Path
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
state_file = tmp_path / "node_tl_has_changed.state_file"
|
||||
with state_file.open("w") as f:
|
||||
f.write('{"timeline": 58}')
|
||||
|
||||
fake_restapi("node_tl_has_changed")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_tl_has_changed",
|
||||
"--state-file",
|
||||
str(state_file),
|
||||
|
@ -50,13 +62,15 @@ def test_node_tl_has_changed_ok_with_state_file(
|
|||
)
|
||||
|
||||
|
||||
def test_node_tl_has_changed_ko_with_timeline(runner: CliRunner, fake_restapi) -> None:
|
||||
fake_restapi("node_tl_has_changed")
|
||||
@pytest.mark.usefixtures("node_tl_has_changed")
|
||||
def test_node_tl_has_changed_ko_with_timeline(
|
||||
runner: CliRunner, patroni_api: PatroniAPI
|
||||
) -> None:
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_tl_has_changed",
|
||||
"--timeline",
|
||||
"700",
|
||||
|
@ -69,20 +83,20 @@ def test_node_tl_has_changed_ko_with_timeline(runner: CliRunner, fake_restapi) -
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("node_tl_has_changed")
|
||||
def test_node_tl_has_changed_ko_with_state_file_and_save(
|
||||
runner: CliRunner, fake_restapi, tmp_path: Path
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
state_file = tmp_path / "node_tl_has_changed.state_file"
|
||||
with state_file.open("w") as f:
|
||||
f.write('{"timeline": 700}')
|
||||
|
||||
fake_restapi("node_tl_has_changed")
|
||||
# test without saving the new tl
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_tl_has_changed",
|
||||
"--state-file",
|
||||
str(state_file),
|
||||
|
@ -106,7 +120,7 @@ def test_node_tl_has_changed_ko_with_state_file_and_save(
|
|||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_tl_has_changed",
|
||||
"--state-file",
|
||||
str(state_file),
|
||||
|
@ -127,17 +141,17 @@ def test_node_tl_has_changed_ko_with_state_file_and_save(
|
|||
assert new_tl == 58
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("node_tl_has_changed")
|
||||
def test_node_tl_has_changed_params(
|
||||
runner: CliRunner, fake_restapi, tmp_path: Path
|
||||
runner: CliRunner, patroni_api: PatroniAPI, tmp_path: Path
|
||||
) -> None:
|
||||
# This one is placed last because it seems like the exceptions are not flushed from stderr for the next tests.
|
||||
fake_state_file = tmp_path / "fake_file_name.state_file"
|
||||
fake_restapi("node_tl_has_changed")
|
||||
result = runner.invoke(
|
||||
main,
|
||||
[
|
||||
"-e",
|
||||
"https://10.20.199.3:8008",
|
||||
patroni_api.endpoint,
|
||||
"node_tl_has_changed",
|
||||
"--timeline",
|
||||
"58",
|
||||
|
@ -151,9 +165,7 @@ def test_node_tl_has_changed_params(
|
|||
== "NODETLHASCHANGED UNKNOWN: click.exceptions.UsageError: Either --timeline or --state-file should be provided for this service\n"
|
||||
)
|
||||
|
||||
result = runner.invoke(
|
||||
main, ["-e", "https://10.20.199.3:8008", "node_tl_has_changed"]
|
||||
)
|
||||
result = runner.invoke(main, ["-e", patroni_api.endpoint, "node_tl_has_changed"])
|
||||
assert result.exit_code == 3
|
||||
assert (
|
||||
result.stdout
|
||||
|
|
|
@ -1,50 +0,0 @@
|
|||
import json
|
||||
import pathlib
|
||||
from typing import Any
|
||||
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
from check_patroni.types import APIError, PatroniResource
|
||||
|
||||
here = pathlib.Path(__file__).parent
|
||||
|
||||
|
||||
def getjson(name: str) -> Any:
|
||||
path = here / "json" / f"{name}.json"
|
||||
if not path.exists():
|
||||
raise Exception(f"path does not exist : {path}")
|
||||
|
||||
with path.open() as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def my_mock(
|
||||
mocker: MockerFixture,
|
||||
json_file: str,
|
||||
*,
|
||||
status: int = 200,
|
||||
use_old_replica_state: bool = False,
|
||||
) -> None:
|
||||
def mock_rest_api(self: PatroniResource, service: str) -> Any:
|
||||
if status != 200:
|
||||
raise APIError("Test en erreur pour status code 200")
|
||||
if json_file:
|
||||
if use_old_replica_state:
|
||||
assert json_file.startswith(
|
||||
"cluster_has_replica"
|
||||
) or json_file.startswith("cluster_node_count")
|
||||
return cluster_api_set_replica_running(getjson(json_file))
|
||||
return getjson(json_file)
|
||||
return None
|
||||
|
||||
mocker.resetall()
|
||||
mocker.patch("check_patroni.types.PatroniResource.rest_api", mock_rest_api)
|
||||
|
||||
|
||||
def cluster_api_set_replica_running(js: Any) -> Any:
|
||||
# starting from 3.0.4 the state of replicas is streaming instead of running
|
||||
for node in js["members"]:
|
||||
if node["role"] in ["replica", "sync_standby"]:
|
||||
if node["state"] == "streaming":
|
||||
node["state"] = "running"
|
||||
return js
|
Loading…
Reference in a new issue