From 9970416e1aab370166a8a25ac3272829becaf403 Mon Sep 17 00:00:00 2001 From: Peder Hovdan Andresen <107681714+pederhan@users.noreply.github.com> Date: Tue, 3 Sep 2024 10:06:08 +0200 Subject: [PATCH] Add Zabbix 7 compatibility, rewrite API code (#81) * Add Zabbix 7.0 compatibility (#79) * Add Zabbix 7.0 compatibility * Add Host model comments * Add missing type annotations to __init__ .py * Fix state manager mypy issues * Refactor failsafe OK file checking, add test * Add type annotations to __init__ * Refactor process initialization * Refactor hanging process handling * Ignore missing mypy stubs in import * Extract failsafe functions, add tests * Fix zabbix_tags2zac_tags, add types * Fix StateManager mypy stub hack * Add type annotations for all processing.py methods * Improve state.State comments+docstrings * Refactor host modifier/source collector loading * Refactor failsafe checking Moves everything into failsafe.py module. This allows us to test the failsafe checking more thoroughly. * Fix incorrect variable usage * Refactor DB host retrieval in `ZabbixUpdater` * Fix broken tests * Add check_failsafe tests * Fix incorrect variable name * Rewrite API internals with Pydantic (#82) * Remove disabled hosts from maintenance * Add periodic maintenance cleanup * Add map_dir fixtures * Add config options * Fix mocks, use fixture * Rewrite API internals with Pydantic * Fix tests * Fix and improve JSON serialization * Fix changelog headers * Add API param building functions * Fix `set_hostgroups` not being able to remove groups * Add read-only mode for ZabbixAPI Activated during dryruns. * Fix `ParamsType` docstring * Document new config options in changelog * Update changelog * Add Py3.12 trove classifier * Update sample config * Fix ZabbixAPI method docstring tense * README: update supported versions * Create required host groups on startup * README: fix JSON example * README: Make host modifier example more relevant * Update changelog * Add notes on running source collectors standalone * Warn if no proxies * Remove redundant bool cast * Use absolute import * Use absolute imports * Sort host groups when logging new and old * Add note regarding Source Handler update interval * Change "replaced" to "updated" for source hosts * Remove trigger support in GC * Remove validation of request params * Add support for mysterious host.status==3 * Fix missing assignments in SignalHandler.__init__ * Fix missing parameter type annotation * Move warning next to statement that caused it * Add py.typed marker file * Update README, run GC every 24h * Show data in request errors * Fix fetching templates via old API code * Remove urllib3 logger, set logger on httpcore * Fetch groups when fetching hosts * Make bulk an optional field for CreateHostInterfaceDetails * Remove duplicated code for host interfaces * Refactor `set_interface` * Fix setting proxies on Zabbix 7 * Comments, var name * Update host group map when creating host group * Changelog heading * Fix README grammar * Log names of templates * Add NOTE comment * Add configurable group prefix separator --- CHANGELOG.md | 58 + README.md | 111 +- config.sample.toml | 29 + pyproject.toml | 8 +- tests/conftest.py | 48 +- tests/test_failsafe.py | 210 ++ tests/test_processing/test_zabbixupdater.py | 32 +- tests/test_state.py | 4 +- tests/test_utils.py | 19 +- zabbix_auto_config/__init__.py | 217 +- zabbix_auto_config/_types.py | 46 +- zabbix_auto_config/compat.py | 74 + zabbix_auto_config/exceptions.py | 69 + zabbix_auto_config/failsafe.py | 81 + 
zabbix_auto_config/models.py | 87 +- zabbix_auto_config/processing.py | 1376 +++++++----- zabbix_auto_config/py.typed | 0 zabbix_auto_config/pyzabbix/__init__.py | 3 + zabbix_auto_config/pyzabbix/client.py | 2189 +++++++++++++++++++ zabbix_auto_config/pyzabbix/compat.py | 142 ++ zabbix_auto_config/pyzabbix/enums.py | 151 ++ zabbix_auto_config/pyzabbix/types.py | 581 +++++ zabbix_auto_config/pyzabbix/utils.py | 29 + zabbix_auto_config/state.py | 42 +- zabbix_auto_config/utils.py | 18 +- 25 files changed, 4863 insertions(+), 761 deletions(-) create mode 100644 CHANGELOG.md create mode 100644 tests/test_failsafe.py create mode 100644 zabbix_auto_config/compat.py create mode 100644 zabbix_auto_config/failsafe.py create mode 100644 zabbix_auto_config/py.typed create mode 100644 zabbix_auto_config/pyzabbix/__init__.py create mode 100644 zabbix_auto_config/pyzabbix/client.py create mode 100644 zabbix_auto_config/pyzabbix/compat.py create mode 100644 zabbix_auto_config/pyzabbix/enums.py create mode 100644 zabbix_auto_config/pyzabbix/types.py create mode 100644 zabbix_auto_config/pyzabbix/utils.py diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..8a7d1ee --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,58 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + + + +## [0.2.0](https://github.com/unioslo/zabbix-auto-config/releases/tag/zac-v0.2.0) + +### Added + +- Zabbix 7 compatibility +- Configuration option for setting group prefix separator. + - `[zabbix]` + - `prefix_separator`: Separator for group prefixes. Default is `-`. +- Configuration options for each process. + - `[zac.process.garbage_collector]` + - `enabled`: Enable automatic garbage collection. + - `delete_empty_maintenance`: Delete maintenances that only contain disabled hosts. + - `update_interval`: Update interval in seconds. + - `[zac.process.host_updater]` + - `update_interval`: Update interval in seconds. + - `[zac.process.hostgroup_updater]` + - `update_interval`: Update interval in seconds. + - `[zac.process.template_updater]` + - `update_interval`: Update interval in seconds. + - `[zac.process.source_merger]` + - `update_interval`: Update interval in seconds. +- Automatic garbage collection of maintenances (and more in the future.) + - Removes disabled hosts from maintenances. + - This feature is disabled by default, and must be opted into with `zac.process.garbage_collector.enabled` + - Optionally also delete maintenances that only contain disabled hosts with `zac.process.garbage_collector.delete_empty_maintenance`. + - If you have a large number of disabled hosts, it's recommended to set a long `update_interval` to avoid unnecessary load on the Zabbix server. The default is 300 seconds. +- Automatic creation of required host groups. + - Creates the groups configured by the following options: + - `zabbix.hostgroup_all` + - `zabbix.hostgroup_disabled` +- Utility functions for serializing source collector outputs: + - `zabbix_auto_config.models.hosts_to_json` + - `zabbix_auto_config.models.print_hosts` +- `py.typed` marker file. + +### Changed + +- API internals rewritten to use Pydantic models. + - Borrows API code from Zabbix-cli v3. +- Dry run mode now guarantees no changes are made to Zabbix by preventing all write operations via the API. + +### Deprecated + +- Zabbix 5 support. 
+ - Should in most cases work with Zabbix 5, but it will not be actively supported going forward. + +## 0.1.0 + +First version diff --git a/README.md b/README.md index 9808ed3..de842a8 100644 --- a/README.md +++ b/README.md @@ -2,13 +2,13 @@ Zabbix-auto-config is an utility that aims to automatically configure hosts, host groups, host inventories, template groups and templates in the monitoring software [Zabbix](https://www.zabbix.com/). -Note: Only tested with Zabbix 6.0 and 6.4. +Note: Primarily tested with Zabbix 7.0 and 6.4, but should work with 6.0 and 5.2. ## Requirements * Python >=3.8 * pip >=21.3 -* Zabbix >=5.0 +* Zabbix >=6.4 # Quick start @@ -19,16 +19,26 @@ This is a crash course in how to quickly get this application up and running in Setup a Zabbix test instance with [podman](https://podman.io/) and [podman-compose](https://github.com/containers/podman-compose/). ```bash -TAG=alpine-5.0-latest ZABBIX_PASSWORD=secret podman-compose up -d +TAG=7.0-ubuntu-latest ZABBIX_PASSWORD=secret podman-compose up -d ``` ## Zabbix prerequisites -It is currently assumed that you have the following hostgroups in Zabbix. You should logon to Zabbix and create them: +The following host groups are created in Zabbix if they do not exist: * All-auto-disabled-hosts * All-hosts +The name of these groups can be configured in `config.toml`: + +```toml +[zabbix] +hostgroup_all = "All-hosts" +hostgroup_disabled = "All-auto-disabled-hosts" +``` + +These groups contain enabled and disabled hosts respectively. + For automatic linking in templates you could create the templates: * Template-barry @@ -36,6 +46,8 @@ For automatic linking in templates you could create the templates: ## Database +The application requires a PostgreSQL database to store the state of the collected hosts. The database can be created with the following command: + ```bash PGPASSWORD=secret psql -h localhost -U postgres -p 5432 -U zabbix << EOF CREATE DATABASE zac; @@ -49,11 +61,13 @@ CREATE TABLE hosts_source ( EOF ``` +Replace login credentials with your own when running against a different database. This is a one-time procedure per environment. + ## Application -### Installation (production) +### Installation -For production, installing the project in a virtual environment directly with pip is the recommended way to go: +Installing the project in a virtual environment directly with pip is the recommended way to go: ```bash python -m venv venv @@ -100,8 +114,9 @@ def collect(*args: Any, **kwargs: Any) -> List[Host]: if __name__ == "__main__": - for host in collect(): - print(host.model_dump_json()) + # Print hosts as a JSON array when running standalone + from zabbix_auto_config.models import print_hosts + print_hosts(collect()) EOF cat > path/to/host_modifier_dir/mod.py << EOF from zabbix_auto_config.models import Host @@ -133,7 +148,7 @@ zac ## Systemd unit -You could run this as a systemd service: +To add automatic startup of the application with systemd, create a unit file in `/etc/systemd/system/zabbix-auto-config.service`: ```ini [Unit] @@ -147,6 +162,8 @@ WorkingDirectory=/home/zabbix/zabbix-auto-config Environment=PATH=/home/zabbix/zabbix-auto-config/venv/bin ExecStart=/home/zabbix/zabbix-auto-config/venv/bin/zac TimeoutSec=300 +Restart=always +RestartSec=5s [Install] WantedBy=multi-user.target @@ -154,16 +171,24 @@ WantedBy=multi-user.target ## Source collectors +ZAC relies on "Source Collectors" to fetch host data from various sources. +A source can be anything: an API, a file, a database, etc. 
What matters is that +the source is able to return a list of `zabbix_auto_config.models.Host` objects. ZAC uses these objects to create or update hosts in Zabbix. If a host with the same hostname is collected from multiple different sources, its information is combined into a single logical host object before being used to create/update the host in Zabbix. + +### Writing a source collector + Source collectors are Python modules placed in a directory specified by the `source_collector_dir` option in the `[zac]` table of the configuration file. Zabbix-auto-config attempts to load all modules referenced by name in the configuration file from this directory. If any referenced modules cannot be found in the directory, they will be ignored. -A source collector module contains a function named `collect` that returns a list of `Host` objects. These host objects are used by Zabbix-auto-config to create or update hosts in Zabbix. +A source collector module contains a function named `collect()` that returns a list of `Host` objects. These host objects are used by Zabbix-auto-config to create or update hosts in Zabbix. Here's an example of a source collector module that reads hosts from a file: ```python # path/to/source_collector_dir/load_from_json.py +import json from typing import Any, Dict, List + from zabbix_auto_config.models import Host DEFAULT_FILE = "hosts.json" @@ -171,10 +196,30 @@ DEFAULT_FILE = "hosts.json" def collect(*args: Any, **kwargs: Any) -> List[Host]: filename = kwargs.get("filename", DEFAULT_FILE) with open(filename, "r") as f: - return [Host(**host) for host in f.read()] + return [Host(**host) for host in json.load(f)] ``` -A module is recognized as a source collector if it contains a `collect` function that accepts an arbitrary number of arguments and keyword arguments and returns a list of `Host` objects. Type annotations are optional but recommended. +A module is recognized as a source collector if it contains a `collect()` function that accepts an arbitrary number of arguments and keyword arguments and returns a list of `Host` objects. Type annotations are optional but recommended. + +We can also provide a `if __name__ == "__main__"` block to run the collector standalone. This is useful for testing the collector module without running the entire application. + +```py +if __name__ == "__main__": + # Print hosts as a JSON array when running standalone + from zabbix_auto_config.models import print_hosts + print_hosts(collect()) +``` + +If you wish to collect just the JSON output and write it to a file or otherwise manipulate it, you can import the `hosts_to_json` function from `zabbix_auto_config.models` and use it like this: + +```py +if __name__ == "__main__": + from zabbix_auto_config.models import hosts_to_json + with open("output.json", "w") as f: + f.write(hosts_to_json(collect())) +``` + +### Configuration The configuration entry for loading a source collector module, like the `load_from_json.py` module above, includes both mandatory and optional fields. Here's how it can be configured: @@ -186,9 +231,12 @@ error_tolerance = 5 error_duration = 360 exit_on_error = false disable_duration = 3600 +# Extra keyword arguments to pass to the collect function: filename = "hosts.json" ``` +Only the extra `filename` option is passed in as a kwarg to the `collect()` function. + The following configurations options are available: ### Mandatory configuration @@ -197,13 +245,12 @@ The following configurations options are available: `module_name` is the name of the module to load. 
This is the name that will be used in the configuration file to reference the module. It must correspond with the name of the module file, without the `.py` extension. #### update_interval -`update_interval` is the number of seconds between updates. This is the interval at which the `collect` function will be called. +`update_interval` is the number of seconds between updates. This is the interval at which the `collect()` function will be called. ### Optional configuration (error handling) If `error_tolerance` number of errors occur within `error_duration` seconds, the collector is disabled. Source collectors do not tolerate errors by default and must opt-in to this behavior by setting `error_tolerance` and `error_duration` to non-zero values. If `exit_on_error` is set to `true`, the application will exit. Otherwise, the collector will be disabled for `disable_duration` seconds. - #### error_tolerance `error_tolerance` (default: 0) is the maximum number of errors tolerated within `error_duration` seconds. @@ -226,13 +273,16 @@ A useful guide is to set `error_duration` as `(error_tolerance + 1) * update_int ### Keyword arguments -Any extra config options specified in the configuration file will be passed to the `collect` function as keyword arguments. In the example above, the `filename` option is passed to the `collect` function, and then accessed via `kwargs["filename"]`. +Any extra config options specified in the configuration file will be passed to the `collect()` function as keyword arguments. In the example above, the `filename` option is passed to the `collect()` function, and then accessed via `kwargs["filename"]`. ## Host modifiers Host modifiers are Python modules (files) that are placed in a directory defined by the option `host_modifier_dir` in the `[zac]` table of the config file. A host modifier is a module that contains a function named `modify` that takes a `Host` object as its only argument, modifies it, and returns it. Zabbix-auto-config will attempt to load all modules in the given directory. + +### Writing a host modifier + A host modifier module that adds a given siteadmin to all hosts could look like this: ```py @@ -243,7 +293,8 @@ from zabbix_auto_config.models import Host SITEADMIN = "admin@example.com" def modify(host: Host) -> Host: - host.siteadmins.add(SITEADMIN) + if host.hostname.endswith(".example.com"): + host.siteadmins.add(SITEADMIN) return host ``` @@ -259,6 +310,34 @@ Zac manages only inventory properties configured as `managed_inventory` in `conf 2. Remove the "location" property from the host in the source 3. "location=x" will remain in Zabbix +## Garbage Collection + +ZAC provides an optional Zabbix garbage collection module that cleans up stale data from Zabbix that is not otherwise managed by ZAC, such as maintenances. + +The garbage collector currently does the following: + +- Removes disabled hosts from maintenances. +- Deletes maintenances that only contain disabled hosts. + +Under normal usage, hosts are removed from maintenances when being disabled by ZAC, but if hosts are disabled outside of ZAC, they will not be removed from maintenances. The GC module will remove these hosts, and optionally delete the maintenance altogether if it only contains disabled hosts. + +To enable garbage collection, add the following to your config: + +```toml +[zac.process.garbage_collector] +enabled = true +delete_empty_maintenance = true +``` + +By default, the garbage collector runs every 24 hours. 
This can be adjusted with the `update_interval` option: + +```toml +[zac.process.garbage_collector] +update_interval = 3600 # Run every hour +``` + +---- + ## Development We use the project management tool [Hatch](https://hatch.pypa.io/latest/) for developing the project. The tool manages virtual environment creation, dependency installation, as well as building and publishing of the project, and more. diff --git a/config.sample.toml b/config.sample.toml index 135d7fd..9965d96 100644 --- a/config.sample.toml +++ b/config.sample.toml @@ -27,6 +27,29 @@ failsafe_ok_file = "/tmp/zac_failsafe_ok" # It is then up to the administrator to manually delete the file afterwards. failsafe_ok_file_strict = true +# Configuration for ZAC processes. +[zac.process.source_merger] +# How often to run the source merger in seconds +update_interval = 60 + +[zac.process.host_updater] +update_interval = 60 + +[zac.process.hostgroup_updater] +update_interval = 60 + +[zac.process.template_updater] +update_interval = 60 + +[zac.process.garbage_collector] +# Enable garbage collection, including: +# - Remove disabled hosts from maintenances +enabled = false +# Delete maintenances if all its hosts are disabled +delete_empty_maintenance = false +update_interval = 86400 # every 24 hours + + [zabbix] # Directory containing mapping files. map_dir = "path/to/map_dir/" @@ -36,6 +59,7 @@ username = "Admin" password = "zabbix" # Preview changes without making them. +# Disables all write operations to Zabbix. dryrun = true # Maximum number of hosts to add/remove in one go. @@ -56,10 +80,15 @@ hostgroup_source_prefix = "Source-" hostgroup_importance_prefix = "Importance-" # Template group creation +# If we have a host group named `Siteadmin-my-hosts`, ZAC creates a +# template group named `Templates-my-hosts` # NOTE: will create host groups if enabled on Zabbix <6.2 create_templategroups = true templategroup_prefix = "Templates-" +# Separator used for group name prefixes +prefix_separator = "-" + extra_siteadmin_hostgroup_prefixes = [] [source_collectors.mysource] diff --git a/pyproject.toml b/pyproject.toml index 61d892f..a112bb5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,15 +23,16 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ - "multiprocessing-logging==0.3.1", + "multiprocessing-logging>=0.3.1", "psycopg2>=2.9.5", "pydantic>=2.6.0", - "pyzabbix>=1.3.0", - "requests>=1.0.0", + "httpx>=0.27.0", "tomli>=2.0.0", "packaging>=23.2", + "typing_extensions>=4.12.0", ] [project.optional-dependencies] @@ -65,6 +66,7 @@ extend-select = [ "LOG", # flake8-logging "PLE1205", # pylint (too many logging args) "PLE1206", # pylint (too few logging args) + "TID252", # flake8-tidy-imports (prefer absolute imports) ] [tool.ruff.lint.isort] diff --git a/tests/conftest.py b/tests/conftest.py index 8a58525..ca56f13 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -114,8 +114,15 @@ def config(sample_config: str) -> Iterable[models.Settings]: yield models.Settings(**tomli.loads(sample_config)) +@pytest.fixture(scope="function") +def map_dir(tmp_path: Path) -> Iterable[Path]: + mapdir = tmp_path / "maps" + mapdir.mkdir() + yield mapdir + + @pytest.fixture -def hostgroup_map_file(tmp_path: Path) -> Iterable[Path]: +def hostgroup_map_file(map_dir: Path) -> Iterable[Path]: contents = """ # This file defines assosiation between siteadm fetched from Nivlheim and hostsgroups in 
Zabbix. # A siteadm can be assosiated only with one hostgroup or usergroup. @@ -133,11 +140,44 @@ def hostgroup_map_file(tmp_path: Path) -> Iterable[Path]: # user3@example.com:Hostgroup-user3-primary """ - map_file_path = tmp_path / "siteadmin_hostgroup_map.txt" + map_file_path = map_dir / "siteadmin_hostgroup_map.txt" + map_file_path.write_text(contents) + yield map_file_path + + +@pytest.fixture +def property_hostgroup_map_file(map_dir: Path) -> Iterable[Path]: + contents = """ +is_app_server:Role-app-servers +is_adfs_server:Role-adfs-servers +""" + map_file_path = map_dir / "property_hostgroup_map.txt" map_file_path.write_text(contents) yield map_file_path +@pytest.fixture +def property_template_map_file(map_dir: Path) -> Iterable[Path]: + contents = """ +is_app_server:Template-app-server +is_adfs_server:Template-adfs-server +""" + map_file_path = map_dir / "property_template_map.txt" + map_file_path.write_text(contents) + yield map_file_path + + +@pytest.fixture +def map_dir_with_files( + map_dir: Path, + hostgroup_map_file: Path, + property_hostgroup_map_file: Path, + property_template_map_file: Path, +) -> Iterable[Path]: + """Creates all mapping files and returns the path to their directory.""" + yield map_dir + + @pytest.fixture(autouse=True, scope="session") def setup_multiprocessing_start_method() -> None: # On MacOS we have to set the start mode to fork @@ -165,7 +205,9 @@ def __init__(self, *args, **kwargs): @pytest.fixture(autouse=True) def mock_zabbix_api() -> Iterable[Type[MockZabbixAPI]]: - with mock.patch("pyzabbix.ZabbixAPI", new=MockZabbixAPI) as api_mock: + with mock.patch( + "zabbix_auto_config.processing.ZabbixAPI", new=MockZabbixAPI + ) as api_mock: yield api_mock diff --git a/tests/test_failsafe.py b/tests/test_failsafe.py new file mode 100644 index 0000000..d0639f4 --- /dev/null +++ b/tests/test_failsafe.py @@ -0,0 +1,210 @@ +from __future__ import annotations + +import logging +from pathlib import Path +from typing import Iterable +from typing import List +from unittest.mock import MagicMock + +import pytest + +from zabbix_auto_config.exceptions import ZACException +from zabbix_auto_config.failsafe import check_failsafe +from zabbix_auto_config.failsafe import check_failsafe_ok_file +from zabbix_auto_config.failsafe import write_failsafe_hosts +from zabbix_auto_config.models import HostActions +from zabbix_auto_config.models import Settings + + +@pytest.fixture() +def failsafe_ok_file(tmp_path: Path) -> Iterable[Path]: + failsafe_file = tmp_path / "failsafe" + try: + yield failsafe_file + finally: + if failsafe_file.exists(): + failsafe_file.unlink() + + +@pytest.fixture() +def failsafe_file(tmp_path: Path) -> Iterable[Path]: + failsafe_file = tmp_path / "failsafe_hosts.json" + try: + yield failsafe_file + finally: + if failsafe_file.exists(): + failsafe_file.unlink() + + +FAIL_ZAC = pytest.mark.xfail(strict=True, raises=ZACException) + + +@pytest.mark.parametrize( + "failsafe, to_add, to_remove", + [ + pytest.param(1, ["foo.example.com"], [], id="OK (add)"), + pytest.param(1, [], ["foo.example.com"], id="OK (remove)"), + pytest.param(1, ["foo.example.com"], ["bar.example.com"], id="OK (add/remove)"), + pytest.param( + 3, + ["foo.example.com", "bar.example.com"], + ["baz.example.com", "qux.example.com"], + id="OK (add/remove>1)", + ), + pytest.param( + 1, + ["foo.example.com", "bar.example.com"], + [], + id="Fail (add)", + marks=FAIL_ZAC, + ), + pytest.param( + 1, + [], + ["foo.example.com", "bar.example.com"], + id="Fail (remove)", + marks=FAIL_ZAC, + ), + 
pytest.param( + 1, + ["foo.example.com"], + ["bar.example.com", "baz.example.com"], + id="Fail (add/remove)", + marks=FAIL_ZAC, + ), + pytest.param( + 1, + ["foo.example.com", "bar.example.com"], + [], + id="Fail (add>1)", + marks=FAIL_ZAC, + ), + pytest.param( + 1, + [], + ["baz.example.com", "qux.example.com"], + id="Fail (remove>1)", + marks=FAIL_ZAC, + ), + pytest.param( + 1, + ["foo.example.com", "bar.example.com"], + ["baz.example.com", "qux.example.com"], + id="Fail (add/remove>1)", + marks=FAIL_ZAC, + ), + ], +) +def test_check_failsafe( + config: Settings, failsafe: int, to_add: List[str], to_remove: List[str] +) -> None: + config.zabbix.failsafe = failsafe + check_failsafe(config, to_add, to_remove) + + +def test_check_failsafe_ok_file_not_configured(config: Settings) -> None: + """Test that an unconfigured failsafe OK file always returns False""" + config.zac.failsafe_ok_file = None + assert check_failsafe_ok_file(config.zac) is False + + +@pytest.mark.parametrize("content", ["", "1"]) +def test_check_failsafe_ok_file_exists( + failsafe_ok_file: Path, config: Settings, content: str +) -> None: + """Test that a failsafe ok file that exists is OK with and without content""" + config.zac.failsafe_ok_file = failsafe_ok_file + failsafe_ok_file.write_text(content) + assert check_failsafe_ok_file(config.zac) is True + # Ensure that approving the file also deletes it + assert failsafe_ok_file.exists() is False + + +def test_check_failsafe_ok_file_not_exists( + failsafe_ok_file: Path, config: Settings +) -> None: + """Test that a missing failsafe OK file returns False""" + config.zac.failsafe_file = failsafe_ok_file + assert failsafe_ok_file.exists() is False + assert check_failsafe_ok_file(config.zac) is False + assert failsafe_ok_file.exists() is False # Should still not exist + + +@pytest.mark.parametrize("strict", [True, False]) +def test_check_failsafe_ok_file_unable_to_delete( + config: Settings, strict: bool +) -> None: + """Test a failsafe OK file we are unable to delete.""" + # NOTE: it's quite hard to mock a Path file with a real path + # so we instead mock the Path object with a MagicMock. + # An alternative would be to add a function we can pass Path objects + # to for deletion, then mock that function. 
+ mock_file = MagicMock(spec=Path) + mock_file.exists.return_value = True + mock_file.unlink.side_effect = OSError("Unable to delete file") + + assert mock_file.exists() is True + config.zac.failsafe_ok_file = mock_file + config.zac.failsafe_ok_file_strict = strict + # Fails in strict mode - must be able to delete the file + if strict: + assert check_failsafe_ok_file(config.zac) is False + else: + assert check_failsafe_ok_file(config.zac) is True + + +@pytest.mark.parametrize( + "to_add,to_remove", + [ + pytest.param( + ["foo.example.com", "bar.example.com"], + ["baz.example.com", "qux.example.com"], + id="Add and remove", + ), + pytest.param(["foo.example.com", "bar.example.com"], [], id="Add"), + pytest.param([], ["baz.example.com", "qux.example.com"], id="Remove"), + pytest.param([], [], id="No changes"), + ], +) +@pytest.mark.parametrize("failsafe_file_exists", [True, False]) +def test_write_failsafe_hosts( + config: Settings, + failsafe_file: Path, + failsafe_file_exists: bool, + to_add: list[str], + to_remove: list[str], +) -> None: + """Write a list of hosts to a failsafe file.""" + # Ensure we handle both file existing and not existing + if failsafe_file_exists: + failsafe_file.write_text("Contains some data") + assert failsafe_file.exists() + else: + assert not failsafe_file.exists() + + # Assign file and write the hosts + config.zac.failsafe_file = failsafe_file + write_failsafe_hosts(config.zac, to_add, to_remove) + + # Check contents of file + assert failsafe_file.exists() + content = failsafe_file.read_text() + h = HostActions.model_validate_json(content) + assert h == HostActions(add=to_add, remove=to_remove) + + +def test_write_failsafe_hosts_no_file( + caplog: pytest.LogCaptureFixture, config: Settings +) -> None: + """Attempt to write failsafe hosts without a failsafe file.""" + caplog.set_level(logging.WARNING) + config.zac.failsafe_file = None + write_failsafe_hosts( + config.zac, + ["foo.example.com", "bar.example.com"], + ["baz.example.com", "qux.example.com"], + ) + assert ( + "No failsafe file configured, cannot write hosts to add/remove." 
+ in caplog.messages + ) diff --git a/tests/test_processing/test_zabbixupdater.py b/tests/test_processing/test_zabbixupdater.py index f314dde..956722a 100644 --- a/tests/test_processing/test_zabbixupdater.py +++ b/tests/test_processing/test_zabbixupdater.py @@ -5,7 +5,8 @@ from unittest.mock import patch import pytest -import requests +from httpx import ConnectTimeout +from httpx import ReadTimeout from zabbix_auto_config import exceptions from zabbix_auto_config.models import Settings @@ -18,23 +19,25 @@ def raises_connect_timeout(*args, **kwargs): - raise requests.exceptions.ConnectTimeout("connect timeout") + raise ConnectTimeout("connect timeout") # We have to set the side effect in the constructor class TimeoutAPI(MockZabbixAPI): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self.login = PicklableMock( - side_effect=requests.exceptions.ConnectTimeout("connect timeout") - ) + self.login = PicklableMock(side_effect=ConnectTimeout("connect timeout")) @pytest.mark.timeout(10) -@patch("pyzabbix.ZabbixAPI", TimeoutAPI()) # mock with timeout on login -def test_zabbixupdater_connect_timeout(mock_psycopg2_connect, config: Settings): +@patch( + "zabbix_auto_config.processing.ZabbixAPI", TimeoutAPI() +) # mock with timeout on login +def test_zabbixupdater_connect_timeout( + mock_psycopg2_connect, config: Settings, map_dir_with_files: Path +): config.zabbix = ZabbixSettings( - map_dir="", + map_dir=str(map_dir_with_files), url="", username="", password="", @@ -53,22 +56,15 @@ def test_zabbixupdater_connect_timeout(mock_psycopg2_connect, config: Settings): class TimeoutUpdater(ZabbixUpdater): def do_update(self): - raise requests.exceptions.ReadTimeout("read timeout") + raise ReadTimeout("read timeout") @pytest.mark.timeout(5) def test_zabbixupdater_read_timeout( - tmp_path: Path, mock_psycopg2_connect, config: Settings + mock_psycopg2_connect, config: Settings, map_dir_with_files: Path ): - # TODO: use mapping file fixtures from #67 - map_dir = tmp_path / "maps" - map_dir.mkdir() - (map_dir / "property_template_map.txt").touch() - (map_dir / "property_hostgroup_map.txt").touch() - (map_dir / "siteadmin_hostgroup_map.txt").touch() - config.zabbix = ZabbixSettings( - map_dir=str(map_dir), + map_dir=str(map_dir_with_files.absolute()), url="", username="", password="", diff --git a/tests/test_state.py b/tests/test_state.py index 631ae72..af9b120 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -166,9 +166,9 @@ def test_state_asdict_error(use_manager: bool) -> None: # Mocking datetime in subprocesses is a bit of a chore, so we just # check that the error_time is a timestamp value within a given range - pre = datetime.datetime.now().timestamp() + pre = time.time() state.set_error(CustomException("Test error")) - post = datetime.datetime.now().timestamp() + post = time.time() d = state.asdict() assert post >= d["error_time"] >= pre diff --git a/tests/test_utils.py b/tests/test_utils.py index 72c11dd..0cce716 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -18,6 +18,7 @@ from pytest import LogCaptureFixture from zabbix_auto_config import utils +from zabbix_auto_config.pyzabbix.types import HostTag @pytest.mark.parametrize( @@ -126,22 +127,16 @@ def test_read_map_file_fuzz(tmp_path: Path, text: str): "tags,expected", [ ( - [{"tag": "tag1", "value": "x"}], + [HostTag(tag="tag1", value="x")], {("tag1", "x")}, ), ( - [{"tag": "tag1", "value": "x"}, {"tag": "tag2", "value": "y"}], + [HostTag(tag="tag1", value="x"), HostTag(tag="tag2", value="y")], 
{("tag1", "x"), ("tag2", "y")}, ), - ( - [{"tag": "tag1", "value": "x", "foo": "tag2", "bar": "y"}], - {("tag1", "x", "tag2", "y")}, - ), ], ) -def test_zabbix_tags2zac_tags( - tags: List[Dict[str, str]], expected: Set[Tuple[str, str]] -): +def test_zabbix_tags2zac_tags(tags: List[HostTag], expected: Set[Tuple[str, str]]): assert utils.zabbix_tags2zac_tags(tags) == expected @@ -150,15 +145,15 @@ def test_zabbix_tags2zac_tags( [ ( {("tag1", "x")}, - [{"tag": "tag1", "value": "x"}], + [HostTag(tag="tag1", value="x")], ), ( {("tag1", "x"), ("tag2", "y")}, - [{"tag": "tag1", "value": "x"}, {"tag": "tag2", "value": "y"}], + [HostTag(tag="tag1", value="x"), HostTag(tag="tag2", value="y")], ), ( {("tag1", "x", "tag2", "y")}, - [{"tag": "tag1", "value": "x"}], + [HostTag(tag="tag1", value="x")], ), ], ) diff --git a/zabbix_auto_config/__init__.py b/zabbix_auto_config/__init__.py index 8e48e03..2ec58f8 100644 --- a/zabbix_auto_config/__init__.py +++ b/zabbix_auto_config/__init__.py @@ -10,26 +10,28 @@ import os.path import sys import time +from pathlib import Path from typing import List import multiprocessing_logging import tomli +from zabbix_auto_config import models +from zabbix_auto_config import processing +from zabbix_auto_config.__about__ import __version__ +from zabbix_auto_config._types import HealthDict +from zabbix_auto_config._types import HostModifier +from zabbix_auto_config._types import HostModifierModule +from zabbix_auto_config._types import SourceCollector +from zabbix_auto_config._types import SourceCollectorModule from zabbix_auto_config.state import get_manager -from . import exceptions -from . import models -from . import processing -from .__about__ import __version__ -from ._types import SourceCollectorDict -from ._types import SourceCollectorModule - -def get_source_collectors(config: models.Settings) -> List[SourceCollectorDict]: +def get_source_collectors(config: models.Settings) -> List[SourceCollector]: source_collector_dir = config.zac.source_collector_dir sys.path.append(source_collector_dir) - source_collectors = [] # type: List[SourceCollectorDict] + source_collectors = [] # type: List[SourceCollector] for ( source_collector_name, source_collector_config, @@ -43,47 +45,79 @@ def get_source_collectors(config: models.Settings) -> List[SourceCollectorDict]: source_collector_dir, ) continue - if not isinstance(module, SourceCollectorModule): logging.error( "Source collector named '%s' is not a valid source collector module", source_collector_config.module_name, ) continue + source_collectors.append( + SourceCollector( + name=source_collector_name, + module=module, + config=source_collector_config, + ) + ) + return source_collectors - source_collector = { - "name": source_collector_name, - "module": module, - "config": source_collector_config, - } # type: SourceCollectorDict - - source_collectors.append(source_collector) - return source_collectors +def get_host_modifiers(modifier_dir: str) -> List[HostModifier]: + sys.path.append(modifier_dir) + try: + module_names = [ + filename[:-3] + for filename in os.listdir(modifier_dir) + if filename.endswith(".py") and filename != "__init__.py" + ] + except FileNotFoundError: + logging.error("Host modififier directory %s does not exist.", modifier_dir) + sys.exit(1) + host_modifiers = [] # type: List[HostModifier] + for module_name in module_names: + module = importlib.import_module(module_name) + if not isinstance(module, HostModifierModule): + logging.warning( + "Module '%s' is not a valid host modifier module. 
Skipping.", + module_name, + ) + continue + host_modifiers.append( + HostModifier( + name=module_name, + module=module, + ) + ) + logging.info( + "Loaded %d host modifiers: %s", + len(host_modifiers), + ", ".join([repr(modifier.name) for modifier in host_modifiers]), + ) + return host_modifiers -def get_config(): +def get_config() -> models.Settings: cwd = os.getcwd() config_file = os.path.join(cwd, "config.toml") with open(config_file) as f: content = f.read() - - config = tomli.loads(content) - config = models.Settings(**config) - + config_dict = tomli.loads(content) + config = models.Settings(**config_dict) return config def write_health( - health_file, processes: List[processing.BaseProcess], queues, failsafe -): + health_file: Path, + processes: List[processing.BaseProcess], + queues: List[multiprocessing.Queue], + failsafe: int, +) -> None: now = datetime.datetime.now() - health = { + health: HealthDict = { "date": now.isoformat(timespec="seconds"), "date_unixtime": int(now.timestamp()), "pid": os.getpid(), "cwd": os.getcwd(), - "all_ok": True, + "all_ok": all(p.state.ok for p in processes), "processes": [], "queues": [], "failsafe": failsafe, @@ -99,8 +133,6 @@ def write_health( } ) - health["all_ok"] = all(p.state.ok for p in processes) - for queue in queues: health["queues"].append( { @@ -115,7 +147,7 @@ def write_health( logging.error("Unable to write health file %s: %s", health_file, e) -def log_process_status(processes): +def log_process_status(processes: List[processing.BaseProcess]) -> None: process_statuses = [] for process in processes: @@ -126,7 +158,7 @@ def log_process_status(processes): logging.info("Process status: %s", ", ".join(process_statuses)) -def main(): +def main() -> None: multiprocessing_logging.install_mp_handler() logging.basicConfig( format="%(asctime)s %(levelname)s [%(processName)s %(process)d] [%(name)s] %(message)s", @@ -135,73 +167,89 @@ def main(): ) config = get_config() logging.getLogger().setLevel(config.zac.log_level) - logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) + logging.getLogger("httpcore").setLevel(logging.ERROR) + logging.getLogger("httpx").setLevel(logging.ERROR) logging.info("Main start (%d) version %s", os.getpid(), __version__) stop_event = multiprocessing.Event() state_manager = get_manager() - processes = [] # type: List[processing.BaseProcess] - source_hosts_queues = [] + # Import host modifier and source collector modules + host_modifiers = get_host_modifiers(config.zac.host_modifier_dir) source_collectors = get_source_collectors(config) + + # Initialize source collector processes from imported modules + source_hosts_queues = [] # type: List[multiprocessing.Queue[models.Host]] + src_processes = [] # type: List[processing.BaseProcess] for source_collector in source_collectors: - source_hosts_queue = multiprocessing.Queue(maxsize=1) - process = processing.SourceCollectorProcess( - source_collector["name"], + # Each source collector has its own queue + source_hosts_queue = multiprocessing.Queue(maxsize=1) # type: multiprocessing.Queue[models.Host] + source_hosts_queues.append(source_hosts_queue) + process: processing.BaseProcess = processing.SourceCollectorProcess( + source_collector.name, state_manager.State(), - source_collector["module"], - source_collector["config"], + source_collector.module, + source_collector.config, source_hosts_queue, ) - source_hosts_queues.append(source_hosts_queue) - processes.append(process) + src_processes.append(process) - try: - process = processing.SourceHandlerProcess( + # 
Initialize the default processes + processes: List[processing.BaseProcess] = [ + processing.SourceHandlerProcess( "source-handler", state_manager.State(), config.zac.db_uri, source_hosts_queues, - ) - processes.append(process) - - process = processing.SourceMergerProcess( + ), + processing.SourceMergerProcess( "source-merger", state_manager.State(), config.zac.db_uri, - config.zac.host_modifier_dir, - ) - processes.append(process) - - process = processing.ZabbixHostUpdater( + host_modifiers, + ), + processing.ZabbixHostUpdater( "zabbix-host-updater", state_manager.State(), config.zac.db_uri, config, - ) - processes.append(process) - - process = processing.ZabbixHostgroupUpdater( + ), + processing.ZabbixHostgroupUpdater( "zabbix-hostgroup-updater", state_manager.State(), config.zac.db_uri, config, - ) - processes.append(process) - - process = processing.ZabbixTemplateUpdater( + ), + processing.ZabbixTemplateUpdater( "zabbix-template-updater", state_manager.State(), config.zac.db_uri, config, + ), + ] + + # Garbage collection process + if config.zac.process.garbage_collector.enabled: + processes.append( + processing.ZabbixGarbageCollector( + "zabbix-garbage-collector", + state_manager.State(), + config.zac.db_uri, + config, + ) ) - processes.append(process) - except exceptions.ZACException as e: - logging.error("Failed to initialize child processes. Exiting: %s", str(e)) - sys.exit(1) - for process in processes: - process.start() + # Combine the source collector processes with the other processes + processes.extend(src_processes) + + # Abort if we can't start _all_ processes + for pr in processes: + try: + pr.start() + except Exception as e: + logging.error("Unable to start process %s: %s", pr.name, e) + stop_event.set() # Stop other proceses immediately + break with processing.SignalHandler(stop_event): status_interval = 60 @@ -237,25 +285,26 @@ def main(): ", ".join([str(queue.qsize()) for queue in source_hosts_queues]), ) - for process in processes: - logging.info("Terminating: %s(%d)", process.name, process.pid) - process.terminate() - - alive_processes = [process for process in processes if process.is_alive()] - while alive_processes: - process = alive_processes[0] - logging.info("Waiting for: %s(%d)", process.name, process.pid) - log_process_status(processes) # TODO: Too verbose? - process.join(10) - if process.exitcode is None: - logging.warning( - "Process hanging. Signaling new terminate: %s(%d)", - process.name, - process.pid, - ) - process.terminate() + for pr in processes: + logging.info("Terminating: %s(%d)", pr.name, pr.pid) + pr.terminate() + + def get_alive(): + return [process for process in processes if process.is_alive()] + + while alive := get_alive(): + log_process_status(processes) + for process in alive: + logging.info("Waiting for: %s(%d)", process.name, process.pid) + process.join(10) + if process.exitcode is None: + logging.warning( + "Process hanging. 
Signaling new terminate: %s(%d)", + process.name, + process.pid, + ) + process.terminate() time.sleep(1) - alive_processes = [process for process in processes if process.is_alive()] logging.info("Main exit") diff --git a/zabbix_auto_config/_types.py b/zabbix_auto_config/_types.py index 8223c5d..8a8afb4 100644 --- a/zabbix_auto_config/_types.py +++ b/zabbix_auto_config/_types.py @@ -7,12 +7,27 @@ from typing import Any from typing import List +from typing import NamedTuple from typing import Protocol +from typing import Sequence +from typing import Set +from typing import Tuple from typing import TypedDict from typing import runtime_checkable -from .models import Host -from .models import SourceCollectorSettings +from zabbix_auto_config.models import Host +from zabbix_auto_config.models import SourceCollectorSettings + + +class ZabbixTag(TypedDict): + tag: str + value: str + + +ZabbixTags = Sequence[ZabbixTag] + +ZacTag = Tuple[str, str] +ZacTags = Set[ZacTag] @runtime_checkable @@ -33,19 +48,36 @@ def modify(self, host: Host) -> Host: ... -class HostModifierDict(TypedDict): - """The dict created by - `zabbix_auto_config.processing.SourceMergerProcess.get_host_modifiers` - for each imported host modifier module.""" +class HostModifier(NamedTuple): + """An imported host modifier.""" name: str module: HostModifierModule -class SourceCollectorDict(TypedDict): +class SourceCollector(NamedTuple): """The dict created by `zabbix_auto_config.get_source_collectors` for each imported source collector module.""" name: str module: SourceCollectorModule config: SourceCollectorSettings + + +class QueueDict(TypedDict): + """Queue information for the health check dict.""" + + size: int + + +class HealthDict(TypedDict): + """Application health dict used by `zabbix_auto_config.__init__.write_health`""" + + date: str + date_unixtime: int + pid: int + cwd: str + all_ok: bool + processes: List[dict] + queues: List[QueueDict] + failsafe: int diff --git a/zabbix_auto_config/compat.py b/zabbix_auto_config/compat.py new file mode 100644 index 0000000..0cc2370 --- /dev/null +++ b/zabbix_auto_config/compat.py @@ -0,0 +1,74 @@ +"""Compatibility functions to support different Zabbix API versions.""" + +from __future__ import annotations + +from typing import Literal + +from packaging.version import Version + +# Compatibility methods for Zabbix API objects properties and method parameters. +# Returns the appropriate property name for the given Zabbix version. +# +# FORMAT: _ +# EXAMPLE: user_name() (User object, name property) +# +# NOTE: All functions follow the same pattern: +# Early return if the version is older than the version where the property +# was deprecated, otherwise return the new property name as the default. 
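+#
+# Illustrative usage (a hypothetical sketch, not taken from this changeset):
+# callers resolve the version-appropriate field name before building API params.
+#
+#   from packaging.version import Version
+#
+#   field = host_proxyid(Version("6.4.0"))  # -> "proxy_hostid"
+#   field = host_proxyid(Version("7.0.0"))  # -> "proxyid"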
+ + +def host_proxyid(version: Version) -> Literal["proxy_hostid", "proxyid"]: + # https://support.zabbix.com/browse/ZBXNEXT-8500 + # https://www.zabbix.com/documentation/7.0/en/manual/api/changes#host + if version.release < (7, 0, 0): + return "proxy_hostid" + return "proxyid" + + +def host_hostgroups(version: Version) -> Literal["groups", "hostgroups"]: + # https://support.zabbix.com/browse/ZBXNEXT-2592 + # https://www.zabbix.com/documentation/6.2/en/manual/api/changes_6.0_-_6.2#host + if version.release < (6, 2, 0): + return "groups" + return "hostgroups" + + +def proxy_name(version: Version) -> Literal["host", "name"]: + # https://support.zabbix.com/browse/ZBXNEXT-8500 + # https://www.zabbix.com/documentation/7.0/en/manual/api/changes#proxy + if version.release < (7, 0, 0): + return "host" + return "name" + + +def proxy_operating_mode(version: Version) -> Literal["status", "operating_mode"]: + # https://support.zabbix.com/browse/ZBXNEXT-8500 + # https://www.zabbix.com/documentation/7.0/en/manual/api/changes#proxy + if version.release < (7, 0, 0): + return "status" + return "operating_mode" + + +### API params +# API parameter functions are in the following format: +# param_<object>_<method>_<param> +# So to get the "groups" parameter for the "host.get" method, you would call: +# param_host_get_groups() + + +def param_host_get_groups( + version: Version, +) -> Literal["selectHostGroups", "selectGroups"]: + # https://support.zabbix.com/browse/ZBXNEXT-2592 + # https://www.zabbix.com/documentation/6.2/en/manual/api/changes_6.0_-_6.2#host + if version.release < (6, 2, 0): + return "selectGroups" + return "selectHostGroups" + + +### Other compatibility functions + + +def templategroups_supported(version: Version) -> bool: + """Return True if templategroups are supported in the given Zabbix version.""" + return version.release >= (6, 2, 0) diff --git a/zabbix_auto_config/exceptions.py b/zabbix_auto_config/exceptions.py index 1d3ca9d..c92a764 100644 --- a/zabbix_auto_config/exceptions.py +++ b/zabbix_auto_config/exceptions.py @@ -1,5 +1,74 @@ from __future__ import annotations +from typing import TYPE_CHECKING +from typing import Any +from typing import Optional + +if TYPE_CHECKING: + from httpx import Response as HTTPResponse + + from zabbix_auto_config.pyzabbix.types import ParamsType + from zabbix_auto_config.pyzabbix.types import ZabbixAPIResponse + + +class PyZabbixError(Exception): + """Base exception class for PyZabbix exceptions.""" + + +class ZabbixAPIException(PyZabbixError): + # Extracted from pyzabbix, hence *Exception suffix instead of *Error + """Base exception class for Zabbix API exceptions.""" + + def reason(self) -> str: + return "" + + +class ZabbixAPIRequestError(ZabbixAPIException): + """Zabbix API response error.""" + + def __init__( + self, + *args: Any, + params: Optional[ParamsType] = None, + api_response: Optional[ZabbixAPIResponse] = None, + response: Optional[HTTPResponse] = None, + ) -> None: + super().__init__(*args) + self.params = params + self.api_response = api_response + self.response = response + + def reason(self) -> str: + if self.api_response and self.api_response.error: + reason = ( + f"({self.api_response.error.code}) {self.api_response.error.message}" + ) + if self.api_response.error.data: + reason += f" {self.api_response.error.data}" + elif self.response and self.response.text: + reason = self.response.text + else: + reason = str(self) + return reason + + +class ZabbixAPIResponseParsingError(ZabbixAPIRequestError): + """Zabbix API response parsing error.""" + + +class 
ZabbixAPICallError(ZabbixAPIException): + """Zabbix API request error.""" + + def __str__(self) -> str: + msg = super().__str__() + if self.__cause__ and isinstance(self.__cause__, ZabbixAPIRequestError): + msg = f"{msg}: {self.__cause__.reason()}" + return msg + + +class ZabbixNotFoundError(ZabbixAPICallError): + """A Zabbix API resource was not found.""" + class ZACException(Exception): def __init__(self, *args, **kwargs): diff --git a/zabbix_auto_config/failsafe.py b/zabbix_auto_config/failsafe.py new file mode 100644 index 0000000..e89e64c --- /dev/null +++ b/zabbix_auto_config/failsafe.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import logging +from typing import List + +from zabbix_auto_config import models +from zabbix_auto_config.exceptions import ZACException +from zabbix_auto_config.models import Settings +from zabbix_auto_config.models import ZacSettings + + +def check_failsafe(config: Settings, to_add: List[str], to_remove: List[str]) -> None: + """Check if number of hosts to add/remove exceeds the failsafe and handle appropriately.""" + failsafe = config.zabbix.failsafe + if len(to_remove) <= failsafe and len(to_add) <= failsafe: + return + + # Failsafe exceeded - check for failsafe OK file + if check_failsafe_ok_file(config.zac): + return + + # Failsafe OK file validation failed + # We must write the hosts to add/remove and raise an exception + write_failsafe_hosts(config.zac, to_add, to_remove) + logging.warning( + "Too many hosts to change (failsafe=%d). Remove: %d, Add: %d. Aborting", + failsafe, + len(to_remove), + len(to_add), + ) + raise ZACException("Failsafe triggered") + + +def check_failsafe_ok_file(config: ZacSettings) -> bool: + """Check the failsafe OK file and returns True if application should proceed. + + Attempts to delete the failsafe OK file if it exists. + Depending on the configuration, succeeding in deleting the file may + be required to proceed with changes.""" + # Check for presence of file + if not config.failsafe_ok_file: + logging.info("No failsafe OK file configured.") + return False + if not config.failsafe_ok_file.exists(): + logging.info( + "Failsafe OK file %s does not exist. Create it to approve changes. The ZAC process must have permission to delete the file.", + config.failsafe_ok_file, + ) + return False + # File exists, attempt to delete it + try: + config.failsafe_ok_file.unlink() + except OSError as e: + logging.error("Unable to delete failsafe OK file: %s", e) + if config.failsafe_ok_file_strict: + return False # failed to delete in strict mode + # NOTE: should this be an INFO or DEBUG log instead? + logging.warning("Continuing with changes despite failed deletion.") + logging.info("Failsafe OK file exists. Proceeding with changes.") + return True + + +def write_failsafe_hosts( + config: ZacSettings, to_add: List[str], to_remove: List[str] +) -> None: + """Write a list of hosts to add and remove to a file when the failsafe is reached. + + Uses the failsafe file defined in the config. + Does nothing if no failsafe file is defined. + """ + if not config.failsafe_file: + logging.warning( + "No failsafe file configured, cannot write hosts to add/remove." 
+ ) + return + h = models.HostActions(add=to_add, remove=to_remove) + h.write_json(config.failsafe_file) + logging.info( + "Wrote list of hosts to add and remove: %s", + config.failsafe_file, + ) diff --git a/zabbix_auto_config/models.py b/zabbix_auto_config/models.py index a73bc4d..f19a89c 100644 --- a/zabbix_auto_config/models.py +++ b/zabbix_auto_config/models.py @@ -14,13 +14,15 @@ from pydantic import BaseModel as PydanticBaseModel from pydantic import ConfigDict from pydantic import Field +from pydantic import RootModel from pydantic import ValidationInfo from pydantic import field_serializer from pydantic import field_validator from pydantic import model_validator from typing_extensions import Annotated +from typing_extensions import Self -from . import utils +from zabbix_auto_config import utils # TODO: Models aren't validated when making changes to a set/list. Why? How to handle? @@ -36,7 +38,7 @@ def _check_unknown_fields(cls, values: Dict[str, Any]) -> Dict[str, Any]: """Checks for unknown fields and logs a warning if any are found. Does not log warnings if extra is set to `Extra.allow`. """ - if cls.model_config["extra"] == "allow": + if cls.model_config.get("extra") == "allow": return values for key in values: if key not in cls.model_fields: @@ -82,6 +84,8 @@ class ZabbixSettings(ConfigBaseModel): # These groups are not managed by ZAC beyond their creation. extra_siteadmin_hostgroup_prefixes: Set[str] = set() + prefix_separator: str = "-" + @field_validator("timeout") @classmethod def _validate_timeout(cls, v: Optional[int]) -> Optional[int]: @@ -90,6 +94,51 @@ def _validate_timeout(cls, v: Optional[int]) -> Optional[int]: return v +class ZabbixHostSettings(ConfigBaseModel): + remove_from_maintenance: bool = False + """Remove a host from all its maintenances when disabling it""" + + +class ProcessSettings(ConfigBaseModel): + update_interval: int = Field(default=60, ge=0) + + +# TODO: Future expansion of individual process settings +class SourceMergerSettings(ProcessSettings): + pass + + +class HostUpdaterSettings(ProcessSettings): + pass + + +class HostGroupUpdaterSettings(ProcessSettings): + pass + + +class TemplateUpdaterSettings(ProcessSettings): + pass + + +class GarbageCollectorSettings(ProcessSettings): + enabled: bool = False + """Remove disabled hosts from maintenances and triggers.""" + delete_empty_maintenance: bool = False + """Delete maintenance periods if they are empty after removing disabled hosts.""" + + +class ProcessesSettings(ConfigBaseModel): + """Settings for the various ZAC processes""" + + source_merger: SourceMergerSettings = SourceMergerSettings() + host_updater: HostUpdaterSettings = HostUpdaterSettings() + hostgroup_updater: HostGroupUpdaterSettings = HostGroupUpdaterSettings() + template_updater: TemplateUpdaterSettings = TemplateUpdaterSettings() + garbage_collector: GarbageCollectorSettings = GarbageCollectorSettings( + update_interval=86400 # every 24 hours + ) + + class ZacSettings(ConfigBaseModel): source_collector_dir: str host_modifier_dir: str @@ -99,6 +148,7 @@ class ZacSettings(ConfigBaseModel): failsafe_file: Optional[Path] = None failsafe_ok_file: Optional[Path] = None failsafe_ok_file_strict: bool = True + process: ProcessesSettings = ProcessesSettings() @field_validator("health_file", "failsafe_file", "failsafe_ok_file", mode="after") @classmethod @@ -171,7 +221,7 @@ class SourceCollectorSettings(ConfigBaseModel, extra="allow"): ) @model_validator(mode="after") - def _validate_error_duration_is_greater(self) -> 
"SourceCollectorSettings": + def _validate_error_duration_is_greater(self) -> Self: # If no tolerance, we don't need to be concerned with how long errors # are kept on record, because a single error will disable the collector. if self.error_tolerance <= 0: @@ -202,25 +252,34 @@ class Interface(BaseModel): model_config = ConfigDict(validate_assignment=True) @model_validator(mode="after") - def type_2_must_have_details(self) -> "Interface": + def type_2_must_have_details(self) -> Self: if self.type == 2 and not self.details: raise ValueError("Interface of type 2 must have details set") return self class Host(BaseModel): + """A host collected by ZAC. + + Not to be confused with `zabbix_auto_config.pyzabbix.types.Host`, + which is a Zabbix host fetched from the Zabbix API. + This model represents a host collected from various sources + before it is turned into a Zabbix host.""" + + # Required fields enabled: bool hostname: str - + # Optional fields importance: Optional[Annotated[int, Field(ge=0)]] = None interfaces: List[Interface] = [] inventory: Dict[str, str] = {} - macros: Optional[None] = None # TODO: What should macros look like? + macros: Optional[Any] = None properties: Set[str] = set() - proxy_pattern: Optional[str] = None # NOTE: replace with Optional[typing.Pattern]? + proxy_pattern: Optional[str] = None siteadmins: Set[str] = set() sources: Set[str] = set() tags: Set[Tuple[str, str]] = set() + model_config = ConfigDict(validate_assignment=True, revalidate_instances="always") @model_validator(mode="before") @@ -319,3 +378,17 @@ class HostActions(BaseModel): def write_json(self, path: Path) -> None: """Writes a JSON serialized representation of self to a file.""" utils.write_file(path, self.model_dump_json(indent=2)) + + +class HostsSerializer(RootModel[List[Host]]): + root: List[Host] + + +def hosts_to_json(hosts: List[Host], indent: int = 2) -> str: + """Convert a list of Host objects to a JSON string.""" + return HostsSerializer(root=hosts).model_dump_json(indent=indent) + + +def print_hosts(hosts: List[Host], indent: int = 2) -> None: + """Print a list of Host objects to stdout as JSON.""" + print(hosts_to_json(hosts, indent=indent)) diff --git a/zabbix_auto_config/processing.py b/zabbix_auto_config/processing.py index f010ddc..b9f3698 100644 --- a/zabbix_auto_config/processing.py +++ b/zabbix_auto_config/processing.py @@ -1,10 +1,10 @@ from __future__ import annotations import datetime -import importlib import itertools import logging import multiprocessing +import multiprocessing.synchronize import os import os.path import queue @@ -17,32 +17,54 @@ from collections import defaultdict from enum import Enum from typing import TYPE_CHECKING +from typing import Any from typing import Dict from typing import List from typing import Optional from typing import Set +from typing import Tuple +from typing import Type +from typing import TypeVar +import httpx import psycopg2 -import pyzabbix -import requests.exceptions from packaging.version import Version from pydantic import ValidationError -from . import exceptions -from . import models -from . 
import utils -from ._types import HostModifierDict -from ._types import HostModifierModule -from ._types import SourceCollectorModule -from .errcount import RollingErrorCounter -from .state import State +from zabbix_auto_config import compat +from zabbix_auto_config import models +from zabbix_auto_config import utils +from zabbix_auto_config._types import HostModifier +from zabbix_auto_config._types import SourceCollectorModule +from zabbix_auto_config._types import ZacTags +from zabbix_auto_config.errcount import RollingErrorCounter +from zabbix_auto_config.exceptions import SourceCollectorError +from zabbix_auto_config.exceptions import SourceCollectorTypeError +from zabbix_auto_config.exceptions import ZabbixAPIException +from zabbix_auto_config.exceptions import ZabbixNotFoundError +from zabbix_auto_config.exceptions import ZACException +from zabbix_auto_config.failsafe import check_failsafe +from zabbix_auto_config.pyzabbix.client import ZabbixAPI +from zabbix_auto_config.pyzabbix.enums import InterfaceType +from zabbix_auto_config.pyzabbix.enums import InventoryMode +from zabbix_auto_config.pyzabbix.enums import MonitoringStatus +from zabbix_auto_config.pyzabbix.types import CreateHostInterfaceDetails +from zabbix_auto_config.pyzabbix.types import Host +from zabbix_auto_config.pyzabbix.types import HostGroup +from zabbix_auto_config.pyzabbix.types import HostInterface +from zabbix_auto_config.pyzabbix.types import Maintenance +from zabbix_auto_config.pyzabbix.types import ModelWithHosts +from zabbix_auto_config.pyzabbix.types import Proxy +from zabbix_auto_config.pyzabbix.types import Template +from zabbix_auto_config.pyzabbix.types import UpdateHostInterfaceDetails +from zabbix_auto_config.state import State if TYPE_CHECKING: from psycopg2.extensions import cursor as Cursor class BaseProcess(multiprocessing.Process): - def __init__(self, name: str, state: State): + def __init__(self, name: str, state: State) -> None: super().__init__() self.name = name self.state = state @@ -53,12 +75,13 @@ def __init__(self, name: str, state: State): self.state.set_ok() self.stop_event = multiprocessing.Event() - def run(self): - logging.info("Process starting") + def run(self) -> None: + logging.debug("Process starting") with SignalHandler(self.stop_event): while not self.stop_event.is_set(): - if not multiprocessing.parent_process().is_alive(): + parent_process = multiprocessing.parent_process() + if parent_process is None or not parent_process.is_alive(): logging.error("Parent is dead. 
Stopping") self.stop_event.set() break @@ -77,10 +100,12 @@ def run(self): self.state.set_ok() except Exception as e: # These are the error types we handle ourselves then continue - if isinstance(e, requests.exceptions.Timeout): + if isinstance(e, httpx.TimeoutException): logging.error("Timeout exception: %s", str(e)) - elif isinstance(e, exceptions.ZACException): + elif isinstance(e, ZACException): logging.error("Work exception: %s", str(e)) + elif isinstance(e, ZabbixAPIException): + logging.error("API exception: %s", str(e)) else: raise e # all other exceptions are fatal self.state.set_error(e) @@ -97,23 +122,27 @@ def run(self): logging.info("Process exiting") - def work(self): + def work(self) -> None: pass class SignalHandler: - def __init__(self, event): + def __init__(self, event: multiprocessing.synchronize.Event) -> None: self.event = event + self.old_sigint_handler = signal.getsignal(signal.SIGINT) + self.old_sigterm_handler = signal.getsignal(signal.SIGTERM) - def __enter__(self): + def __enter__(self) -> None: + # Set new signal handlers when entering the context + # Calling signal.signal() assigns new handler and returns the old one self.old_sigint_handler = signal.signal(signal.SIGINT, self._handler) self.old_sigterm_handler = signal.signal(signal.SIGTERM, self._handler) - def __exit__(self, *args): + def __exit__(self, *args: Any) -> None: signal.signal(signal.SIGINT, self.old_sigint_handler) signal.signal(signal.SIGTERM, self.old_sigterm_handler) - def _handler(self, signum, frame): + def _handler(self, signum: int, frame: Any) -> None: logging.info("Received signal: %s", signal.Signals(signum).name) self.event.set() @@ -126,7 +155,7 @@ def __init__( module: SourceCollectorModule, config: models.SourceCollectorSettings, source_hosts_queue: multiprocessing.Queue, - ): + ) -> None: super().__init__(name, state) self.module = module self.config = config @@ -137,6 +166,7 @@ def __init__( self.update_interval = self.config.update_interval # Pop off the config fields from the config we pass to the module + # Leaves only the custom options defined for the collector in the config self.collector_config = config.model_dump() for key in self.config.model_fields: self.collector_config.pop(key, None) @@ -149,14 +179,14 @@ def __init__( tolerance=self.config.error_tolerance, ) - def work(self): + def work(self) -> None: # If we are disabled, we must check if we should be re-enabled. # If not, we raise a ZACException, so that the state of the process # is marked as not ok. if self.disabled: if self.disabled_until > datetime.datetime.now(): time_left = self.disabled_until - datetime.datetime.now() - raise exceptions.ZACException( + raise ZACException( f"Source is disabled for {utils.timedelta_to_str(time_left)}" ) else: @@ -179,7 +209,7 @@ def work(self): # TODO: raise exception with message above or just an empty exception? 
else: self.disable() - raise exceptions.ZACException( + raise ZACException( f"Failed to collect from source {self.name!r}: {e}" ) from e @@ -212,7 +242,7 @@ def collect(self) -> None: hosts = self.module.collect(**self.collector_config) assert isinstance(hosts, list), "Collect module did not return a list" except Exception as e: - raise exceptions.SourceCollectorError(e) from e + raise SourceCollectorError(e) from e valid_hosts = [] # type: List[models.Host] for host in hosts: @@ -221,13 +251,14 @@ def collect(self) -> None: break if not isinstance(host, models.Host): - raise exceptions.SourceCollectorTypeError( + raise SourceCollectorTypeError( f"Collected object is not a Host object: {host!r}. Type: {type(host)}" ) host.sources = set([self.name]) valid_hosts.append(host) + # Add source hosts to queue source_hosts = { "source": self.name, "hosts": valid_hosts, @@ -257,29 +288,40 @@ class HostAction(Enum): class SourceHandlerProcess(BaseProcess): - def __init__(self, name, state, db_uri, source_hosts_queues): + def __init__( + self, + name: str, + state: State, + db_uri: str, + source_hosts_queues: List[multiprocessing.Queue], + ) -> None: super().__init__(name, state) self.db_uri = db_uri self.db_source_table = "hosts_source" + # NOTE: This interval should not be changed! + # A low value here makes it possible to constantly poll the + # source host queues for new hosts. + self.update_interval = 1 + try: self.db_connection = psycopg2.connect(self.db_uri) # TODO: Test connection? Cursor? except psycopg2.OperationalError as e: logging.error("Unable to connect to database.") - raise exceptions.ZACException(*e.args) + raise ZACException(*e.args) self.source_hosts_queues = source_hosts_queues for source_hosts_queue in self.source_hosts_queues: source_hosts_queue.cancel_join_thread() # Don't wait for empty queue when exiting - def work(self): + def work(self) -> None: + # Collect from all queues for source_hosts_queue in self.source_hosts_queues: if self.stop_event.is_set(): logging.debug("Told to stop. Breaking") break - try: source_hosts = source_hosts_queue.get_nowait() except queue.Empty: @@ -374,7 +416,7 @@ def handle_source_hosts(self, source: str, hosts: List[models.Host]) -> None: actions[action] += 1 logging.info( - "Done handling hosts from source, '%s', in %.2f seconds. Equal hosts: %d, replaced hosts: %d, inserted hosts: %d, removed hosts: %d. Next update: %s", + "Done handling hosts from source, '%s', in %.2f seconds. Equal hosts: %d, updated hosts: %d, inserted hosts: %d, removed hosts: %d. 
Next update: %s", source, time.time() - start_time, actions[HostAction.NO_CHANGE], @@ -385,21 +427,25 @@ def handle_source_hosts(self, source: str, hosts: List[models.Host]) -> None: ) +HostInterfaceDetailsT = TypeVar( + "HostInterfaceDetailsT", CreateHostInterfaceDetails, UpdateHostInterfaceDetails +) + + class SourceMergerProcess(BaseProcess): - def __init__(self, name, state, db_uri, host_modifier_dir): + def __init__( + self, + name: str, + state: State, + db_uri: str, + host_modifiers: List[HostModifier], + ) -> None: super().__init__(name, state) self.db_uri = db_uri self.db_source_table = "hosts_source" self.db_hosts_table = "hosts" - self.host_modifier_dir = host_modifier_dir - - self.host_modifiers = self.get_host_modifiers() - logging.info( - "Loaded %d host modifiers: %s", - len(self.host_modifiers), - ", ".join([repr(modifier["name"]) for modifier in self.host_modifiers]), - ) + self.host_modifiers = host_modifiers try: self.db_connection = psycopg2.connect(self.db_uri) @@ -410,43 +456,7 @@ def __init__(self, name, state, db_uri, host_modifier_dir): self.update_interval = 60 - def get_host_modifiers(self) -> List[HostModifierDict]: - sys.path.append(self.host_modifier_dir) - - try: - module_names = [ - filename[:-3] - for filename in os.listdir(self.host_modifier_dir) - if filename.endswith(".py") - ] - except FileNotFoundError: - logging.error( - "Host modififier directory %s does not exist.", self.host_modifier_dir - ) - sys.exit(1) - - host_modifiers = [] # type: List[HostModifierDict] - - for module_name in module_names: - module = importlib.import_module(module_name) - - if not isinstance(module, HostModifierModule): - logging.warning( - "Module '%s' is not a valid host modifier module. Skipping.", - module_name, - ) - continue - - host_modifier = { - "name": module_name, - "module": module, - } # type: HostModifierDict - - host_modifiers.append(host_modifier) - - return host_modifiers - - def work(self): + def work(self) -> None: self.merge_sources() def merge_hosts(self, hosts: List[models.Host]) -> models.Host: @@ -470,9 +480,7 @@ def handle_host( for host_modifier in self.host_modifiers: try: - modified_host = host_modifier["module"].modify( - host.model_copy(deep=True) - ) + modified_host = host_modifier.module.modify(host.model_copy(deep=True)) assert isinstance( modified_host, models.Host ), f"Modifier returned invalid type: {type(modified_host)}" @@ -485,13 +493,13 @@ def handle_host( logging.warning( "Host, '%s', was modified to be invalid by modifier: '%s'. 
Error: %s", host.hostname, - host_modifier["name"], + host_modifier.name, str(e), ) except Exception as e: logging.warning( "Error when running modifier %s on host '%s': %s", - host_modifier["name"], + host_modifier.name, host.hostname, str(e), ) @@ -548,7 +556,7 @@ def get_hosts(self, cursor: "Cursor") -> Dict[str, models.Host]: hosts[host_model.hostname] = host_model return hosts - def merge_sources(self): + def merge_sources(self) -> None: start_time = time.time() logging.info("Merge starting") actions = Counter() # type: Counter[HostAction] @@ -587,13 +595,13 @@ def merge_sources(self): break source_hosts = source_hosts_map.get(hostname) - host = hosts.get(hostname) if not source_hosts: logging.warning( "Host '%s' not found in source hosts table", hostname ) continue + host = hosts.get(hostname) host_action = self.handle_host(db_cursor, host, source_hosts) actions[host_action] += 1 @@ -609,7 +617,9 @@ def merge_sources(self): class ZabbixUpdater(BaseProcess): - def __init__(self, name, state, db_uri, settings: models.Settings): + def __init__( + self, name: str, state: State, db_uri: str, settings: models.Settings + ) -> None: super().__init__(name, state) self.db_uri = db_uri @@ -620,33 +630,34 @@ def __init__(self, name, state, db_uri, settings: models.Settings): # TODO: Test connection? Cursor? except psycopg2.OperationalError as e: logging.error("Unable to connect to database. Process exiting with error") - raise exceptions.ZACException(*e.args) + raise ZACException(*e.args) self.config = settings.zabbix self.settings = settings - self.update_interval = 60 + self.update_interval = 60 # default. Overriden in subclasses pyzabbix_logger = logging.getLogger("pyzabbix") pyzabbix_logger.setLevel(logging.ERROR) - self.api = pyzabbix.ZabbixAPI( + self.api = ZabbixAPI( self.config.url, timeout=self.config.timeout, # timeout for connect AND read + read_only=self.config.dryrun, # prevent accidental changes ) try: self.api.login(self.config.username, self.config.password) - except requests.exceptions.ConnectionError as e: + except httpx.ConnectError as e: logging.error("Error while connecting to Zabbix: %s", self.config.url) - raise exceptions.ZACException(*e.args) - except (pyzabbix.ZabbixAPIException, requests.exceptions.HTTPError) as e: - logging.error("Unable to login to Zabbix API: %s", str(e)) - raise exceptions.ZACException(*e.args) - except requests.exceptions.Timeout as e: + raise ZACException(*e.args) + except httpx.TimeoutException as e: logging.error( "Timed out while connecting to Zabbix API: %s", self.config.url ) - raise exceptions.ZACException(*e.args) + raise ZACException(*e.args) + except (ZabbixAPIException, httpx.HTTPError) as e: + logging.error("Unable to login to Zabbix API: %s", str(e)) + raise ZACException(*e.args) self.property_template_map = utils.read_map_file( os.path.join(self.config.map_dir, "property_template_map.txt") @@ -661,7 +672,7 @@ def __init__(self, name, state, db_uri, settings: models.Settings): ver = self.api.apiinfo.version() self.zabbix_version = Version(ver) - def work(self): + def work(self) -> None: start_time = time.time() logging.info("Zabbix update starting") self.do_update() @@ -671,331 +682,509 @@ def work(self): self.next_update.isoformat(timespec="seconds"), ) - def do_update(self): + def do_update(self) -> None: pass + def get_db_hosts(self) -> Dict[str, models.Host]: + with self.db_connection, self.db_connection.cursor() as db_cursor: + db_cursor.execute( + f"SELECT data FROM {self.db_hosts_table} WHERE data->>'enabled' = 'true'" + ) + 
db_hosts = {} # type: Dict[str, models.Host] + for res in db_cursor.fetchall(): + try: + host = models.Host(**res[0]) + except ValidationError as e: + # TODO: log invalid host then remove it from the database + logging.exception("Invalid host in hosts table: %s", e) + except Exception as e: + logging.exception("Error when parsing host from hosts table: %s", e) + else: + db_hosts[host.hostname] = host + return db_hosts -class ZabbixHostUpdater(ZabbixUpdater): - def disable_host(self, zabbix_host): - if not self.config.dryrun: - try: - disabled_hostgroup_id = self.api.hostgroup.get( - filter={"name": self.config.hostgroup_disabled} - )[0]["groupid"] - self.api.host.update( - hostid=zabbix_host["hostid"], - status=1, - templates=[], - groups=[{"groupid": disabled_hostgroup_id}], - ) - logging.info( - "Disabling host: '%s' (%s)", - zabbix_host["host"], - zabbix_host["hostid"], - ) - except pyzabbix.ZabbixAPIException as e: - logging.error( - "Error when disabling host '%s' (%s): %s", - zabbix_host["host"], - zabbix_host["hostid"], - e.args, - ) - except IndexError: - logging.critical( - "Disabled host group '%s' does not exist in Zabbix. Cannot disable host '%s'", - self.config.hostgroup_disabled, - zabbix_host.get("host"), - ) - self.stop_event.set() - else: + def get_hostgroups(self, name: Optional[str] = None) -> List[HostGroup]: + try: + names = [name] if name else [] + hostgroups = self.api.get_hostgroups(*names) + except ZabbixAPIException as e: + raise ZACException("Error when fetching hostgroups: %s", e) + return hostgroups + + +class ZabbixGarbageCollector(ZabbixUpdater): + """Cleans up disabled hosts from maintenances in Zabbix.""" + + def __init__( + self, name: str, state: State, db_uri: str, settings: models.Settings + ) -> None: + super().__init__(name, state, db_uri, settings) + + self.update_interval = ( + self.settings.zac.process.garbage_collector.update_interval + ) + + def filter_disabled_hosts( + self, model: ModelWithHosts + ) -> Tuple[List[Host], List[Host]]: + """Returns a tuple of (active_hosts, disabled_hosts) from a model.""" + keep: List[Host] = [] + remove: List[Host] = [] + for host in model.hosts: + if host.status == MonitoringStatus.OFF: + remove.append(host) + else: + keep.append(host) + return keep, remove + + def get_maintenances(self, disabled_hosts: List[Host]) -> List[Maintenance]: + """Fetch all maintenances with disabled hosts in Zabbix.""" + return self.api.get_maintenances(hosts=disabled_hosts, select_hosts=True) + + def remove_disabled_hosts_from_maintenance(self, maintenance: Maintenance) -> None: + """Remove all disabled hosts from a maintenance.""" + hosts_keep, hosts_remove = self.filter_disabled_hosts(maintenance) + + if self.config.dryrun: logging.info( - "DRYRUN: Disabling host: '%s' (%s)", - zabbix_host["host"], - zabbix_host["hostid"], + "DRYRUN: Removing disabled hosts from maintenance '%s': %s", + maintenance.name, + ", ".join([host.host for host in hosts_remove]), ) + return - def enable_host(self, db_host): - # TODO: Set correct proxy when enabling - hostname = db_host.hostname - if not self.config.dryrun: - try: - hostgroup_id = self.api.hostgroup.get( - filter={"name": self.config.hostgroup_all} - )[0]["groupid"] - - hosts = self.api.host.get(filter={"name": hostname}) - if hosts: - host = hosts[0] - self.api.host.update( - hostid=host["hostid"], - status=0, - groups=[{"groupid": hostgroup_id}], - ) - logging.info( - "Enabling old host: '%s' (%s)", host["host"], host["hostid"] - ) - else: - interface = { - "dns": hostname, - "ip": "", - 
"useip": 0, - "type": 1, - "port": 10050, - "main": 1, - } - result = self.api.host.create( - host=hostname, - status=0, - groups=[{"groupid": hostgroup_id}], - interfaces=[interface], - ) - logging.info( - "Enabling new host: '%s' (%s)", hostname, result["hostids"][0] - ) - except pyzabbix.ZabbixAPIException as e: + # No disabled hosts in maintenance (Should never happen) + if len(hosts_keep) == len(maintenance.hosts): + logging.debug("No disabled hosts in maintenance '%s'", maintenance.name) + # No hosts left in maintenance + elif not hosts_keep: + if self.settings.zac.process.garbage_collector.delete_empty_maintenance: + self.delete_maintenance(maintenance) + else: logging.error( - "Error when enabling/creating host '%s': %s", hostname, e.args - ) - except IndexError: - logging.critical( - "Enabled host group '%s' does not exist in Zabbix. Cannot enable host '%s'", - self.config.hostgroup_all, - hostname, + "Unable to remove disabled hosts from maintenance '%s': no hosts left. Delete maintenance manually.", + maintenance.name, ) - self.stop_event.set() - else: - logging.info("DRYRUN: Enabling host: '%s'", hostname) - - def clear_proxy(self, zabbix_host): - if not self.config.dryrun: - self.api.host.update(hostid=zabbix_host["hostid"], proxy_hostid="0") - logging.info( - "Clearing proxy on host: '%s' (%s)", - zabbix_host["host"], - zabbix_host["hostid"], - ) else: + self.api.update_maintenance(maintenance, hosts_keep) logging.info( - "DRYRUN: Clearing proxy on host: '%s' (%s)", - zabbix_host["host"], - zabbix_host["hostid"], + "Removed disabled hosts from maintenance '%s': %s", + maintenance.name, + ", ".join([host.host for host in hosts_remove]), ) - def set_interface(self, zabbix_host, interface, useip, old_id): - if not self.config.dryrun: - parameters = { - "hostid": zabbix_host["hostid"], - "main": 1, - "port": interface.port, - "type": interface.type, - "useip": int(useip), - } - if useip: - parameters["dns"] = "" - parameters["ip"] = interface.endpoint - else: - parameters["dns"] = interface.endpoint - parameters["ip"] = "" + def delete_maintenance(self, maintenance: Maintenance) -> None: + """Delete a maintenance in Zabbix.""" + if self.config.dryrun: + logging.info("DRYRUN: Deleting maintenance '%s'", maintenance.name) + return + self.api.delete_maintenance(maintenance) + logging.info("Deleted maintenance '%s'", maintenance.name) - if interface.details: - parameters["details"] = interface.details + def cleanup_maintenances(self, disabled_hosts: List[Host]) -> None: + maintenances = self.api.get_maintenances( + hosts=disabled_hosts, select_hosts=True + ) + for maintenance in maintenances: + self.remove_disabled_hosts_from_maintenance(maintenance) - if old_id: - self.api.hostinterface.update(interfaceid=old_id, **parameters) - logging.info( - "Updating old interface (type: %s) on host: '%s' (%s)", - interface.type, - zabbix_host["host"], - zabbix_host["hostid"], - ) - else: - self.api.hostinterface.create(**parameters) - logging.info( - "Creating new interface (type: %s) on host: '%s' (%s)", - interface.type, - zabbix_host["host"], - zabbix_host["hostid"], - ) - else: - logging.info( - "DRYRUN: Setting interface (type: %d) on host: '%s' (%s)", - interface.type, - zabbix_host["host"], - zabbix_host["hostid"], + def do_update(self) -> None: + if not self.settings.zac.process.garbage_collector.enabled: + logging.debug("Garbage collection is disabled") + return + # Get all disabled hosts + disabled_hosts = self.api.get_hosts(status=MonitoringStatus.OFF) + 
self.cleanup_maintenances(disabled_hosts)
+
+
+class ZabbixHostUpdater(ZabbixUpdater):
+ def __init__(
+ self, name: str, state: State, db_uri: str, settings: models.Settings
+ ) -> None:
+ super().__init__(name, state, db_uri, settings)
+
+ self.update_interval = self.settings.zac.process.host_updater.update_interval
+
+ # Fetch required host groups on startup
+ self.disabled_hostgroup = self.get_or_create_hostgroup(
+ self.config.hostgroup_disabled
+ )
+ self.enabled_hostgroup = self.get_or_create_hostgroup(self.config.hostgroup_all)
+
+ def get_or_create_hostgroup(self, hostgroup: str) -> HostGroup:
+ """Fetch a host group, creating it if it doesn't exist."""
+ try:
+ return self.api.get_hostgroup(hostgroup)
+ except ZabbixNotFoundError:
+ logging.info("Hostgroup '%s' not found. Creating it.", hostgroup)
+ self.api.create_hostgroup(hostgroup)
+ return self.api.get_hostgroup(hostgroup)
+
+ def get_maintenances(self, zabbix_host: Host) -> List[Maintenance]:
+ try:
+ maintenances = self.api.get_maintenances(
+ hosts=[zabbix_host],
+ select_hosts=True,
+ )
+ except ZabbixAPIException as e:
+ logging.error(
+ "Error when fetching maintenances for host '%s' (%s): %s",
+ zabbix_host.host,
+ zabbix_host.hostid,
+ e.args,
)
+ maintenances = []
+ return maintenances
- def set_inventory_mode(self, zabbix_host, inventory_mode):
- if not self.config.dryrun:
- self.api.host.update(
- hostid=zabbix_host["hostid"], inventory_mode=inventory_mode
+ def do_remove_host_from_maintenance(
+ self, zabbix_host: Host, maintenance: Maintenance
+ ) -> None:
+ if self.config.dryrun:
+ logging.info(
+ "DRYRUN: Removing host %s from maintenance %s",
+ zabbix_host.host,
+ maintenance.name,
)
+ return
+
+ # Determine new hosts list for maintenance
+ new_hosts = [
+ host for host in maintenance.hosts if host.hostid != zabbix_host.hostid
+ ]
+
+ if not new_hosts:
+ # NOTE: ZabbixGarbageCollector cleans this up if enabled
 logging.info(
- "Setting inventory_mode (%d) on host: '%s' (%s)",
- inventory_mode,
- zabbix_host["host"],
- zabbix_host["hostid"],
+ "Removing host '%s' would leave maintenance '%s' empty. 
Skipping.", + zabbix_host.host, + maintenance.name, + ) + return + + try: + self.api.update_maintenance(maintenance, hosts=new_hosts) + except ZabbixAPIException as e: + logging.error( + "Error when removing host '%s' from maintenance '%s': %s", + zabbix_host.host, + maintenance.name, + e.args, ) else: logging.info( - "DRYRUN: Setting inventory_mode (%d) on host: '%s' (%s)", - inventory_mode, - zabbix_host["host"], - zabbix_host["hostid"], + "Removed host %s from maintenance %s", + zabbix_host.host, + maintenance.name, ) - def set_inventory(self, zabbix_host, inventory): - if not self.config.dryrun: - self.api.host.update(hostid=zabbix_host["hostid"], inventory=inventory) + def remove_host_from_maintenances(self, zabbix_host: Host) -> None: + maintenances = self.get_maintenances(zabbix_host) + for maintenance in maintenances: + self.do_remove_host_from_maintenance(zabbix_host, maintenance) + + def disable_host(self, zabbix_host: Host) -> None: + # Host needs to be removed from all maintenances before it is disabled + self.remove_host_from_maintenances(zabbix_host) + if self.config.dryrun: logging.info( - "Setting inventory (%s) on host: '%s'", inventory, zabbix_host["host"] + "DRYRUN: Disabling host: '%s' (%s)", + zabbix_host.host, + zabbix_host.hostid, + ) + return + + try: + self.api.update_host( + zabbix_host, + status=MonitoringStatus.OFF, + templates=[], + groups=[self.disabled_hostgroup], + ) + except ZabbixAPIException as e: + logging.error( + "Error when disabling host '%s' (%s): %s", + zabbix_host.host, + zabbix_host.hostid, + e.args, ) else: logging.info( - "DRYRUN: Setting inventory (%s) on host: '%s'", - inventory, - zabbix_host["host"], + "Disabled host: '%s' (%s)", + zabbix_host.host, + zabbix_host.hostid, ) - def set_proxy(self, zabbix_host, zabbix_proxy): - if not self.config.dryrun: - self.api.host.update( - hostid=zabbix_host["hostid"], proxy_hostid=zabbix_proxy["proxyid"] + def enable_host(self, db_host: models.Host) -> None: + # TODO: Set correct proxy when enabling + hostname = db_host.hostname + if self.config.dryrun: + logging.info("DRYRUN: Enabling host: '%s'", hostname) + return + + try: + hosts = self.api.get_hosts(hostname, search=False) + + if hosts: + host = hosts[0] + self.api.update_host( + host, status=MonitoringStatus.ON, groups=[self.enabled_hostgroup] + ) + logging.info("Enabled old host: '%s' (%s)", host.host, host.hostid) + else: + interface = HostInterface( + dns=hostname, + ip="", + useip=False, + type=1, + port="10050", + main=1, + ) + hostid = self.api.create_host( + hostname, groups=[self.enabled_hostgroup], interfaces=[interface] + ) + logging.info("Enabled new host: '%s' (%s)", hostname, hostid) + except ZabbixAPIException as e: + logging.error( + "Error when enabling/creating host '%s': %s", hostname, e.args ) + + def clear_proxy(self, zabbix_host: Host) -> None: + if self.config.dryrun: logging.info( - "Setting proxy (%s) on host: '%s' (%s)", - zabbix_proxy["host"], - zabbix_host["host"], - zabbix_host["hostid"], + "DRYRUN: Clearing proxy on host: '%s' (%s)", + zabbix_host.host, + zabbix_host.hostid, ) + return + try: + self.api.clear_host_proxy(zabbix_host) + except ZabbixAPIException as e: + logging.error("%s", e) # Just log the error verbatim else: + logging.info("Cleared proxy on host %s", zabbix_host) + + def set_interface( + self, + zabbix_host: Host, + interface: models.Interface, + useip: bool, + old_interface: Optional[HostInterface] = None, + ) -> None: + if self.config.dryrun: logging.info( - "DRYRUN: Setting proxy (%s) on host: '%s' 
(%s)", - zabbix_proxy["host"], - zabbix_host["host"], - zabbix_host["hostid"], + "DRYRUN: Setting interface (type: %d) on host: %s", + interface.type, + zabbix_host, ) + return - def set_tags(self, zabbix_host, tags): - if not self.config.dryrun: - zabbix_tags = utils.zac_tags2zabbix_tags(tags) - self.api.host.update(hostid=zabbix_host["hostid"], tags=zabbix_tags) - logging.info( - "Setting tags (%s) on host: '%s' (%s)", - tags, - zabbix_host["host"], - zabbix_host["hostid"], + if useip: + dns = None + ip = interface.endpoint + else: + dns = interface.endpoint + ip = None + + try: + ifacetype = InterfaceType(interface.type) + except ValueError: + logging.error( + "Invalid/unknown interface type (%d) for host '%s'", + interface.type, + zabbix_host.host, ) + return + + # Update existing interface + if old_interface: + self.update_host_interface( + zabbix_host, + interface, + old_interface, + ifacetype, + useip, + dns, + ip, + ) + # Create new interface else: - logging.info( - "DRYRUN: Setting tags (%s) on host: '%s' (%s)", - tags, - zabbix_host["host"], - zabbix_host["hostid"], + self.create_host_interface( + zabbix_host, + interface, + ifacetype, + useip, + dns, + ip, ) - def handle_failsafe_limit(self, to_add: List[str], to_remove: List[str]) -> None: - """Handles situations where the number of hosts to add/remove exceeds the failsafe. + def create_host_interface( + self, + zabbix_host: Host, + interface: models.Interface, + ifacetype: InterfaceType, + useip: bool, + dns: Optional[str], + ip: Optional[str], + ) -> None: + details = self.validate_interface_details( + CreateHostInterfaceDetails, interface, zabbix_host + ) + self.api.create_host_interface( + zabbix_host, + main=True, + port=interface.port, + type=ifacetype, + use_ip=useip, + dns=dns, + ip=ip, + details=details, + ) + logging.info( + "Created new interface (type: %s) on host: %s", + ifacetype.name, + zabbix_host, + ) - If a failsafe OK file exists, the method will attempt to remove it - and proceed with the changes. Otherwise, it will write the list of - hosts to add and remove to a failsafe file and raise a ZACException.""" - if self._check_failsafe_ok_file(): - return - # Failsafe OK file does not exist or cannot be deleted. - # We must write the hosts to add/remove and raise an exception - self.write_failsafe_hosts(to_add, to_remove) - logging.warning( - "Too many hosts to change (failsafe=%d). Remove: %d, Add: %d. Aborting", - self.config.failsafe, - len(to_remove), - len(to_add), + def update_host_interface( + self, + zabbix_host: Host, + interface: models.Interface, + old_interface: HostInterface, + ifacetype: InterfaceType, + useip: bool, + dns: Optional[str], + ip: Optional[str], + ) -> None: + details = self.validate_interface_details( + UpdateHostInterfaceDetails, interface, zabbix_host ) - raise exceptions.ZACException("Failsafe triggered") - def write_failsafe_hosts(self, to_add: List[str], to_remove: List[str]) -> None: - if not self.settings.zac.failsafe_file: + self.api.update_host_interface( + old_interface, + hostid=zabbix_host.hostid, + main=True, + port=interface.port, + type=ifacetype, + use_ip=useip, + dns=dns, + ip=ip, + details=details, + ) + logging.info( + "Updated old interface (type: %s) on host: %s", + interface.type, + zabbix_host, + ) + + def validate_interface_details( + self, cls: Type[HostInterfaceDetailsT], interface: models.Interface, host: Host + ) -> Optional[HostInterfaceDetailsT]: + """Validate interface details from a source host. 
+ + Attempts to construct a model used to create or update a host interface + from host interface details of a source host.""" + if not interface.details: + return None # nothing to validate + try: + return cls.model_validate(interface.details) + except ValidationError: + logging.error( + "Invalid interface details (%s) for host '%s'", + interface.details, + host.host, + ) + return None + + def set_inventory_mode( + self, zabbix_host: Host, inventory_mode: InventoryMode + ) -> None: + if self.config.dryrun: logging.info( - "Unable to write failsafe hosts. No diagnostics directory configured." + "DRYRUN: Setting inventory_mode (%d) on host: %s", + inventory_mode, + zabbix_host, ) return - h = models.HostActions(add=to_add, remove=to_remove) - h.write_json(self.settings.zac.failsafe_file) + + self.api.update_host(zabbix_host, inventory_mode=inventory_mode) logging.info( - "Wrote list of hosts to add and remove to %s", - self.settings.zac.failsafe_file, + "Setting inventory_mode (%d) on host: %s", inventory_mode, zabbix_host ) - def _check_failsafe_ok_file(self) -> bool: - """Checks the failsafe OK file and returns True if application should proceed.""" - # Check for presence of file - if not self.settings.zac.failsafe_ok_file: - return False - if not self.settings.zac.failsafe_ok_file.exists(): + def set_inventory(self, zabbix_host: Host, inventory: Dict[str, str]) -> None: + if self.config.dryrun: + logging.info( + "DRYRUN: Setting inventory (%s) on host: %s", inventory, zabbix_host + ) + return + # TODO: refactor. Move everything in to ZabbixAPI.update_host? + self.api.update_host_inventory(zabbix_host, inventory) + logging.info("Setting inventory (%s) on host: %s", inventory, zabbix_host) + + def set_proxy(self, zabbix_host: Host, zabbix_proxy: Proxy) -> None: + if self.config.dryrun: logging.info( - "Failsafe OK file %s does not exist. Create it to approve changes.", - self.settings.zac.failsafe_ok_file, + "DRYRUN: Setting proxy %s on host %s", zabbix_proxy.name, zabbix_host ) - return False - # File exists, attempt to delete it + return try: - self.settings.zac.failsafe_ok_file.unlink() - except OSError as e: - logging.error("Unable to delete failsafe OK file: %s", e) - if self.settings.zac.failsafe_ok_file_strict: - return False - logging.warning("Continuing with changes despite failed deletion.") - logging.info("Failsafe OK file exists. 
Proceeding with changes.") - return True - - def do_update(self): - with self.db_connection, self.db_connection.cursor() as db_cursor: - db_cursor.execute( - f"SELECT data FROM {self.db_hosts_table} WHERE data->>'enabled' = 'true'" + self.api.update_host_proxy(zabbix_host, zabbix_proxy) + except ZabbixAPIException as e: + logging.error( + "Failed to set proxy %s on host %s: %s", + zabbix_proxy.name, + zabbix_host, + e, ) - db_hosts = { - t[0]["hostname"]: models.Host(**t[0]) for t in db_cursor.fetchall() - } - # status:0 = monitored, flags:0 = non-discovered host - zabbix_hosts = { - host["host"]: host - for host in self.api.host.get( - filter={"status": 0, "flags": 0}, - output=[ - "hostid", - "host", - "status", - "flags", - "proxy_hostid", - "inventory_mode", - ], - selectGroups=["groupid", "name"], - selectInterfaces=[ - "dns", - "interfaceid", - "ip", - "main", - "port", - "type", - "useip", - "details", - ], - selectInventory=self.config.managed_inventory, - selectParentTemplates=["templateid", "host"], - selectTags=["tag", "value"], + else: + logging.info("Set proxy %s on host %s", zabbix_proxy.name, zabbix_host) + + def set_tags(self, zabbix_host: Host, tags: ZacTags) -> None: + if self.config.dryrun: + logging.info( + "DRYRUN: Setting tags (%s) on host: %s", + tags, + zabbix_host, ) - } - zabbix_proxies = { - proxy["host"]: proxy - for proxy in self.api.proxy.get(output=["proxyid", "host", "status"]) - } - zabbix_managed_hosts = [] - zabbix_manual_hosts = [] + return + zabbix_tags = utils.zac_tags2zabbix_tags(tags) + try: + self.api.update_host(zabbix_host, tags=zabbix_tags) + except ZabbixAPIException as e: + logging.error( + "Failed to set tags (%s) on host %s: %s", tags, zabbix_host, e + ) + else: + logging.info("Set tags (%s) on host: %s", tags, zabbix_host) + + def do_update(self) -> None: + db_hosts = self.get_db_hosts() + + zhosts = self.api.get_hosts( + status=MonitoringStatus.ON, + # flags:0 = non-discovered host + flags=0, + select_interfaces=True, + select_inventory=True, + select_templates=True, + select_tags=True, + select_groups=True, + ) + zabbix_hosts = {host.host: host for host in zhosts} + + zproxies = self.api.get_proxies() + zabbix_proxies = {proxy.name: proxy for proxy in zproxies} + if not zabbix_proxies: + logging.warning("No Zabbix proxies found.") + + zabbix_managed_hosts: List[Host] = [] + zabbix_manual_hosts: List[Host] = [] for hostname, host in zabbix_hosts.items(): if self.stop_event.is_set(): logging.debug("Told to stop. 
Breaking") break - hostgroup_names = [group["name"] for group in host["groups"]] + hostgroup_names = [group.name for group in host.groups] if self.config.hostgroup_manual in hostgroup_names: zabbix_manual_hosts.append(host) else: @@ -1003,8 +1192,8 @@ def do_update(self): db_hostnames = set(db_hosts.keys()) zabbix_hostnames = set(zabbix_hosts.keys()) - zabbix_managed_hostnames = {host["host"] for host in zabbix_managed_hosts} - zabbix_manual_hostnames = {host["host"] for host in zabbix_manual_hosts} + zabbix_managed_hostnames = {host.host for host in zabbix_managed_hosts} + zabbix_manual_hostnames = {host.host for host in zabbix_manual_hosts} hostnames_to_remove = list( zabbix_managed_hostnames - db_hostnames - zabbix_manual_hostnames @@ -1034,11 +1223,7 @@ def do_update(self): logging.debug("In both: %d", len(hostnames_in_both)) # Check if we have too many hosts to add/remove - if ( - len(hostnames_to_remove) > self.config.failsafe - or len(hostnames_to_add) > self.config.failsafe - ): - self.handle_failsafe_limit(hostnames_to_add, hostnames_to_remove) + check_failsafe(self.settings, hostnames_to_add, hostnames_to_remove) for hostname in hostnames_to_remove: if self.stop_event.is_set(): @@ -1065,30 +1250,31 @@ def do_update(self): zabbix_host = zabbix_hosts[hostname] # Check proxy. A host with proxy_pattern should get a proxy that matches the pattern. - zabbix_proxy_id = zabbix_host["proxy_hostid"] + zabbix_proxy_id = zabbix_host.proxyid zabbix_proxy = [ proxy for proxy in zabbix_proxies.values() - if proxy["proxyid"] == zabbix_proxy_id + if proxy.proxyid == zabbix_proxy_id ] current_zabbix_proxy = zabbix_proxy[0] if zabbix_proxy else None if db_host.proxy_pattern: possible_proxies = [ proxy for proxy in zabbix_proxies.values() - if re.match(db_host.proxy_pattern, proxy["host"]) + if re.match(db_host.proxy_pattern, proxy.name) ] if not possible_proxies: logging.error( "Proxy pattern ('%s') for host, '%s' (%s), doesn't match any proxies.", db_host.proxy_pattern, hostname, - zabbix_host["hostid"], + zabbix_host.hostid, ) else: new_proxy = random.choice(possible_proxies) if current_zabbix_proxy and not re.match( - db_host.proxy_pattern, current_zabbix_proxy["host"] + db_host.proxy_pattern, + current_zabbix_proxy.name, ): # Wrong proxy, set new self.set_proxy(zabbix_host, new_proxy) @@ -1101,69 +1287,65 @@ def do_update(self): # Check the main/default interfaces if db_host.interfaces: - zabbix_interfaces = zabbix_host["interfaces"] + zabbix_interfaces = zabbix_host.interfaces - # The API doesn't return the proper, documented types. 
We need to fix these types - # https://www.zabbix.com/documentation/current/manual/api/reference/hostinterface/object - for zabbix_interface in zabbix_interfaces: - zabbix_interface["type"] = int(zabbix_interface["type"]) - zabbix_interface["main"] = int(zabbix_interface["main"]) - zabbix_interface["useip"] = int(zabbix_interface["useip"]) - - # Restructure object, and filter non main/default interfaces + # Create dict of main interfaces only zabbix_interfaces = { - i["type"]: i for i in zabbix_host["interfaces"] if i["main"] == 1 + i.type: i for i in zabbix_host.interfaces if i.main == 1 } for interface in db_host.interfaces: # We assume that we're using an IP if the endpoint is a valid IP useip = utils.is_valid_ip(interface.endpoint) - if interface.type in zabbix_interfaces: - # This interface type exists on the current zabbix host - # TODO: This logic could probably be simplified and should be refactored - zabbix_interface = zabbix_interfaces[interface.type] + if zabbix_interface := zabbix_interfaces.get(interface.type): if useip and ( - zabbix_interface["ip"] != interface.endpoint - or zabbix_interface["port"] != interface.port - or zabbix_interface["useip"] != useip + zabbix_interface.ip != interface.endpoint + or zabbix_interface.port != interface.port + or zabbix_interface.useip != useip ): # This IP interface is configured wrong, set it self.set_interface( zabbix_host, interface, useip, - zabbix_interface["interfaceid"], + zabbix_interface, ) elif not useip and ( - zabbix_interface["dns"] != interface.endpoint - or zabbix_interface["port"] != interface.port - or zabbix_interface["useip"] != useip + zabbix_interface.dns != interface.endpoint + or zabbix_interface.port != interface.port + or zabbix_interface.useip != useip ): + logging.debug( + "DNS interface of type %s for host '%s' is configured wrong", + interface.type, + db_host.hostname, + ) # This DNS interface is configured wrong, set it self.set_interface( zabbix_host, interface, useip, - zabbix_interface["interfaceid"], + zabbix_interface, ) - if interface.type == 2: - # Check that the interface details are correct. Note - # that responses from the Zabbix API are quoted, so we - # need to convert our natively typed values to strings. - # Also note that the Zabbix API response may include more + if interface.type == 2 and interface.details: + details_dict = zabbix_interface.details + # Check that the interface details are correct. + # Note that the Zabbix API response may include more # information than our back-end; ignore such keys. - # TODO: this is terrible and should be implemented - # using dataclasses for the interface and host types. if not all( - zabbix_interface["details"].get(k, None) == str(v) + str(details_dict.get(k)) == str(v) for k, v in interface.details.items() ): + logging.debug( + "SNMP interface for host '%s' differs from source data. Fixing.", + db_host.hostname, + ) # This SNMP interface is configured wrong, set it. self.set_interface( zabbix_host, interface, useip, - zabbix_interface["interfaceid"], + zabbix_interface, ) else: # This interface is missing, set it @@ -1173,15 +1355,15 @@ def do_update(self): other_zabbix_tags = utils.zabbix_tags2zac_tags( [ tag - for tag in zabbix_host["tags"] - if not tag["tag"].startswith(self.config.tags_prefix) + for tag in zabbix_host.tags + if not tag.tag.startswith(self.config.tags_prefix) ] ) # These are tags outside our namespace/prefix. Keep them. 
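+ # Tags within our prefix are managed by ZAC and are reconciled against the source data below.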
current_tags = utils.zabbix_tags2zac_tags( [ tag - for tag in zabbix_host["tags"] - if tag["tag"].startswith(self.config.tags_prefix) + for tag in zabbix_host.tags + if tag.tag.startswith(self.config.tags_prefix) ] ) db_tags = db_host.tags @@ -1193,10 +1375,10 @@ def do_update(self): if ignored_tags: db_tags = db_tags - ignored_tags logging.warning( - "Tags (%s) not matching tags prefix ('%s') is configured on host '%s'. They will be ignored.", + "Tags (%s) not matching tags prefix ('%s') is configured on host %s. They will be ignored.", ignored_tags, self.config.tags_prefix, - zabbix_host["host"], + zabbix_host, ) tags_to_remove = current_tags - db_tags @@ -1205,27 +1387,27 @@ def do_update(self): if tags_to_remove or tags_to_add: if tags_to_remove: logging.debug( - "Going to remove tags '%s' from host '%s'.", + "Going to remove tags '%s' from host %s.", tags_to_remove, - zabbix_host["host"], + zabbix_host, ) if tags_to_add: logging.debug( - "Going to add tags '%s' to host '%s'.", + "Going to add tags '%s' to host %s.", tags_to_add, - zabbix_host["host"], + zabbix_host, ) self.set_tags(zabbix_host, tags) - if int(zabbix_host["inventory_mode"]) != 1: - self.set_inventory_mode(zabbix_host, 1) + if zabbix_host.inventory_mode != InventoryMode.AUTOMATIC: + self.set_inventory_mode(zabbix_host, InventoryMode.AUTOMATIC) if db_host.inventory: - if zabbix_host["inventory"]: + if zabbix_host.inventory: changed_inventory = { k: v for k, v in db_host.inventory.items() - if db_host.inventory[k] != zabbix_host["inventory"].get(k, None) + if db_host.inventory[k] != zabbix_host.inventory.get(k, None) } else: changed_inventory = db_host.inventory @@ -1254,107 +1436,114 @@ def do_update(self): class ZabbixTemplateUpdater(ZabbixUpdater): - def clear_templates(self, templates, host): - logging.debug("Clearing templates on host: '%s'", host["host"]) - if not self.config.dryrun: - try: - templates = [ - {"templateid": template_id} for _, template_id in templates.items() - ] - self.api.host.update(hostid=host["hostid"], templates_clear=templates) - except pyzabbix.ZabbixAPIException as e: - logging.error( - "Error when clearing templates on host '%s': %s", - host["host"], - e.args, - ) + def __init__( + self, name: str, state: State, db_uri: str, settings: models.Settings + ) -> None: + super().__init__(name, state, db_uri, settings) + self.update_interval = ( + self.settings.zac.process.template_updater.update_interval + ) + + def clear_templates(self, templates: List[Template], host: Host) -> None: + if self.config.dryrun: + logging.debug( + "DRYRUN: Clearing templates %s on host: %s", + ", ".join(t.host for t in templates), + host, + ) + return + + try: + self.api.unlink_templates_from_hosts(templates, [host], clear=True) + except ZabbixAPIException as e: + logging.error("Error when clearing templates on host %s: %s", host, e) else: - logging.debug("DRYRUN: Clearing templates on host: '%s'", host["host"]) + logging.info( + "Cleared templates %s on host: %s", + ", ".join(t.host for t in templates), + host, + ) - def set_templates(self, templates, host): - if not self.config.dryrun: - logging.debug("Setting templates on host: '%s'", host["host"]) - try: - templates = [ - {"templateid": template_id} for _, template_id in templates.items() - ] - self.api.host.update(hostid=host["hostid"], templates=templates) - except pyzabbix.ZabbixAPIException as e: - logging.error( - "Error when setting templates on host '%s': %s", - host["host"], - e.args, - ) + def set_templates(self, templates: List[Template], host: Host) 
-> None: + # For logging + to_add = ", ".join(f"{t.host!r}" for t in templates) + + if self.config.dryrun: + logging.debug("DRYRUN: Setting templates %s on host: %s", to_add, host) + return + + try: + self.api.link_templates_to_hosts(templates, [host]) + except ZabbixAPIException as e: + logging.error( + "Error when setting templates %s on host %s: %s", to_add, host, e + ) else: - logging.debug("DRYRUN: Setting templates on host: '%s'", host["host"]) + logging.info("Set templates %s on host: %s", to_add, host) - def do_update(self): + def do_update(self) -> None: + # Determine names of templates we are managing managed_template_names = set( itertools.chain.from_iterable(self.property_template_map.values()) ) - zabbix_templates = {} - for zabbix_template in self.api.template.get(output=["host", "templateid"]): - zabbix_templates[zabbix_template["host"]] = zabbix_template["templateid"] + zabbix_templates: Dict[str, Template] = {} + for zabbix_template in self.api.get_templates(): + zabbix_templates[zabbix_template.host] = zabbix_template + managed_template_names = managed_template_names.intersection( set(zabbix_templates.keys()) ) # If the template isn't in zabbix we can't manage it - with self.db_connection, self.db_connection.cursor() as db_cursor: - db_cursor.execute( - f"SELECT data FROM {self.db_hosts_table} WHERE data->>'enabled' = 'true'" - ) - db_hosts = { - t[0]["hostname"]: models.Host(**t[0]) for t in db_cursor.fetchall() - } - zabbix_hosts = { - host["host"]: host - for host in self.api.host.get( - filter={"status": 0, "flags": 0}, - output=["hostid", "host"], - selectGroups=["groupid", "name"], - selectParentTemplates=["templateid", "host"], - ) - } + + # Get hosts from DB + db_hosts = self.get_db_hosts() + + # Get hosts from Zabbix + _hosts = self.api.get_hosts( + status=MonitoringStatus.ON, + flags=0, + select_groups=True, + select_templates=True, + ) + zabbix_hosts = {host.host: host for host in _hosts} for zabbix_hostname, zabbix_host in zabbix_hosts.items(): if self.stop_event.is_set(): logging.debug("Told to stop. 
Breaking") break + # Manually managed host - skip it if self.config.hostgroup_manual in [ - group["name"] for group in zabbix_host["groups"] + group.name for group in zabbix_host.groups ]: - logging.debug( - "Skipping manual host: '%s' (%s)", - zabbix_hostname, - zabbix_host["hostid"], - ) + logging.debug("Skipping manual host: %s", zabbix_host) continue + # Disabled hosts are not managed if zabbix_hostname not in db_hosts: logging.debug( - "Skipping host (It is not enabled in the database): '%s' (%s)", - zabbix_hostname, - zabbix_host["hostid"], + "Skipping host (It is not enabled in the database): %s", zabbix_host ) continue db_host = db_hosts[zabbix_hostname] - synced_template_names = set() - for _property in db_host.properties: - if _property in self.property_template_map: - synced_template_names.update(self.property_template_map[_property]) + # Determine managed templates + synced_template_names: Set[str] = set() + for prop in db_host.properties: + if template_names := self.property_template_map.get(prop): + synced_template_names.update(template_names) synced_template_names = synced_template_names.intersection( set(zabbix_templates.keys()) ) # If the template isn't in zabbix we can't manage it - host_templates = {} - for zabbix_template in zabbix_host["parentTemplates"]: - host_templates[zabbix_template["host"]] = zabbix_template["templateid"] + host_templates: Dict[str, Template] = {} + for zabbix_template in zabbix_host.parent_templates: + host_templates[zabbix_template.host] = zabbix_template old_host_templates = host_templates.copy() - host_templates_to_remove = {} + host_templates_to_remove: Dict[str, Template] = {} + # Update templates on host for template_name in list(host_templates.keys()): if ( template_name in managed_template_names @@ -1377,7 +1566,6 @@ def do_update(self): zabbix_hostname, ) host_templates[template_name] = zabbix_templates[template_name] - if host_templates != old_host_templates: logging.info( "Updating templates on host '%s'. Old: %s. New: %s", @@ -1386,29 +1574,35 @@ def do_update(self): ", ".join(host_templates.keys()), ) if host_templates_to_remove: - self.clear_templates(host_templates_to_remove, zabbix_host) + self.clear_templates( + list(host_templates_to_remove.values()), zabbix_host + ) # TODO: Setting templates might not be necessary if we only removed templates. 
Consider refactor # TODO: Setting templates should not be performed if template clearing has failed (will lead to unlink without clear) - self.set_templates(host_templates, zabbix_host) + self.set_templates(list(host_templates.values()), zabbix_host) class ZabbixHostgroupUpdater(ZabbixUpdater): - def set_hostgroups(self, hostgroups, host): - if not self.config.dryrun: - logging.debug("Setting hostgroups on host: '%s'", host["host"]) - try: - groups = [ - {"groupid": hostgroup_id} for _, hostgroup_id in hostgroups.items() - ] - self.api.host.update(hostid=host["hostid"], groups=groups) - except pyzabbix.ZabbixAPIException as e: - logging.error( - "Error when setting hostgroups on host '%s': %s", - host["host"], - e.args, - ) + def __init__( + self, name: str, state: State, db_uri: str, settings: models.Settings + ) -> None: + super().__init__(name, state, db_uri, settings) + self.update_interval = ( + self.settings.zac.process.hostgroup_updater.update_interval + ) + + def set_hostgroups(self, host: Host, hostgroups: List[HostGroup]) -> None: + """Set host groups on a host given a list of host groups.""" + to_add = ", ".join(f"{hg.name!r}" for hg in hostgroups) + if self.config.dryrun: + logging.debug("DRYRUN: Setting hostgroups %s on host: %s", to_add, host) + return + try: + self.api.set_host_hostgroups(host, hostgroups) + except ZabbixAPIException as e: + logging.error("Error when setting hostgroups on host %s: %s", host, e) else: - logging.debug("DRYRUN: Setting hostgroups on host: '%s'", host["host"]) + logging.info("Set hostgroups %s on host: %s", to_add, host) def create_hostgroup(self, hostgroup_name: str) -> Optional[str]: if self.config.dryrun: @@ -1417,27 +1611,23 @@ def create_hostgroup(self, hostgroup_name: str) -> Optional[str]: logging.debug("Creating hostgroup: '%s'", hostgroup_name) try: - result = self.api.hostgroup.create(name=hostgroup_name) - groupid = result["groupids"][0] + groupid = self.api.create_hostgroup(hostgroup_name) logging.info("Created host group '%s' (%s)", hostgroup_name, groupid) return groupid - except pyzabbix.ZabbixAPIException as e: - logging.error( - "Error when creating hostgroups '%s': %s", hostgroup_name, e.args - ) + except ZabbixAPIException as e: + logging.error("Error when creating hostgroups '%s': %s", hostgroup_name, e) return None - def create_extra_hostgroups( - self, existing_hostgroups: List[Dict[str, str]] - ) -> None: + def create_extra_hostgroups(self, existing_hostgroups: List[HostGroup]) -> None: """Creates additonal host groups based on the prefixes specified in the config file. 
These host groups are not assigned hosts by ZAC.""" - hostgroup_names = set(h["name"] for h in existing_hostgroups) + hostgroup_names = set(h.name for h in existing_hostgroups) for prefix in self.config.extra_siteadmin_hostgroup_prefixes: mapping = utils.mapping_values_with_prefix( self.siteadmin_hostgroup_map, # this is copied in the function prefix=prefix, + separator=self.config.prefix_separator, ) for hostgroups in mapping.values(): for hostgroup in hostgroups: @@ -1452,41 +1642,46 @@ def create_templategroup(self, templategroup_name: str) -> Optional[str]: logging.debug("Creating template group: '%s'", templategroup_name) try: - result = self.api.templategroup.create(name=templategroup_name) - groupid = result["groupids"][0] + groupid = self.api.create_templategroup(templategroup_name) logging.info( "Created template group '%s' (%s)", templategroup_name, groupid ) return groupid - except pyzabbix.ZabbixAPIException as e: + except ZabbixAPIException as e: logging.error( - "Error when creating template group '%s': %s", - templategroup_name, - e.args, + "Error when creating template group '%s': %s", templategroup_name, e ) return None - def create_templategroups(self, existing_hostgroups: List[Dict[str, str]]) -> None: + def create_templategroups(self, existing_hostgroups: List[HostGroup]) -> None: """Creates template groups for each host group in the siteadmin mapping file with the configured template group prefix. For Zabbix <6.2, host groups are created instead of template groups.""" # Construct a set of all template group names from siteadmin mapping file - # by replacing the host group prefix with the template group prefix + # by replacing the host group prefix with the template group prefix. + # The prefix is determined by the separator defined in the config file. + # If we use the template group prefix `Templates-`, we go from + # `Siteadmin-bob-hosts` to `Templates-bob-hosts`. tgroups = set( - utils.with_prefix(tg, self.config.templategroup_prefix) + utils.with_prefix( + tg, + self.config.templategroup_prefix, + separator=self.config.prefix_separator, + ) for tg in itertools.chain.from_iterable( self.siteadmin_hostgroup_map.values() ) ) - if self.zabbix_version.release >= (6, 2, 0): + if compat.templategroups_supported(self.zabbix_version): logging.debug( - "Zabbix version is %s. Creating template groups.", self.zabbix_version + "Zabbix version is %s. Will create template groups.", + self.zabbix_version, ) self._create_templategroups(tgroups) else: logging.debug( - "Zabbix version is %s. Creating template groups as host groups.", + "Zabbix version is %s. Will create host groups instead of template groups.", self.zabbix_version, ) self._create_templategroups_pre_62_compat(tgroups, existing_hostgroups) @@ -1497,15 +1692,15 @@ def _create_templategroups(self, tgroups: Set[str]) -> None: Args: tgroups: A set of template group names to create. """ - res = self.api.templategroup.get(output=["name", "groupid"]) - existing_tgroups = set(tg["name"] for tg in res) + res = self.api.get_templategroups() + existing_tgroups = set(tg.name for tg in res) for tgroup in tgroups: if tgroup in existing_tgroups: continue self.create_templategroup(tgroup) def _create_templategroups_pre_62_compat( - self, tgroups: Set[str], existing_hostgroups: List[Dict[str, str]] + self, tgroups: Set[str], existing_hostgroups: List[HostGroup] ) -> None: """Compatibility method for creating template groups on Zabbix <6.2. 
@@ -1513,23 +1708,23 @@ def _create_templategroups_pre_62_compat( host groups with the given names. Args: - tgroups: A set of template group names to create. + tgroups: A set of host group names to create. """ - existing_hgroup_names = set(h["name"] for h in existing_hostgroups) + existing_hgroup_names = set(h.name for h in existing_hostgroups) for tgroup in tgroups: if tgroup in existing_hgroup_names: continue self.create_hostgroup(tgroup) - def do_update(self): + def do_update(self) -> None: managed_hostgroup_names = set( itertools.chain.from_iterable(self.property_hostgroup_map.values()) - ) + ) # type: Set[str] managed_hostgroup_names.update( itertools.chain.from_iterable(self.siteadmin_hostgroup_map.values()) ) - existing_hostgroups = self.api.hostgroup.get(output=["name", "groupid"]) + existing_hostgroups = self.api.get_hostgroups() # Create extra host groups if necessary if self.config.extra_siteadmin_hostgroup_prefixes: @@ -1539,65 +1734,57 @@ def do_update(self): if self.config.create_templategroups: self.create_templategroups(existing_hostgroups) - zabbix_hostgroups = {} + zabbix_hostgroups: Dict[str, HostGroup] = {} # type: Dict[str, str] for zabbix_hostgroup in existing_hostgroups: - zabbix_hostgroups[zabbix_hostgroup["name"]] = zabbix_hostgroup["groupid"] - if zabbix_hostgroup["name"].startswith(self.config.hostgroup_source_prefix): - managed_hostgroup_names.add(zabbix_hostgroup["name"]) - if zabbix_hostgroup["name"].startswith( + zabbix_hostgroups[zabbix_hostgroup.name] = zabbix_hostgroup + if zabbix_hostgroup.name.startswith(self.config.hostgroup_source_prefix): + managed_hostgroup_names.add(zabbix_hostgroup.name) + if zabbix_hostgroup.name.startswith( self.config.hostgroup_importance_prefix ): - managed_hostgroup_names.add(zabbix_hostgroup["name"]) + managed_hostgroup_names.add(zabbix_hostgroup.name) managed_hostgroup_names.update([self.config.hostgroup_all]) - with self.db_connection, self.db_connection.cursor() as db_cursor: - db_cursor.execute( - f"SELECT data FROM {self.db_hosts_table} WHERE data->>'enabled' = 'true'" - ) - db_hosts = { - t[0]["hostname"]: models.Host(**t[0]) for t in db_cursor.fetchall() - } - zabbix_hosts = { - host["host"]: host - for host in self.api.host.get( - filter={"status": 0, "flags": 0}, - output=["hostid", "host"], - selectGroups=["groupid", "name"], - selectParentTemplates=["templateid", "host"], - ) - } + # Get hosts from DB + db_hosts = self.get_db_hosts() + + # Get hosts from Zabbix + _hosts = self.api.get_hosts( + status=MonitoringStatus.ON, + flags=0, + select_groups=True, + select_templates=True, + ) + zabbix_hosts = {host.host: host for host in _hosts} + # Iterate over hosts in Zabbix and update synced hosts for zabbix_hostname, zabbix_host in zabbix_hosts.items(): if self.stop_event.is_set(): logging.debug("Told to stop. 
Breaking") break + # Host is manually managed - skip it if self.config.hostgroup_manual in [ - group["name"] for group in zabbix_host["groups"] + group.name for group in zabbix_host.groups ]: - logging.debug( - "Skipping manual host: '%s' (%s)", - zabbix_hostname, - zabbix_host["hostid"], - ) + logging.debug("Skipping manual host: %s", zabbix_host) continue + # Disabled hosts are not managed if zabbix_hostname not in db_hosts: logging.debug( - "Skipping host (It is not enabled in the database): '%s' (%s)", - zabbix_hostname, - zabbix_host["hostid"], + "Skipping host (It is not enabled in the database): %s", zabbix_host ) continue db_host = db_hosts[zabbix_hostname] + # Determine host groups to sync for host + # Sync host groups derived from its properties, siteadmins, sources, etc. synced_hostgroup_names = set([self.config.hostgroup_all]) - for _property in db_host.properties: - if _property in self.property_hostgroup_map: - synced_hostgroup_names.update( - self.property_hostgroup_map[_property] - ) + for prop in db_host.properties: + if prop in self.property_hostgroup_map: + synced_hostgroup_names.update(self.property_hostgroup_map[prop]) for siteadmin in db_host.siteadmins: if siteadmin in self.siteadmin_hostgroup_map: synced_hostgroup_names.update( @@ -1616,42 +1803,53 @@ def do_update(self): f"{self.config.hostgroup_importance_prefix}X" ) - host_hostgroups = {} - for zabbix_hostgroup in zabbix_host["groups"]: - host_hostgroups[zabbix_hostgroup["name"]] = zabbix_hostgroup["groupid"] - + host_hostgroups: Dict[str, HostGroup] = {} + for zabbix_hostgroup in zabbix_host.groups: + host_hostgroups[zabbix_hostgroup.name] = zabbix_hostgroup old_host_hostgroups = host_hostgroups.copy() for hostgroup_name in list(host_hostgroups.keys()): # TODO: Here lies a bug due to managed_hostgroup_names not being properly updated above? + # NOTE (pederhan): Not sure what this refers to? if ( hostgroup_name in managed_hostgroup_names and hostgroup_name not in synced_hostgroup_names ): logging.debug( - "Going to remove hostgroup '%s' from host '%s'.", + "Going to remove hostgroup '%s' from host %s.", hostgroup_name, - zabbix_hostname, + zabbix_host, ) del host_hostgroups[hostgroup_name] + + # Update host groups for host + # Creates synced host groups if they don't exist for hostgroup_name in synced_hostgroup_names: if hostgroup_name not in host_hostgroups.keys(): logging.debug( - "Going to add hostgroup '%s' to host '%s'.", + "Going to add hostgroup '%s' to host %s.", hostgroup_name, - zabbix_hostname, + zabbix_host, ) - zabbix_hostgroup_id = zabbix_hostgroups.get(hostgroup_name, None) - if not zabbix_hostgroup_id: + zabbix_hostgroup = zabbix_hostgroups.get(hostgroup_name, None) + if not zabbix_hostgroup: # The hostgroup doesn't exist. We need to create it. - zabbix_hostgroup_id = self.create_hostgroup(hostgroup_name) - host_hostgroups[hostgroup_name] = zabbix_hostgroup_id + hostgroup_id = self.create_hostgroup(hostgroup_name) + # Add group to mapping so we don't try to create it again + if hostgroup_id: + zabbix_hostgroup = self.api.get_hostgroup(hostgroup_id) + zabbix_hostgroups[hostgroup_name] = zabbix_hostgroup + + if zabbix_hostgroup: + host_hostgroups[hostgroup_name] = zabbix_hostgroup - if host_hostgroups != old_host_hostgroups: + # Compare names of host groups to see if they are changed + if sorted(host_hostgroups) != sorted(old_host_hostgroups): logging.info( - "Updating hostgroups on host '%s'. Old: %s. New: %s", + "Updating host groups on host '%s'. Old: %s. 
New: %s", zabbix_hostname, - ", ".join(old_host_hostgroups.keys()), - ", ".join(host_hostgroups.keys()), + # Just re-compute here (it's cheap enough) + ", ".join(sorted(old_host_hostgroups)), + ", ".join(sorted(host_hostgroups)), ) - self.set_hostgroups(host_hostgroups, zabbix_host) + self.set_hostgroups(zabbix_host, list(host_hostgroups.values())) diff --git a/zabbix_auto_config/py.typed b/zabbix_auto_config/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/zabbix_auto_config/pyzabbix/__init__.py b/zabbix_auto_config/pyzabbix/__init__.py new file mode 100644 index 0000000..368779c --- /dev/null +++ b/zabbix_auto_config/pyzabbix/__init__.py @@ -0,0 +1,3 @@ +from __future__ import annotations + +# from .pyzabbix import * # noqa diff --git a/zabbix_auto_config/pyzabbix/client.py b/zabbix_auto_config/pyzabbix/client.py new file mode 100644 index 0000000..ea1643e --- /dev/null +++ b/zabbix_auto_config/pyzabbix/client.py @@ -0,0 +1,2189 @@ +# +# The code in this file is based on the pyzabbix library: +# https://github.com/lukecyca/pyzabbix +# +# Numerous changes have been made to the original code to make it more +# type-safe and to better fit the use-cases of the zabbix-cli project. +# +# We have modified the login method to be able to send an auth-token so +# we do not have to login again as long as the auth-token used is still +# active. +# +# We have also modified the output when an error happens to not show +# the username + password information. +# +from __future__ import annotations + +import logging +from datetime import datetime +from typing import TYPE_CHECKING +from typing import Any +from typing import Dict +from typing import List +from typing import Literal +from typing import MutableMapping +from typing import Optional +from typing import Union + +import httpx +from pydantic import ValidationError + +from zabbix_auto_config.__about__ import __version__ +from zabbix_auto_config.exceptions import ZabbixAPICallError +from zabbix_auto_config.exceptions import ZabbixAPIException +from zabbix_auto_config.exceptions import ZabbixAPIRequestError +from zabbix_auto_config.exceptions import ZabbixAPIResponseParsingError +from zabbix_auto_config.exceptions import ZabbixNotFoundError +from zabbix_auto_config.pyzabbix import compat +from zabbix_auto_config.pyzabbix.enums import AgentAvailable +from zabbix_auto_config.pyzabbix.enums import DataCollectionMode +from zabbix_auto_config.pyzabbix.enums import GUIAccess +from zabbix_auto_config.pyzabbix.enums import InterfaceType +from zabbix_auto_config.pyzabbix.enums import InventoryMode +from zabbix_auto_config.pyzabbix.enums import MaintenanceStatus +from zabbix_auto_config.pyzabbix.enums import MonitoredBy +from zabbix_auto_config.pyzabbix.enums import MonitoringStatus +from zabbix_auto_config.pyzabbix.enums import TriggerPriority +from zabbix_auto_config.pyzabbix.enums import UsergroupPermission +from zabbix_auto_config.pyzabbix.enums import UserRole +from zabbix_auto_config.pyzabbix.types import CreateHostInterfaceDetails +from zabbix_auto_config.pyzabbix.types import GlobalMacro +from zabbix_auto_config.pyzabbix.types import Host +from zabbix_auto_config.pyzabbix.types import HostGroup +from zabbix_auto_config.pyzabbix.types import HostInterface +from zabbix_auto_config.pyzabbix.types import HostTag +from zabbix_auto_config.pyzabbix.types import Image +from zabbix_auto_config.pyzabbix.types import ImportRules +from zabbix_auto_config.pyzabbix.types import Item +from zabbix_auto_config.pyzabbix.types import Json +from 
zabbix_auto_config.pyzabbix.types import Macro +from zabbix_auto_config.pyzabbix.types import Maintenance +from zabbix_auto_config.pyzabbix.types import Map +from zabbix_auto_config.pyzabbix.types import MediaType +from zabbix_auto_config.pyzabbix.types import ParamsType +from zabbix_auto_config.pyzabbix.types import Proxy +from zabbix_auto_config.pyzabbix.types import Role +from zabbix_auto_config.pyzabbix.types import Template +from zabbix_auto_config.pyzabbix.types import TemplateGroup +from zabbix_auto_config.pyzabbix.types import Trigger +from zabbix_auto_config.pyzabbix.types import UpdateHostInterfaceDetails +from zabbix_auto_config.pyzabbix.types import User +from zabbix_auto_config.pyzabbix.types import Usergroup +from zabbix_auto_config.pyzabbix.types import UserMedia +from zabbix_auto_config.pyzabbix.types import ZabbixAPIResponse +from zabbix_auto_config.pyzabbix.types import ZabbixRight + +if TYPE_CHECKING: + from httpx._types import TimeoutTypes + from packaging.version import Version + from typing_extensions import TypedDict + + from zabbix_auto_config.pyzabbix.types import ModifyGroupParams # noqa: F401 + from zabbix_auto_config.pyzabbix.types import ModifyHostParams # noqa: F401 + from zabbix_auto_config.pyzabbix.types import ModifyTemplateParams # noqa: F401 + from zabbix_auto_config.pyzabbix.types import SortOrder # noqa: F401 + + class HTTPXClientKwargs(TypedDict, total=False): + timeout: TimeoutTypes + + +logger = logging.getLogger(__name__) + +RPC_ENDPOINT = "/api_jsonrpc.php" + + +def append_param( + data: MutableMapping[str, Any], key: str, value: Any +) -> MutableMapping[str, Any]: + """Append a value to a list in a dictionary. + + If the key does not exist in the dictionary, it is created with a list + containing the value. If the key already exists and the value is not a list, + the value is converted to a list and appended to the existing list. + """ + if key in data: + if not isinstance(data[key], list): + logger.debug("Converting param %s to list", key, stacklevel=2) + data[key] = [data[key]] + else: + data[key] = [] + data[key].append(value) + return data + + +def add_param( + data: MutableMapping[str, Any], key: str, subkey: str, value: Any +) -> MutableMapping[str, Any]: + """Add a value to a nested dict in dict.""" + if key in data: + if not isinstance(data[key], dict): + logger.debug("Converting param %s to dict", key, stacklevel=2) + data[key] = {key: data[key]} + else: + data[key] = {} + data[key][subkey] = value + return data + + +class ZabbixAPI: + def __init__( + self, + server: str = "http://localhost/zabbix", + timeout: Optional[int] = None, + read_only: bool = False, + ): + """Parameters: + server: Base URI for zabbix web interface (omitting /api_jsonrpc.php) + timeout: optional connect and read timeout in seconds. 
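A quick illustration of what `append_param` and `add_param` above do to a request parameter dict; the values are made up, and the import path assumes the new `client.py` module added by this patch.

```python
from zabbix_auto_config.pyzabbix.client import add_param, append_param

params = {"hostids": "10084"}
append_param(params, "hostids", "10085")
# -> {"hostids": ["10084", "10085"]}: the existing scalar is promoted to a list

search = {}
add_param(search, "search", "macro", "{$SNMP_COMMUNITY}")
# -> {"search": {"macro": "{$SNMP_COMMUNITY}"}}: nested dict created on demand
```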
+ """ + self.timeout = timeout if timeout else None + self.session = self._get_client(verify_ssl=True, timeout=timeout) + self.read_only = read_only + + self.auth = "" + self.id = 0 + + server, _, _ = server.partition(RPC_ENDPOINT) + self.url = f"{server}/api_jsonrpc.php" + logger.info("JSON-RPC Server Endpoint: %s", self.url) + + # Attributes for properties + self._version: Optional[Version] = None + + def _get_client( + self, verify_ssl: bool, timeout: Union[float, int, None] = None + ) -> httpx.Client: + kwargs: HTTPXClientKwargs = {} + if timeout is not None: + kwargs["timeout"] = timeout + client = httpx.Client( + verify=verify_ssl, + # Default headers for all requests + headers={ + "Content-Type": "application/json-rpc", + "User-Agent": f"python/zabbix-auto-config/{__version__}", + "Cache-Control": "no-cache", + }, + **kwargs, + ) + return client + + def login( + self, + user: Optional[str] = None, + password: Optional[str] = None, + auth_token: Optional[str] = None, + ) -> str: + """Convenience method for logging into the API and storing the resulting + auth token as an instance variable. + """ + # Before we do anything, we try to fetch the API version + # Without an API connection, we cannot determine + # the user parameter name to use when logging in. + try: + self.version # property + except ZabbixAPIRequestError as e: + raise ZabbixAPIException( + f"Failed to connect to Zabbix API at {self.url}" + ) from e + + # The username kwarg was called "user" in Zabbix 5.2 and earlier. + # This sets the correct kwarg for the version of Zabbix we're using. + user_kwarg = {compat.login_user_name(self.version): user} + + self.auth = "" # clear auth before trying to (re-)login + + if not auth_token: + try: + auth = self.user.login(**user_kwarg, password=password) + except Exception as e: + raise ZabbixAPIRequestError( + f"Failed to log in to Zabbix API: {e}" + ) from e + else: + auth = auth_token + # TODO: confirm we are logged in here + # self.api_version() # NOTE: useless? can we remove this? + self.auth = str(auth) if auth else "" # ensure str + return self.auth + + def confimport(self, format: str, source: str, rules: ImportRules) -> Any: + """Alias for configuration.import because it clashes with + Python's import reserved keyword + """ + return self.do_request( + method="configuration.import", + params={ + "format": format, + "source": source, + "rules": rules.model_dump_api(), + }, + ).result + + # TODO (pederhan): Use functools.cachedproperty when we drop 3.7 support + @property + def version(self) -> Version: + """Alternate version of api_version() that caches version info + as a Version object. 
+ """ + if self._version is None: + from packaging.version import Version + + self._version = Version(self.apiinfo.version()) + return self._version + + def api_version(self): + return self.apiinfo.version() + + def do_request( + self, method: str, params: Optional[ParamsType] = None + ) -> ZabbixAPIResponse: + params = params or {} + + request_json: Dict[str, Json] = { + "jsonrpc": "2.0", + "method": method, + "params": params, + "id": self.id, + } + + # We don't have to pass the auth token if asking for the apiinfo.version + if self.auth and method != "apiinfo.version": + request_json["auth"] = self.auth + # TODO: ensure we have auth token if method requires it + + logger.debug("Sending %s to %s", method, self.url) + + try: + response = self.session.post(self.url, json=request_json) + except Exception as e: + raise ZabbixAPIRequestError( + f"Failed to send request to {self.url} ({method}) with params {params}", + params=params, + ) from e + + logger.debug("Response Code: %s", str(response.status_code)) + + # NOTE: Getting a 412 response code means the headers are not in the + # list of allowed headers. + # OR we didnt pass an auth token + response.raise_for_status() + + if not len(response.text): + raise ZabbixAPIRequestError("Received empty response", response=response) + + self.id += 1 + + try: + resp = ZabbixAPIResponse.model_validate_json(response.text) + except ValidationError as e: + raise ZabbixAPIResponseParsingError( + "Zabbix API returned malformed response", response=response + ) from e + except ValueError as e: + raise ZabbixAPIResponseParsingError( + "Zabbix API returned invalid JSON", response=response + ) from e + + if resp.error is not None: + # some errors don't contain 'data': workaround for ZBX-9340 + if not resp.error.data: + resp.error.data = "No data" + raise ZabbixAPIRequestError( + f"Error: {resp.error.message} {resp.error.data}", + api_response=resp, + response=response, + ) + return resp + + def get_hostgroup( + self, + name_or_id: str, + search: bool = False, + select_hosts: bool = False, + select_templates: bool = False, + sort_order: Optional[SortOrder] = None, + sort_field: Optional[str] = None, + ) -> HostGroup: + """Fetch a host group given its name or ID. + + Name or ID argument is interpeted as an ID if the argument is numeric. + + Uses filtering by default, but can be switched to searching by setting + the `search` argument to True. + + Args: + name_or_id (str): Name or ID of the host group. + search (bool, optional): Search for host groups using the given pattern instead of filtering. Defaults to False. + select_hosts (bool, optional): Fetch hosts in host groups. Defaults to False. + select_templates (bool, optional): <6.2 ONLY: Fetch templates in host groups. Defaults to False. + + Raises: + ZabbixNotFoundError: Group is not found. + + Returns: + HostGroup: The host group object. + """ + hostgroups = self.get_hostgroups( + name_or_id, + search=search, + sort_order=sort_order, + sort_field=sort_field, + select_hosts=select_hosts, + select_templates=select_templates, + ) + if not hostgroups: + raise ZabbixNotFoundError(f"Host group {name_or_id!r} not found") + return hostgroups[0] + + def get_hostgroups( + self, + *names_or_ids: str, + search: bool = False, + search_union: bool = True, + select_hosts: bool = False, + select_templates: bool = False, + sort_order: Optional[SortOrder] = None, + sort_field: Optional[str] = None, + ) -> List[HostGroup]: + """Fetch a list of host groups given its name or ID. 
+ + Name or ID argument is interpeted as an ID if the argument is numeric. + + Uses filtering by default, but can be switched to searching by setting + the `search` argument to True. + + Args: + name_or_id (str): Name or ID of the host group. + search (bool, optional): Search for host groups using the given pattern instead of filtering. Defaults to False. + search_union (bool, optional): Union searching. Has no effect if `search` is False. Defaults to True. + select_hosts (bool, optional): Fetch hosts in host groups. Defaults to False. + select_templates (bool, optional): <6.2 ONLY: Fetch templates in host groups. Defaults to False. + sort_order (SortOrder, optional): Sort order. Defaults to None. + sort_field (str, optional): Sort field. Defaults to None. + + Raises: + ZabbixNotFoundError: Group is not found. + + Returns: + List[HostGroup]: List of host groups. + """ + # TODO: refactor this along with other methods that take names or ids (or wildcards) + params: ParamsType = {"output": "extend"} + search_params: ParamsType = {} + + if "*" in names_or_ids: + names_or_ids = tuple() + + if names_or_ids: + for name_or_id in names_or_ids: + norid = name_or_id.strip() + is_id = norid.isnumeric() + norid_key = "groupid" if is_id else "name" + if search and not is_id: + params["searchWildcardsEnabled"] = True + params["searchByAny"] = search_union + append_param(search_params, "name", name_or_id) + else: + params["filter"] = {norid_key: name_or_id} + + if search_params: + params["search"] = search_params + if select_hosts: + params["selectHosts"] = "extend" + if self.version.release < (6, 2, 0) and select_templates: + params["selectTemplates"] = "extend" + if sort_order: + params["sortorder"] = sort_order + if sort_field: + params["sortfield"] = sort_field + + resp: List[Any] = self.hostgroup.get(**params) or [] + return [HostGroup(**hostgroup) for hostgroup in resp] + + def create_hostgroup(self, name: str) -> str: + """Create a host group with the given name.""" + try: + resp = self.hostgroup.create(name=name) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to create host group {name!r}: {e}" + ) from e + if not resp or not resp.get("groupids"): + raise ZabbixAPICallError( + "Host group creation returned no data. Unable to determine if group was created." + ) + return str(resp["groupids"][0]) + + def delete_hostgroup(self, hostgroup_id: str) -> None: + """Deletes a host group given its ID.""" + try: + self.hostgroup.delete(hostgroup_id) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to delete host group(s) with ID {hostgroup_id}" + ) from e + + def set_host_hostgroups(self, host: Host, hostgroups: List[HostGroup]) -> None: + """Sets a host's groups. 
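A hedged usage sketch for the host group helpers above; group names are examples and error handling is trimmed.

```python
from zabbix_auto_config.exceptions import ZabbixNotFoundError
from zabbix_auto_config.pyzabbix.client import ZabbixAPI

api = ZabbixAPI(server="https://zabbix.example.com")
api.login(user="zac", password="secret")

# Wildcard search for all source-prefixed groups.
source_groups = api.get_hostgroups("Source-*", search=True)
print([g.name for g in source_groups])

# Create a group only if a filtered lookup finds nothing.
try:
    api.get_hostgroup("All-hosts")
except ZabbixNotFoundError:
    groupid = api.create_hostgroup("All-hosts")
    print("Created group", groupid)
```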
+ + Removes host from any groups not present in the `hostgroups` argument.""" + try: + self.host.update( + hostid=host.hostid, + groups=[{"groupid": hg.groupid} for hg in hostgroups], + ) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to set host groups for host {host.hostid}" + ) from e + + def add_hosts_to_hostgroups( + self, hosts: List[Host], hostgroups: List[HostGroup] + ) -> None: + """Adds hosts to one or more host groups.""" + try: + self.hostgroup.massadd( + groups=[{"groupid": hg.groupid} for hg in hostgroups], + hosts=[{"hostid": host.hostid} for host in hosts], + ) + except ZabbixAPIException as e: + hgs = ", ".join(hg.name for hg in hostgroups) + raise ZabbixAPICallError(f"Failed to add hosts to {hgs}") from e + + def remove_hosts_from_hostgroups( + self, hosts: List[Host], hostgroups: List[HostGroup] + ) -> None: + """Removes the given hosts from one or more host groups.""" + try: + self.hostgroup.massremove( + groupids=[hg.groupid for hg in hostgroups], + hostids=[host.hostid for host in hosts], + ) + except ZabbixAPIException as e: + hgs = ", ".join(hg.name for hg in hostgroups) + raise ZabbixAPICallError(f"Failed to remove hosts from {hgs}") from e + + def get_templategroup( + self, + name_or_id: str, + search: bool = False, + select_templates: bool = False, + ) -> TemplateGroup: + """Fetch a template group given its name or ID. + + Name or ID argument is interpeted as an ID if the argument is numeric. + + Uses filtering by default, but can be switched to searching by setting + the `search` argument to True. + + Args: + name_or_id (str): Name or ID of the template group. + search (bool, optional): Search for template groups using the given pattern instead of filtering. Defaults to False. + select_templates (bool, optional): Fetch full information for each template in the group. Defaults to False. + + Raises: + ZabbixNotFoundError: Group is not found. + + Returns: + TemplateGroup: The template group object. + """ + tgroups = self.get_templategroups( + name_or_id, search=search, select_templates=select_templates + ) + if not tgroups: + raise ZabbixNotFoundError(f"Template group {name_or_id!r} not found") + return tgroups[0] + + def get_templategroups( + self, + *names_or_ids: str, + search: bool = False, + search_union: bool = True, + select_templates: bool = False, + sort_field: Optional[str] = None, + sort_order: Optional[SortOrder] = None, + ) -> List[TemplateGroup]: + """Fetch a list of template groups, optionally filtered by name(s). + + Name or ID argument is interpeted as an ID if the argument is numeric. + + Uses filtering by default, but can be switched to searching by setting + the `search` argument to True. + + Args: + name_or_id (str): Name or ID of the template group. + search (bool, optional): Search for template groups using the given pattern instead of filtering. Defaults to False. + search_union (bool, optional): Union searching. Has no effect if `search` is False. Defaults to True. + select_templates (bool, optional): Fetch templates in each group. Defaults to False. + sort_order (SortOrder, optional): Sort order. Defaults to None. + sort_field (str, optional): Sort field. Defaults to None. + + Raises: + ZabbixNotFoundError: Group is not found. + + Returns: + List[TemplateGroup]: List of template groups. 
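The bulk helpers above wrap `hostgroup.massadd`/`hostgroup.massremove`; moving hosts between groups could look like this (`api` is a logged-in client as in the earlier sketch, names are examples).

```python
hosts = api.get_hosts("foo.example.com", "bar.example.com", search=False)
staging = api.get_hostgroup("Source-staging")
production = api.get_hostgroup("Source-production")

api.add_hosts_to_hostgroups(hosts, [production])
api.remove_hosts_from_hostgroups(hosts, [staging])
```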
+ """ + # FIXME: ensure we use searching correctly here + # TODO: refactor this along with other methods that take names or ids (or wildcards) + params: ParamsType = {"output": "extend"} + search_params: ParamsType = {} + + if "*" in names_or_ids: + names_or_ids = tuple() + + if names_or_ids: + for name_or_id in names_or_ids: + norid = name_or_id.strip() + is_id = norid.isnumeric() + norid_key = "groupid" if is_id else "name" + if search and not is_id: + params["searchWildcardsEnabled"] = True + params["searchByAny"] = search_union + append_param(search_params, "name", name_or_id) + else: + params["filter"] = {norid_key: name_or_id} + if search_params: + params["search"] = search_params + if select_templates: + params["selectTemplates"] = "extend" + if sort_order: + params["sortorder"] = sort_order + if sort_field: + params["sortfield"] = sort_field + + try: + resp: List[Any] = self.templategroup.get(**params) or [] + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to fetch template groups") from e + return [TemplateGroup(**tgroup) for tgroup in resp] + + def create_templategroup(self, name: str) -> str: + """Create a template group with the given name.""" + try: + resp = self.templategroup.create(name=name) + except ZabbixAPIException as e: + raise ZabbixAPICallError(f"Failed to create template group {name!r}") from e + if not resp or not resp.get("groupids"): + raise ZabbixAPICallError( + "Template group creation returned no data. Unable to determine if group was created." + ) + return str(resp["groupids"][0]) + + def delete_templategroup(self, templategroup_id: str) -> None: + """Deletes a template group given its ID.""" + try: + self.templategroup.delete(templategroup_id) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to delete template group(s) with ID {templategroup_id}" + ) from e + + def get_host( + self, + name_or_id: str, + select_groups: bool = False, + select_templates: bool = False, + select_interfaces: bool = False, + select_inventory: bool = False, + select_macros: bool = False, + proxyid: Optional[str] = None, + maintenance: Optional[MaintenanceStatus] = None, + status: Optional[MonitoringStatus] = None, + agent_status: Optional[AgentAvailable] = None, + sort_field: Optional[str] = None, + sort_order: Optional[SortOrder] = None, + search: bool = False, + ) -> Host: + """Fetch a host given a name or id.""" + hosts = self.get_hosts( + name_or_id, + select_groups=select_groups, + select_templates=select_templates, + select_inventory=select_inventory, + select_interfaces=select_interfaces, + select_macros=select_macros, + proxyid=proxyid, + sort_field=sort_field, + sort_order=sort_order, + search=search, + maintenance=maintenance, + status=status, + agent_status=agent_status, + ) + if not hosts: + raise ZabbixNotFoundError( + f"Host {name_or_id!r} not found. Check your search pattern and filters." + ) + return hosts[0] + + def get_hosts( + self, + *names_or_ids: str, + select_groups: bool = False, + select_templates: bool = False, + select_inventory: bool = False, + select_macros: bool = False, + select_interfaces: bool = False, + select_tags: bool = False, + proxyid: Optional[str] = None, + # These params take special API values we don't want to evaluate + # inside this method, so we delegate it to the enums. 
+ maintenance: Optional[MaintenanceStatus] = None, + status: Optional[MonitoringStatus] = None, + agent_status: Optional[AgentAvailable] = None, + flags: Optional[int] = None, + sort_field: Optional[str] = None, + sort_order: Optional[Literal["ASC", "DESC"]] = None, + search: Optional[ + bool + ] = True, # we generally always want to search when multiple hosts are requested + # **filter_kwargs, + ) -> List[Host]: + """Fetch all hosts matching the given criteria(s). + + Hosts can be filtered by name or ID. Names and IDs cannot be mixed. + If no criteria are given, all hosts are returned. + + A number of extra properties can be fetched for each host by setting + the corresponding `select_*` argument to `True`. Each Host object + will have the corresponding property populated. + + + If `search=True`, only a single hostname pattern should be given; + criterias are matched using logical AND (narrows down results). + If `search=False`, multiple hostnames or IDs can be used. + + Args: + select_groups (bool, optional): Include host (& template groups if >=6.2). Defaults to False. + select_templates (bool, optional): Include templates. Defaults to False. + select_inventory (bool, optional): Include inventory items. Defaults to False. + select_macros (bool, optional): Include host macros. Defaults to False. + proxyid (Optional[str], optional): Filter by proxy ID. Defaults to None. + maintenance (Optional[MaintenanceStatus], optional): Filter by maintenance status. Defaults to None. + status (Optional[MonitoringStatus], optional): Filter by monitoring status. Defaults to None. + agent_status (Optional[AgentAvailable], optional): Filter by agent availability. Defaults to None. + sort_field (Optional[str], optional): Sort hosts by the given field. Defaults to None. + sort_order (Optional[Literal[ASC, DESC]], optional): Sort order. Defaults to None. + search (Optional[bool], optional): Force positional arguments to be treated as a search pattern. Defaults to True. 
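The search/filter semantics spelled out above are easiest to see side by side; a sketch with example hostnames (`api` as before):

```python
from zabbix_auto_config.pyzabbix.enums import MonitoringStatus

# One wildcard pattern; filters narrow the result set (logical AND).
monitored = api.get_hosts(
    "*.example.com",
    status=MonitoringStatus.ON,
    select_groups=True,
    search=True,
)

# Several exact names (or IDs, but never a mix of the two).
pair = api.get_hosts("foo.example.com", "bar.example.com", search=False)
```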
+ + Raises: + ZabbixAPIException: _description_ + + Returns: + List[Host]: _description_ + """ + params: ParamsType = {"output": "extend"} + filter_params: ParamsType = {} + search_params: ParamsType = {} + + # Filter by the given host name or ID if we have one + if names_or_ids: + id_mode: Optional[bool] = None + for name_or_id in names_or_ids: + name_or_id = name_or_id.strip() + is_id = name_or_id.isnumeric() + if search is None: # determine if we should search + search = not is_id + + # Set ID mode if we haven't already + # and ensure we aren't mixing IDs and names + if id_mode is None: + id_mode = is_id + else: + if id_mode != is_id: + raise ZabbixAPICallError("Cannot mix host names and IDs.") + + # Searching for IDs is pointless - never allow it + # Logical AND for multiple unique identifiers is not possible + if search and not is_id: + params["searchWildcardsEnabled"] = True + params["searchByAny"] = True + append_param(search_params, "host", name_or_id) + elif is_id: + append_param(params, "hostids", name_or_id) + else: + append_param(filter_params, "host", name_or_id) + + # Filters are applied with a logical AND (narrows down) + if proxyid: + filter_params[compat.host_proxyid(self.version)] = proxyid + if maintenance is not None: + filter_params["maintenance_status"] = maintenance + if status is not None: + filter_params["status"] = status + if agent_status is not None: + filter_params[compat.host_available(self.version)] = agent_status + if flags is not None: + filter_params["flags"] = flags + + if filter_params: # Only add filter if we actually have filter params + params["filter"] = filter_params + if search_params: # ditto for search params + params["search"] = search_params + + if select_groups: + # still returns the result under the "groups" property + # even if we use the new 6.2 selectHostGroups param + param = compat.param_host_get_groups(self.version) + params[param] = "extend" + if select_templates: + params["selectParentTemplates"] = "extend" + if select_inventory: + params["selectInventory"] = "extend" + if select_macros: + params["selectMacros"] = "extend" + if select_interfaces: + params["selectInterfaces"] = "extend" + if select_tags: + params["selectTags"] = "extend" + if sort_field: + params["sortfield"] = sort_field + if sort_order: + params["sortorder"] = sort_order + + resp: List[Any] = self.host.get(**params) or [] + # TODO add result to cache + return [Host(**resp) for resp in resp] + + def create_host( + self, + host: str, + groups: List[HostGroup], + proxy: Optional[Proxy] = None, + status: MonitoringStatus = MonitoringStatus.ON, + interfaces: Optional[List[HostInterface]] = None, + inventory_mode: InventoryMode = InventoryMode.AUTOMATIC, + inventory: Optional[Dict[str, Any]] = None, + description: Optional[str] = None, + ) -> str: + params: ParamsType = { + "host": host, + "status": status, + "inventory_mode": inventory_mode, + } + + # dedup group IDs + groupids = list({group.groupid for group in groups}) + params["groups"] = [{"groupid": groupid} for groupid in groupids] + + if proxy: + params[compat.host_proxyid(self.version)] = proxy.proxyid + + if interfaces: + params["interfaces"] = [iface.model_dump_api() for iface in interfaces] + + if inventory: + params["inventory"] = inventory + + if description: + params["description"] = description + + try: + resp = self.host.create(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError(f"Failed to create host {host!r}") from e + if not resp or not resp.get("hostids"): + raise 
ZabbixAPICallError( + "Host creation returned no data. Unable to determine if host was created." + ) + return str(resp["hostids"][0]) + + def update_host( + self, + host: Host, + status: Optional[MonitoringStatus] = None, + groups: Optional[List[HostGroup]] = None, + templates: Optional[List[Template]] = None, + tags: Optional[List[HostTag]] = None, + inventory_mode: Optional[InventoryMode] = None, + ) -> None: + """Update a host. + + Parameters + ---------- + host : Host + The host to update + status : Optional[MonitoringStatus] + New stauts for the host + groups : Optional[List[HostGroup]] + New host groups for the host. Replaces existing groups. + templates: Optional[List[Template]] + New templates for the host. Replaces existing templates. + """ + params: ParamsType = {"hostid": host.hostid} + if groups is not None: + params["groups"] = [{"groupid": hg.groupid} for hg in groups] + if status is not None: + params["status"] = status + if templates is not None: + params["templates"] = [t.model_dump_api() for t in templates] + if tags is not None: + params["tags"] = [t.model_dump_api() for t in tags] + if inventory_mode is not None: + params["inventory_mode"] = inventory_mode + try: + self.host.update(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update host {host.host} ({host.hostid}): {e}" + ) + + def delete_host(self, host_id: str) -> None: + """Deletes a host.""" + try: + self.host.delete(host_id) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to delete host with ID {host_id!r}" + ) from e + + def host_exists(self, name_or_id: str) -> bool: + """Checks if a host exists given its name or ID.""" + try: + self.get_host(name_or_id) + except ZabbixNotFoundError: + return False + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Unknown error when fetching host {name_or_id}" + ) from e + else: + return True + + def hostgroup_exists(self, hostgroup_name: str) -> bool: + try: + self.get_hostgroup(hostgroup_name) + except ZabbixNotFoundError: + return False + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to fetch host group {hostgroup_name}" + ) from e + else: + return True + + def get_host_interface( + self, + interfaceid: Optional[str] = None, + ) -> HostInterface: + """Fetch a host interface given its ID""" + interfaces = self.get_host_interfaces(interfaceids=interfaceid) + if not interfaces: + raise ZabbixNotFoundError(f"Host interface with ID {interfaceid} not found") + return interfaces[0] + + def get_host_interfaces( + self, + hostids: Union[str, List[str], None] = None, + interfaceids: Union[str, List[str], None] = None, + itemids: Union[str, List[str], None] = None, + triggerids: Union[str, List[str], None] = None, + # Can expand with the rest of the parameters if needed + ) -> List[HostInterface]: + """Fetch a list of host interfaces, optionally filtered by host ID, + interface ID, item ID or trigger ID. 
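A sketch of `update_host` above; note that the `groups` and `templates` arguments replace the host's existing assignments rather than extending them (names are examples, `api` as before).

```python
host = api.get_host("foo.example.com", select_groups=True, select_templates=True)

new_groups = [api.get_hostgroup("All-hosts"), api.get_hostgroup("Source-foo")]
new_templates = api.get_templates("Template OS Linux by Zabbix agent")

# Replaces the host's group and template assignments wholesale.
api.update_host(host, groups=new_groups, templates=new_templates)
```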
+ """ + params: ParamsType = {"output": "extend"} + if hostids: + params["hostids"] = hostids + if interfaceids: + params["interfaceids"] = interfaceids + if itemids: + params["itemids"] = itemids + if triggerids: + params["triggerids"] = triggerids + try: + resp = self.hostinterface.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to fetch host interfaces") from e + return [HostInterface(**iface) for iface in resp] + + def create_host_interface( + self, + host: Host, + main: bool, + type: InterfaceType, + use_ip: bool, + port: str, + ip: Optional[str] = None, + dns: Optional[str] = None, + details: Optional[CreateHostInterfaceDetails] = None, + ) -> str: + if not ip and not dns: + raise ZabbixAPIException("Either IP or DNS must be provided") + if use_ip and not ip: + raise ZabbixAPIException("IP must be provided if using IP connection mode.") + if not use_ip and not dns: + raise ZabbixAPIException( + "DNS must be provided if using DNS connection mode." + ) + params: ParamsType = { + "hostid": host.hostid, + "main": int(main), + "type": type, + "useip": int(use_ip), + "port": str(port), + "ip": ip or "", + "dns": dns or "", + } + if type == InterfaceType.SNMP: + if not details: + raise ZabbixAPIException( + "SNMP details must be provided for SNMP interfaces." + ) + params["details"] = details.model_dump_api() + + try: + resp = self.hostinterface.create(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to create host interface for host {host.host!r}" + ) from e + if not resp or not resp.get("interfaceids"): + raise ZabbixAPICallError( + "Host interface creation returned no data. Unable to determine if interface was created." + ) + return str(resp["interfaceids"][0]) + + def update_host_interface( + self, + interface: HostInterface, + hostid: Optional[str] = None, + main: Optional[bool] = None, + type: Optional[InterfaceType] = None, + use_ip: Optional[bool] = None, + port: Optional[str] = None, + ip: Optional[str] = None, + dns: Optional[str] = None, + details: Optional[UpdateHostInterfaceDetails] = None, + ) -> None: + params: ParamsType = {"interfaceid": interface.interfaceid} + if hostid is not None: + params["hostid"] = hostid + if main is not None: + params["main"] = int(main) + if type is not None: + params["type"] = type + if use_ip is not None: + params["useip"] = int(use_ip) + if port is not None: + params["port"] = str(port) + if ip is not None: + params["ip"] = ip + if dns is not None: + params["dns"] = dns + if details is not None: + params["details"] = details.model_dump_api() + try: + self.hostinterface.update(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update host interface with ID {interface.interfaceid}" + ) from e + + def delete_host_interface(self, interface_id: str) -> None: + """Deletes a host interface.""" + try: + self.hostinterface.delete(interface_id) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to delete host interface with ID {interface_id}" + ) from e + + def get_usergroup( + self, + name: str, + select_users: bool = False, + select_rights: bool = False, + search: bool = False, + ) -> Usergroup: + """Fetch a user group by name. 
Always fetches the full contents of the group.""" + groups = self.get_usergroups( + name, + select_users=select_users, + select_rights=select_rights, + search=search, + ) + if not groups: + raise ZabbixNotFoundError(f"User group {name!r} not found") + return groups[0] + + def get_usergroups( + self, + *names: str, + # See get_usergroup for why these are set to True by default + select_users: bool = True, + select_rights: bool = True, + search: bool = False, + ) -> List[Usergroup]: + """Fetch all user groups. Optionally includes users and rights.""" + params: ParamsType = { + "output": "extend", + } + search_params: ParamsType = {} + + if "*" in names: + names = tuple() + if search: + params["searchByAny"] = True # Union search (default is intersection) + params["searchWildcardsEnabled"] = True + + if names: + for name in names: + name = name.strip() + if search: + append_param(search_params, "name", name) + else: + params["filter"] = {"name": name} + + if search_params: + params["search"] = search_params + + # Rights were split into host and template group rights in 6.2.0 + if select_rights: + if self.version.release >= (6, 2, 0): + params["selectHostGroupRights"] = "extend" + params["selectTemplateGroupRights"] = "extend" + else: + params["selectRights"] = "extend" + if select_users: + params["selectUsers"] = "extend" + + try: + res = self.usergroup.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Unable to fetch user groups") from e + else: + return [Usergroup(**usergroup) for usergroup in res] + + def create_usergroup( + self, + usergroup_name: str, + disabled: bool = False, + gui_access: GUIAccess = GUIAccess.DEFAULT, + ) -> str: + """Create a user group with the given name.""" + try: + resp = self.usergroup.create( + name=usergroup_name, + users_status=int(disabled), + gui_access=gui_access, + ) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to create user group {usergroup_name!r}" + ) from e + if not resp or not resp.get("usrgrpids"): + raise ZabbixAPICallError( + "User group creation returned no data. Unable to determine if group was created." + ) + return str(resp["usrgrpids"][0]) + + def add_usergroup_users(self, usergroup_name: str, users: List[User]) -> None: + """Add users to a user group. Ignores users already in the group.""" + self._update_usergroup_users(usergroup_name, users, remove=False) + + def remove_usergroup_users(self, usergroup_name: str, users: List[User]) -> None: + """Remove users from a user group. Ignores users not in the group.""" + self._update_usergroup_users(usergroup_name, users, remove=True) + + def _update_usergroup_users( + self, usergroup_name: str, users: List[User], remove: bool = False + ) -> None: + """Add/remove users from user group. + + Takes in the name of a user group instead of a `UserGroup` object + to ensure the user group is fetched with `select_users=True`. 
+ """ + usergroup = self.get_usergroup(usergroup_name, select_users=True) + + params: ParamsType = {"usrgrpid": usergroup.usrgrpid} + + # Add new IDs to existing and remove duplicates + current_userids = [user.userid for user in usergroup.users] + ids_update = [user.userid for user in users if user.userid] + if remove: + new_userids = list(set(current_userids) - set(ids_update)) + else: + new_userids = list(set(current_userids + ids_update)) + + if self.version.release >= (6, 0, 0): + params["users"] = {"userid": uid for uid in new_userids} + else: + params["userids"] = new_userids + self.usergroup.update(usrgrpid=usergroup.usrgrpid, userids=new_userids) + + def update_usergroup_rights( + self, + usergroup_name: str, + groups: List[str], + permission: UsergroupPermission, + hostgroup: bool, + ) -> None: + """Update usergroup rights for host or template groups.""" + usergroup = self.get_usergroup(usergroup_name, select_rights=True) + + params: ParamsType = {"usrgrpid": usergroup.usrgrpid} + + if hostgroup: + hostgroups = [self.get_hostgroup(hg) for hg in groups] + if self.version.release >= (6, 2, 0): + hg_rights = usergroup.hostgroup_rights + else: + hg_rights = usergroup.rights + new_rights = self._get_updated_rights(hg_rights, permission, hostgroups) + params[compat.usergroup_hostgroup_rights(self.version)] = [ + r.model_dump_api() for r in new_rights + ] + else: + if self.version.release < (6, 2, 0): + raise ZabbixAPIException( + "Template group rights are only supported in Zabbix 6.2.0 and later" + ) + templategroups = [self.get_templategroup(tg) for tg in groups] + tg_rights = usergroup.templategroup_rights + new_rights = self._get_updated_rights(tg_rights, permission, templategroups) + params[compat.usergroup_templategroup_rights(self.version)] = [ + r.model_dump_api() for r in new_rights + ] + try: + self.usergroup.update(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update usergroup rights for {usergroup_name!r}" + ) from e + + def _get_updated_rights( + self, + rights: List[ZabbixRight], + permission: UsergroupPermission, + groups: Union[List[HostGroup], List[TemplateGroup]], + ) -> List[ZabbixRight]: + new_rights: List[ZabbixRight] = [] # list of new rights to add + rights = list(rights) # copy rights (don't modify original) + for group in groups: + for right in rights: + if right.id == group.groupid: + right.permission = permission + break + else: + new_rights.append(ZabbixRight(id=group.groupid, permission=permission)) + rights.extend(new_rights) + return rights + + def get_proxy( + self, name_or_id: str, select_hosts: bool = False, search: bool = True + ) -> Proxy: + """Fetch a single proxy matching the given name.""" + proxies = self.get_proxies(name_or_id, select_hosts=select_hosts, search=search) + if not proxies: + raise ZabbixNotFoundError(f"Proxy {name_or_id!r} not found") + return proxies[0] + + def get_proxies( + self, + *names_or_ids: str, + select_hosts: bool = False, + search: bool = True, + **kwargs: Any, + ) -> List[Proxy]: + """Fetch all proxies. 
+ + NOTE: IDs and names cannot be mixed + """ + params: ParamsType = {"output": "extend"} + search_params: ParamsType = {} + + for name_or_id in names_or_ids: + if name_or_id: + if name_or_id.isnumeric(): + append_param(params, "proxyids", name_or_id) + else: + append_param(params, compat.proxy_name(self.version), name_or_id) + + if select_hosts: + params["selectHosts"] = "extend" + if search and search_params: + params["search"] = search_params + params["searchWildcardsEnabled"] = True + params["searchByAny"] = True + + params.update(**kwargs) + try: + res = self.proxy.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Unknown error when fetching proxies") from e + else: + return [Proxy(**proxy) for proxy in res] + + def get_macro( + self, + host: Optional[Host] = None, + macro_name: Optional[str] = None, + search: bool = False, + select_hosts: bool = False, + select_templates: bool = False, + sort_field: Optional[str] = "macro", + sort_order: Optional[SortOrder] = None, + ) -> Macro: + """Fetch a macro given a host ID and macro name.""" + macros = self.get_macros( + macro_name=macro_name, + host=host, + search=search, + select_hosts=select_hosts, + select_templates=select_templates, + sort_field=sort_field, + sort_order=sort_order, + ) + if not macros: + raise ZabbixNotFoundError("Macro not found") + return macros[0] + + def get_hosts_with_macro(self, macro: str) -> List[Host]: + """Fetch a macro given a host ID and macro name.""" + macros = self.get_macros(macro_name=macro) + if not macros: + raise ZabbixNotFoundError(f"Macro {macro!r} not found.") + return macros[0].hosts + + def get_macros( + self, + macro_name: Optional[str] = None, + host: Optional[Host] = None, + search: bool = False, + select_hosts: bool = False, + select_templates: bool = False, + sort_field: Optional[str] = "macro", + sort_order: Optional[SortOrder] = None, + ) -> List[Macro]: + params: ParamsType = {"output": "extend"} + + if host: + add_param(params, "search", "hostids", host.hostid) + + if macro_name: + add_param(params, "search", "macro", macro_name) + + # Enable wildcard searching if we have one or more search terms + if params.get("search"): + params["searchWildcardsEnabled"] = True + + if select_hosts: + params["selectHosts"] = "extend" + + if select_templates: + params["selectTemplates"] = "extend" + + if sort_field: + params["sortfield"] = sort_field + if sort_order: + params["sortorder"] = sort_order + try: + result = self.usermacro.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to retrieve macros") from e + return [Macro(**macro) for macro in result] + + def get_global_macro( + self, + macro_name: Optional[str] = None, + search: bool = False, + sort_field: Optional[str] = "macro", + sort_order: Optional[SortOrder] = None, + ) -> Macro: + """Fetch a global macro given a macro name.""" + macros = self.get_macros( + macro_name=macro_name, + search=search, + sort_field=sort_field, + sort_order=sort_order, + ) + if not macros: + raise ZabbixNotFoundError("Global macro not found") + return macros[0] + + def get_global_macros( + self, + macro_name: Optional[str] = None, + search: bool = False, + sort_field: Optional[str] = "macro", + sort_order: Optional[SortOrder] = None, + ) -> List[GlobalMacro]: + params: ParamsType = {"output": "extend", "globalmacro": True} + + if macro_name: + add_param(params, "search", "macro", macro_name) + + # Enable wildcard searching if we have one or more search terms + if params.get("search"): + 
params["searchWildcardsEnabled"] = True + + if sort_field: + params["sortfield"] = sort_field + if sort_order: + params["sortorder"] = sort_order + try: + result = self.usermacro.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to retrieve global macros") from e + + return [GlobalMacro(**macro) for macro in result] + + def create_macro(self, host: Host, macro: str, value: str) -> str: + """Create a macro given a host ID, macro name and value.""" + try: + resp = self.usermacro.create(hostid=host.hostid, macro=macro, value=value) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to create macro {macro!r} for host {host}" + ) from e + if not resp or not resp.get("hostmacroids"): + raise ZabbixNotFoundError( + f"No macro ID returned when creating macro {macro!r} for host {host}" + ) + return resp["hostmacroids"][0] + + def create_global_macro(self, macro: str, value: str) -> str: + """Create a global macro given a macro name and value.""" + try: + resp = self.usermacro.createglobal(macro=macro, value=value) + except ZabbixAPIException as e: + raise ZabbixAPICallError(f"Failed to create global macro {macro!r}.") from e + if not resp or not resp.get("globalmacroids"): + raise ZabbixNotFoundError( + f"No macro ID returned when creating global macro {macro!r}." + ) + return resp["globalmacroids"][0] + + def update_macro(self, macroid: str, value: str) -> str: + """Update a macro given a macro ID and value.""" + try: + resp = self.usermacro.update(hostmacroid=macroid, value=value) + except ZabbixAPIException as e: + raise ZabbixAPICallError(f"Failed to update macro with ID {macroid}") from e + if not resp or not resp.get("hostmacroids"): + raise ZabbixNotFoundError( + f"No macro ID returned when updating macro with ID {macroid}" + ) + return resp["hostmacroids"][0] + + def update_host_inventory(self, host: Host, inventory: Dict[str, str]) -> str: + """Update a host inventory given a host and inventory.""" + try: + resp = self.host.update(hostid=host.hostid, inventory=inventory) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update host inventory for host {host.host!r} (ID {host.hostid})" + ) from e + if not resp or not resp.get("hostids"): + raise ZabbixNotFoundError( + f"No host ID returned when updating inventory for host {host.host!r} (ID {host.hostid})" + ) + return resp["hostids"][0] + + def update_host_proxy(self, host: Host, proxy: Proxy) -> str: + """Update a host's proxy.""" + params: ParamsType = { + "hostid": host.hostid, + compat.host_proxyid(self.version): proxy.proxyid, + } + if self.version.release >= (7, 0, 0): + params["monitored_by"] = MonitoredBy.PROXY.value + try: + resp = self.host.update(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update host proxy for host {host.host!r} (ID {host.hostid})" + ) from e + if not resp or not resp.get("hostids"): + raise ZabbixNotFoundError( + f"No host ID returned when updating proxy for host {host.host!r} (ID {host.hostid})" + ) + return resp["hostids"][0] + + def clear_host_proxy(self, host: Host) -> str: + """Clear a host's proxy.""" + params: ParamsType = { + "hostid": host.hostid, + compat.host_proxyid(self.version): "0", + } + if self.version.release >= (7, 0, 0): + params["monitored_by"] = MonitoredBy.SERVER.value + try: + resp = self.host.massupdate(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError(f"Failed to clear proxy on host {host}") from e + if not resp or resp.get("hostids") is None: + 
raise ZabbixNotFoundError( + f"No host ID returned when clearing proxy on host {host}" + ) + return resp["hostids"][0] + + def get_template( + self, + template_name_or_id: str, + select_hosts: bool = False, + select_templates: bool = False, + select_parent_templates: bool = False, + ) -> Template: + """Fetch a single template given its name or ID.""" + templates = self.get_templates( + template_name_or_id, + select_hosts=select_hosts, + select_templates=select_templates, + select_parent_templates=select_parent_templates, + ) + if not templates: + raise ZabbixNotFoundError(f"Template {template_name_or_id!r} not found") + return templates[0] + + def get_templates( + self, + *template_names_or_ids: str, + select_hosts: bool = False, + select_templates: bool = False, + select_parent_templates: bool = False, + ) -> List[Template]: + """Fetch one or more templates given a name or ID.""" + params: ParamsType = {"output": "extend"} + search_params: ParamsType = {} + + # TODO: refactor this along with other methods that take names or ids (or wildcards) + if "*" in template_names_or_ids: + template_names_or_ids = tuple() + + for name_or_id in template_names_or_ids: + name_or_id = name_or_id.strip() + is_id = name_or_id.isnumeric() + if is_id: + append_param(params, "templateids", name_or_id) + else: + append_param(search_params, "host", name_or_id) + params.setdefault("searchWildcardsEnabled", True) + params.setdefault("searchByAny", True) + + if search_params: + params["search"] = search_params + if select_hosts: + params["selectHosts"] = "extend" + if select_templates: + params["selectTemplates"] = "extend" + if select_parent_templates: + params["selectParentTemplates"] = "extend" + + try: + templates = self.template.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Unable to fetch templates") from e + return [Template(**template) for template in templates] + + def add_templates_to_groups( + self, + templates: List[Template], + groups: Union[List[HostGroup], List[TemplateGroup]], + ) -> None: + try: + self.template.massadd( + templates=[ + {"templateid": template.templateid} for template in templates + ], + groups=[{"groupid": group.groupid} for group in groups], + ) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to add templates to group(s)") from e + + def link_templates_to_hosts( + self, templates: List[Template], hosts: List[Host] + ) -> None: + """Link one or more templates to one or more hosts. + + Args: + templates (List[str]): A list of template names or IDs + hosts (List[str]): A list of host names or IDs + """ + if not templates: + raise ZabbixAPIException( + "At least one template is required to link host to" + ) + if not hosts: + raise ZabbixAPIException( + "At least one host is required to link templates to" + ) + template_ids: ModifyTemplateParams = [ + {"templateid": template.templateid} for template in templates + ] + host_ids: ModifyHostParams = [{"hostid": host.hostid} for host in hosts] + try: + self.host.massadd(templates=template_ids, hosts=host_ids) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to link templates") from e + + def unlink_templates_from_hosts( + self, templates: List[Template], hosts: List[Host], clear: bool = True + ) -> None: + """Unlink and clears one or more templates from one or more hosts. + + Args: + templates (List[Template]): A list of templates to unlink + hosts (List[Host]): A list of hosts to unlink templates from + clear (bool): Clear template from host when unlinking it. 
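Linking and later unlinking (and clearing) templates with the helpers above could look like this (`api` as before, names are examples):

```python
hosts = api.get_hosts("foo.example.com", "bar.example.com", search=False)
templates = api.get_templates("Template OS Linux by Zabbix agent")

api.link_templates_to_hosts(templates, hosts)
# ...and later, unlink and clear them again:
api.unlink_templates_from_hosts(templates, hosts, clear=True)
```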
+ """ + if not templates: + raise ZabbixAPIException("At least one template is required") + if not hosts: + raise ZabbixAPIException("At least one host is required") + + params: ParamsType = { + "hostids": [h.hostid for h in hosts], + } + tids = [t.templateid for t in templates] + if clear: + params["templateids_clear"] = tids + else: + params["templateids"] = tids + + try: + self.host.massremove(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to unlink and clear templates") from e + + def link_templates( + self, source: List[Template], destination: List[Template] + ) -> None: + """Link one or more templates to one or more templates + + Destination templates are the templates that ultimately inherit the + items and triggers from the source templates. + + Args: + source (List[Template]): A list of templates to link from + destination (List[Template]): A list of templates to link to + """ + if not source: + raise ZabbixAPIException("At least one source template is required") + if not destination: + raise ZabbixAPIException("At least one destination template is required") + # NOTE: source templates are passed to templates_link param + templates: ModifyTemplateParams = [ + {"templateid": template.templateid} for template in destination + ] + templates_link: ModifyTemplateParams = [ + {"templateid": template.templateid} for template in source + ] + try: + self.template.massadd(templates=templates, templates_link=templates_link) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to link templates") from e + + def unlink_templates( + self, source: List[Template], destination: List[Template], clear: bool = True + ) -> None: + """Unlink template(s) from template(s) and optionally clears them. + + Destination templates are the templates that ultimately inherit the + items and triggers from the source templates. + + Args: + source (List[Template]): A list of templates to unlink + destination (List[Template]): A list of templates to unlink source templates from + clear (bool): Whether to clear the source templates from the destination templates. Defaults to True. + """ + if not source: + raise ZabbixAPIException("At least one source template is required") + if not destination: + raise ZabbixAPIException("At least one destination template is required") + params: ParamsType = { + "templateids": [template.templateid for template in destination], + "templateids_link": [template.templateid for template in source], + } + # NOTE: despite what the docs say, we need to pass both templateids_link and templateids_clear + # in order to unlink and clear templates. Only passing in templateids_clear will just + # unlink the templates but not clear them (????) Absurd behavior. + # This is NOT the case for host.massremove, where `templateids_clear` is sufficient... + if clear: + params["templateids_clear"] = params["templateids_link"] + try: + self.template.massremove(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to unlink template(s)") from e + + def link_templates_to_groups( + self, + templates: List[Template], + groups: Union[List[HostGroup], List[TemplateGroup]], + ) -> None: + """Link one or more templates to one or more host/template groups. 
+
+        Callers must ensure that the right type of group is passed in depending
+        on the Zabbix version:
+            * Host groups for Zabbix < 6.2
+            * Template groups for Zabbix >= 6.2
+
+        Args:
+            templates (List[Template]): A list of templates to link
+            groups (Union[List[HostGroup], List[TemplateGroup]]): A list of host/template groups
+        """
+        if not templates:
+            raise ZabbixAPIException("At least one template is required")
+        if not groups:
+            raise ZabbixAPIException("At least one group is required")
+        template_ids: ModifyTemplateParams = [
+            {"templateid": template.templateid} for template in templates
+        ]
+        group_ids: ModifyGroupParams = [{"groupid": group.groupid} for group in groups]
+        try:
+            self.template.massadd(templates=template_ids, groups=group_ids)
+        except ZabbixAPIException as e:
+            raise ZabbixAPICallError("Failed to link template(s)") from e
+
+    def remove_templates_from_groups(
+        self,
+        templates: List[Template],
+        groups: Union[List[HostGroup], List[TemplateGroup]],
+    ) -> None:
+        """Remove template(s) from host/template group(s).
+
+        Callers must ensure that the right type of group is passed in depending
+        on the Zabbix version:
+            * Host groups for Zabbix < 6.2
+            * Template groups for Zabbix >= 6.2
+
+        Args:
+            templates (List[Template]): A list of templates to remove
+            groups (Union[List[HostGroup], List[TemplateGroup]]): A list of host/template groups
+        """
+        # NOTE: do we even want to enforce this?
+        if not templates:
+            raise ZabbixAPIException("At least one template is required")
+        if not groups:
+            raise ZabbixAPIException("At least one group is required")
+        try:
+            self.template.massremove(
+                templateids=[template.templateid for template in templates],
+                groupids=[group.groupid for group in groups],
+            )
+        except ZabbixAPIException as e:
+            raise ZabbixAPICallError("Failed to unlink template from groups") from e
+
+    def get_items(
+        self,
+        *names: str,
+        templates: Optional[List[Template]] = None,
+        hosts: Optional[List[Host]] = None,  # NYI
+        proxies: Optional[List[Proxy]] = None,  # NYI
+        search: bool = True,
+        monitored: bool = False,
+        select_hosts: bool = False,
+        # TODO: implement interfaces
+        # TODO: implement graphs
+        # TODO: implement triggers
+    ) -> List[Item]:
+        params: ParamsType = {"output": "extend"}
+        if names:
+            params["search"] = {"name": names}
+            if search:
+                params["searchWildcardsEnabled"] = True
+        if templates:
+            params["templateids"] = [template.templateid for template in templates]
+        if monitored:
+            params["monitored"] = monitored  # false by default in API
+        if select_hosts:
+            params["selectHosts"] = "extend"
+        try:
+            items = self.item.get(**params)
+        except ZabbixAPIException as e:
+            raise ZabbixAPICallError("Unable to fetch items") from e
+        return [Item(**item) for item in items]
+
+    def create_user(
+        self,
+        username: str,
+        password: str,
+        first_name: Optional[str] = None,
+        last_name: Optional[str] = None,
+        role: Optional[UserRole] = None,
+        autologin: Optional[bool] = None,
+        autologout: Union[str, int, None] = None,
+        usergroups: Union[List[Usergroup], None] = None,
+        media: Optional[List[UserMedia]] = None,
+    ) -> str:
+        # TODO: handle invalid password
+        # TODO: handle invalid type
+        params: ParamsType = {
+            compat.user_name(self.version): username,
+            "passwd": password,
+        }
+
+        if first_name:
+            params["name"] = first_name
+        if last_name:
+            params["surname"] = last_name
+
+        if role:
+            params[compat.role_id(self.version)] = role
+
+        if usergroups:
+            params["usrgrps"] = [{"usrgrpid": ug.usrgrpid} for ug in usergroups]
+
+        if
autologin is not None: + params["autologin"] = int(autologin) + + if autologout is not None: + params["autologout"] = str(autologout) + + if media: + params[compat.user_medias(self.version)] = [ + m.model_dump(mode="json") for m in media + ] + + resp = self.user.create(**params) + if not resp or not resp.get("userids"): + raise ZabbixAPICallError(f"Creating user {username!r} returned no user ID.") + return resp["userids"][0] + + def get_role(self, name_or_id: str) -> Role: + """Fetch a role given its ID or name.""" + roles = self.get_roles(name_or_id) + if not roles: + raise ZabbixNotFoundError(f"Role {name_or_id!r} not found") + return roles[0] + + def get_roles(self, name_or_id: Optional[str] = None) -> List[Role]: + params: ParamsType = {"output": "extend"} + if name_or_id is not None: + if name_or_id.isdigit(): + params["roleids"] = name_or_id + else: + params["filter"] = {"name": name_or_id} + roles = self.role.get(**params) + return [Role(**role) for role in roles] + + def get_user(self, username: str) -> User: + """Fetch a user given its username.""" + users = self.get_users(username) + if not users: + raise ZabbixNotFoundError(f"User with username {username!r} not found") + return users[0] + + def get_users( + self, + username: Optional[str] = None, + role: Optional[UserRole] = None, + search: bool = False, + ) -> List[User]: + params: ParamsType = {"output": "extend"} + filter_params: ParamsType = {} + if search: + params["searchWildcardsEnabled"] = True + if username is not None: + if search: + params["search"] = {compat.user_name(self.version): username} + else: + filter_params[compat.user_name(self.version)] = username + if role: + filter_params[compat.role_id(self.version)] = role + + if filter_params: + params["filter"] = filter_params + + users = self.user.get(**params) + return [User(**user) for user in users] + + def delete_user(self, user: User) -> str: + """Delete a user. + + Returns ID of deleted user. + """ + try: + resp = self.user.delete(user.userid) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to delete user {user.username!r} ({user.userid})" + ) from e + if not resp or not resp.get("userids"): + raise ZabbixNotFoundError( + f"No user ID returned when deleting user {user.username!r} ({user.userid})" + ) + return resp["userids"][0] + + def update_user( + self, + user: User, + current_password: Optional[str] = None, + new_password: Optional[str] = None, + first_name: Optional[str] = None, + last_name: Optional[str] = None, + role: Optional[UserRole] = None, + autologin: Optional[bool] = None, + autologout: Union[str, int, None] = None, + ) -> str: + """Update a user. 
Returns ID of updated user.""" + query: ParamsType = {"userid": user.userid} + if current_password and new_password: + query["current_passwd"] = current_password + query["passwd"] = new_password + if first_name: + query["name"] = first_name + if last_name: + query["surname"] = last_name + if role: + query[compat.role_id(self.version)] = role + if autologin is not None: + query["autologin"] = int(autologin) + if autologout is not None: + query["autologout"] = str(autologout) + + # Media and user groups are not supported in this method + + try: + resp = self.user.update(**query) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update user {user.username!r} ({user.userid})" + ) from e + if not resp or not resp.get("userids"): + raise ZabbixNotFoundError( + f"No user ID returned when updating user {user.username!r} ({user.userid})" + ) + return resp["userids"][0] + + def get_mediatype(self, name: str) -> MediaType: + mts = self.get_mediatypes(name=name) + if not mts: + raise ZabbixNotFoundError(f"Media type {name!r} not found") + return mts[0] + + def get_mediatypes( + self, name: Optional[str] = None, search: bool = False + ) -> List[MediaType]: + params: ParamsType = {"output": "extend"} + filter_params: ParamsType = {} + if search: + params["searchWildcardsEnabled"] = True + if name is not None: + if search: + params["search"] = {"name": name} + else: + filter_params["name"] = name + if filter_params: + params["filter"] = filter_params + resp = self.mediatype.get(**params) + return [MediaType(**mt) for mt in resp] + + ## Maintenance + def get_maintenance(self, maintenance_id: str) -> Maintenance: + """Fetch a maintenance given its ID.""" + maintenances = self.get_maintenances(maintenance_ids=[maintenance_id]) + if not maintenances: + raise ZabbixNotFoundError(f"Maintenance {maintenance_id!r} not found") + return maintenances[0] + + def get_maintenances( + self, + maintenance_ids: Optional[List[str]] = None, + hostgroups: Optional[List[HostGroup]] = None, + hosts: Optional[List[Host]] = None, + name: Optional[str] = None, + select_hosts: bool = False, + ) -> List[Maintenance]: + params: ParamsType = { + "output": "extend", + "selectHosts": "extend", + compat.param_host_get_groups(self.version): "extend", + "selectTimeperiods": "extend", + } + filter_params: ParamsType = {} + if maintenance_ids: + params["maintenanceids"] = maintenance_ids + if hostgroups: + params["groupids"] = [hg.groupid for hg in hostgroups] + if hosts: + params["hostids"] = [h.hostid for h in hosts] + if name: + filter_params["name"] = name + if filter_params: + params["filter"] = filter_params + resp = self.maintenance.get(**params) + return [Maintenance(**mt) for mt in resp] + + def create_maintenance( + self, + name: str, + active_since: datetime, + active_till: datetime, + description: Optional[str] = None, + hosts: Optional[List[Host]] = None, + hostgroups: Optional[List[HostGroup]] = None, + data_collection: Optional[DataCollectionMode] = None, + ) -> str: + """Create a one-time maintenance definition.""" + if not hosts and not hostgroups: + raise ZabbixAPIException("At least one host or hostgroup is required") + params: ParamsType = { + "name": name, + "active_since": int(active_since.timestamp()), + "active_till": int(active_till.timestamp()), + "timeperiods": { + "timeperiod_type": 0, + "start_date": int(active_since.timestamp()), + "period": int((active_till - active_since).total_seconds()), + }, + } + if description: + params["description"] = description + if hosts: + if 
self.version.release >= (6, 0, 0):
+                params["hosts"] = [{"hostid": h.hostid} for h in hosts]
+            else:
+                params["hostids"] = [h.hostid for h in hosts]
+        if hostgroups:
+            if self.version.release >= (6, 0, 0):
+                params["groups"] = [{"groupid": hg.groupid} for hg in hostgroups]
+            else:
+                params["groupids"] = [hg.groupid for hg in hostgroups]
+        if data_collection:
+            params["maintenance_type"] = data_collection
+        resp = self.maintenance.create(**params)
+        if not resp or not resp.get("maintenanceids"):
+            raise ZabbixAPICallError(f"Creating maintenance {name!r} returned no ID.")
+        return resp["maintenanceids"][0]
+
+    def update_maintenance(
+        self,
+        maintenance: Maintenance,
+        hosts: Optional[List[Host]] = None,
+    ) -> None:
+        """Update a maintenance definition."""
+        params: ParamsType = {"maintenanceid": maintenance.maintenanceid}
+        if not hosts:
+            raise ZabbixAPIException("At least one host is required")
+        if self.version.release >= (6, 0, 0):
+            params["hosts"] = [{"hostid": h.hostid} for h in hosts]
+        else:
+            params["hostids"] = [h.hostid for h in hosts]
+        try:
+            self.maintenance.update(**params)
+        except ZabbixAPIException as e:
+            raise ZabbixAPICallError(
+                f"Failed to update maintenance {maintenance.name!r} ({maintenance.maintenanceid})"
+            ) from e
+
+    def remove_hosts_from_maintenance(
+        self,
+        maintenance: Maintenance,
+        hosts: List[Host],
+        delete_if_empty: bool = False,
+    ) -> None:
+        """Remove one or more hosts from a maintenance.
+
+        Optionally also deletes the maintenance if no hosts remain."""
+        # NOTE: we cannot be certain we can compare object identities here
+        # so we use the actual host IDs to compare with instead.
+        # E.g. a host fetched with `get_hosts` might differ from a host
+        # with the same host ID in `maintenance.hosts`
+        hids = [host.hostid for host in hosts]
+        new_hosts = [host for host in maintenance.hosts if host.hostid not in hids]
+        if new_hosts:
+            self.update_maintenance(maintenance, new_hosts)
+        else:
+            # Result is an empty maintenance - decide course of action
+            if delete_if_empty:
+                self.delete_maintenance(maintenance)
+            else:
+                hnames = ", ".join(h.host for h in hosts)
+                raise ZabbixAPIException(
+                    f"Cannot remove host(s) {hnames} from maintenance {maintenance.name!r}"
+                )
+
+    def delete_maintenance(self, maintenance: Maintenance) -> List[str]:
+        """Delete a maintenance given its ID.
+
+        Returns IDs of deleted maintenances.
+ """ + try: + resp = self.maintenance.delete(maintenance.maintenanceid) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to delete maintenance {maintenance.name!r}" + ) from e + if not resp or not resp.get("maintenanceids"): + raise ZabbixNotFoundError( + f"No maintenance IDs returned when deleting maintenance {maintenance.name!r}" + ) + return resp["maintenanceids"] + + def get_triggers( + self, + trigger_ids: Union[str, List[str], None] = None, + hosts: Optional[List[Host]] = None, + hostgroups: Optional[List[HostGroup]] = None, + templates: Optional[List[Template]] = None, + description: Optional[str] = None, + priority: Optional[TriggerPriority] = None, + unacknowledged: bool = False, + skip_dependent: Optional[bool] = None, + monitored: Optional[bool] = None, + active: Optional[bool] = None, + expand_description: Optional[bool] = None, + filter: Optional[Dict[str, Any]] = None, + select_hosts: bool = False, + sort_field: Optional[str] = "lastchange", + sort_order: SortOrder = "DESC", + ) -> List[Trigger]: + params: ParamsType = {"output": "extend"} + if hosts: + params["hostids"] = [host.hostid for host in hosts] + if description: + params["search"] = {"description": description} + if skip_dependent is not None: + params["skipDependent"] = int(skip_dependent) + if monitored is not None: + params["monitored"] = int(monitored) + if active is not None: + params["active"] = int(active) + if expand_description is not None: + params["expandDescription"] = int(expand_description) + if filter: + params["filter"] = filter + if trigger_ids: + params["triggerids"] = trigger_ids + if hostgroups: + params["groupids"] = [hg.groupid for hg in hostgroups] + if templates: + params["templateids"] = [t.templateid for t in templates] + if priority: + add_param(params, "filter", "priority", priority) + if unacknowledged: + params["withLastEventUnacknowledged"] = True + if select_hosts: + params["selectHosts"] = "extend" + if sort_field: + params["sortfield"] = sort_field + if sort_order: + params["sortorder"] = sort_order + try: + resp = self.trigger.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to fetch triggers") from e + return [Trigger(**trigger) for trigger in resp] + + def update_trigger( + self, trigger: Trigger, hosts: Optional[List[Host]] = None + ) -> str: + """Update a trigger.""" + params: ParamsType = {"triggerid": trigger.triggerid} + if hosts: + params["hostids"] = [host.hostid for host in hosts] + try: + resp = self.trigger.update(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError( + f"Failed to update trigger {trigger.description!r} ({trigger.triggerid})" + ) from e + if not resp or not resp.get("triggerids"): + raise ZabbixNotFoundError( + f"No trigger ID returned when updating trigger {trigger.description!r} ({trigger.triggerid})" + ) + return resp["triggerids"][0] + + def get_images(self, *image_names: str, select_image: bool = True) -> List[Image]: + """Fetch images, optionally filtered by name(s).""" + params: ParamsType = {"output": "extend"} + if image_names: + params["searchByAny"] = True + params["search"] = {"name": image_names} + if select_image: + params["selectImage"] = True + + try: + resp = self.image.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to fetch images") from e + return [Image(**image) for image in resp] + + def get_maps(self, *map_names: str) -> List[Map]: + """Fetch maps, optionally filtered by name(s).""" + params: ParamsType = {"output": 
"extend"} + if map_names: + params["searchByAny"] = True + params["search"] = {"name": map_names} + + try: + resp = self.map.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to fetch maps") from e + return [Map(**m) for m in resp] + + def get_media_types(self, *names: str) -> List[MediaType]: + """Fetch media types, optionally filtered by name(s).""" + params: ParamsType = {"output": "extend"} + if names: + params["searchByAny"] = True + params["search"] = {"name": names} + + try: + resp = self.mediatype.get(**params) + except ZabbixAPIException as e: + raise ZabbixAPICallError("Failed to fetch maps") from e + return [MediaType(**m) for m in resp] + + def __getattr__(self, attr: str): + """Dynamically create an object class (ie: host)""" + return ZabbixAPIObjectClass(attr, self) + + +WRITE_OPERATIONS = set( + [ + "create", + "delete", + "update", + "massadd", + "massupdate", + "massremove", + "push", # history + "clear", # history + "acknowledge", # event + "import", # configuration + "propagate", # hostgroup, templategroup + "replacehostinterfaces", # hostinterface + "copy", # discoveryrule + "execute", # script + "resettotp", # user + "unblock", # user + "createglobal", # macro + "deleteglobal", # macro + "updateglobal", # macro + ] +) + + +class ZabbixAPIObjectClass: + def __init__(self, name: str, parent: ZabbixAPI): + self.name = name + self.parent = parent + + def __getattr__(self, attr: str) -> Any: + """Dynamically create a method (ie: get)""" + + def fn(*args: Any, **kwargs: Any) -> Any: + if args and kwargs: + raise TypeError("Found both args and kwargs") + + return self.parent.do_request(f"{self.name}.{attr}", args or kwargs).result # type: ignore + + return fn + + def __getattribute__(self, attr: str) -> Any: + """Intercept attribute calls to customize behavior for specific methods. + + When running in read-only mode, we want to prevent all write operations. + """ + + if attr in WRITE_OPERATIONS: + if object.__getattribute__(self, "parent").read_only: + raise ZabbixAPIException( + "Cannot perform API write operations in read-only mode" + ) + return object.__getattribute__(self, attr) diff --git a/zabbix_auto_config/pyzabbix/compat.py b/zabbix_auto_config/pyzabbix/compat.py new file mode 100644 index 0000000..b52e872 --- /dev/null +++ b/zabbix_auto_config/pyzabbix/compat.py @@ -0,0 +1,142 @@ +"""Compatibility functions to support different Zabbix API versions.""" + +from __future__ import annotations + +from typing import Literal + +from packaging.version import Version + +# TODO (pederhan): rewrite these functions as some sort of declarative data +# structure that can be used to determine correct parameters based on version +# if we end up with a lot of these functions. For now, this is fine. +# OR we could turn it into a mixin class? + +# Compatibility methods for Zabbix API objects properties and method parameters. +# Returns the appropriate property name for the given Zabbix version. +# +# FORMAT: _ +# EXAMPLE: user_name() (User object, name property) +# +# NOTE: All functions follow the same pattern: +# Early return if the version is older than the version where the property +# was deprecated, otherwise return the new property name as the default. 
+ + +def host_proxyid(version: Version) -> Literal["proxy_hostid", "proxyid"]: + # https://support.zabbix.com/browse/ZBXNEXT-8500 + # https://www.zabbix.com/documentation/7.0/en/manual/api/changes#host + if version.release < (7, 0, 0): + return "proxy_hostid" + return "proxyid" + + +def host_available(version: Version) -> Literal["available", "active_available"]: + # TODO: find out why this was changed and what it signifies + # NO URL YET + if version.release < (6, 4, 0): + return "available" + return "active_available" + + +def login_user_name(version: Version) -> Literal["user", "username"]: + # https://support.zabbix.com/browse/ZBXNEXT-8085 + # Deprecated in 5.4.0, removed in 6.4.0 + # login uses different parameter names than the User object before 6.4 + # From 6.4 and onwards, login and user. use the same parameter names + # See: user_name + if version.release < (5, 4, 0): + return "user" + return "username" + + +def proxy_name(version: Version) -> Literal["host", "name"]: + # https://support.zabbix.com/browse/ZBXNEXT-8500 + # https://www.zabbix.com/documentation/7.0/en/manual/api/changes#proxy + if version.release < (7, 0, 0): + return "host" + return "name" + + +def role_id(version: Version) -> Literal["roleid", "type"]: + # https://support.zabbix.com/browse/ZBXNEXT-6148 + # https://www.zabbix.com/documentation/5.2/en/manual/api/changes_5.0_-_5.2#role + if version.release < (5, 2, 0): + return "type" + return "roleid" + + +def user_name(version: Version) -> Literal["alias", "username"]: + # https://support.zabbix.com/browse/ZBXNEXT-8085 + # Deprecated in 5.4, removed in 6.4 + # However, historically we have used "alias" as the parameter name + # pre-6.0.0, so we maintain that behavior here + if version.release < (6, 0, 0): + return "alias" + return "username" + + +def user_medias(version: Version) -> Literal["user_medias", "medias"]: + # https://support.zabbix.com/browse/ZBX-17955 + # Deprecated in 5.2, removed in 6.4 + if version.release < (5, 2, 0): + return "user_medias" + return "medias" + + +def usergroup_hostgroup_rights( + version: Version, +) -> Literal["rights", "hostgroup_rights"]: + # https://support.zabbix.com/browse/ZBXNEXT-2592 + # https://www.zabbix.com/documentation/6.2/en/manual/api/changes_6.0_-_6.2 + # Deprecated in 6.2 + if version.release < (6, 2, 0): + return "rights" + return "hostgroup_rights" + + +# NOTE: can we remove this function? Or are we planning on using it to +# assign rights for templates? 
+def usergroup_templategroup_rights(
+    version: Version,
+) -> Literal["rights", "templategroup_rights"]:
+    # https://support.zabbix.com/browse/ZBXNEXT-2592
+    # https://www.zabbix.com/documentation/6.2/en/manual/api/changes_6.0_-_6.2
+    # Deprecated in 6.2
+    if version.release < (6, 2, 0):
+        return "rights"
+    return "templategroup_rights"
+
+
+### API params
+# API parameter functions are in the following format:
+# param_<object>_<method>_<param>
+# So to get the "groups" parameter for the "host.get" method, you would call:
+# param_host_get_groups()
+
+
+def param_host_get_groups(
+    version: Version,
+) -> Literal["selectHostGroups", "selectGroups"]:
+    # https://support.zabbix.com/browse/ZBXNEXT-2592
+    # https://www.zabbix.com/documentation/6.2/en/manual/api/changes_6.0_-_6.2#host
+    if version.release < (6, 2, 0):
+        return "selectGroups"
+    return "selectHostGroups"
+
+
+def param_maintenance_create_groupids(
+    version: Version,
+) -> Literal["groupids", "groups"]:
+    # https://support.zabbix.com/browse/ZBXNEXT-2592
+    # https://www.zabbix.com/documentation/6.2/en/manual/api/changes_6.0_-_6.2#host
+    if version.release < (6, 2, 0):
+        return "groups"
+    return "groupids"
+
+
+def param_maintenance_update_hostids(
+    version: Version,
+) -> Literal["hosts", "hostids"]:
+    if version.release < (6, 0, 0):
+        return "hostids"
+    return "hosts"
diff --git a/zabbix_auto_config/pyzabbix/enums.py b/zabbix_auto_config/pyzabbix/enums.py
new file mode 100644
index 0000000..dd3acf0
--- /dev/null
+++ b/zabbix_auto_config/pyzabbix/enums.py
@@ -0,0 +1,151 @@
+from __future__ import annotations
+
+from enum import IntEnum
+
+from zabbix_auto_config.exceptions import ZACException
+
+
+class UserRole(IntEnum):
+    USER = 1
+    ADMIN = 2
+    SUPERADMIN = 3
+    GUEST = 4
+
+
+class UsergroupPermission(IntEnum):
+    """Usergroup permission levels."""
+
+    DENY = 0
+    READ_ONLY = 2
+    READ_WRITE = 3
+
+
+class AgentAvailable(IntEnum):
+    """Agent availability status."""
+
+    UNKNOWN = 0
+    AVAILABLE = 1
+    UNAVAILABLE = 2
+
+
+class MonitoringStatus(IntEnum):
+    """Host monitoring status."""
+
+    ON = 0  # Yes, 0 is on, 1 is off...
+    OFF = 1
+    UNKNOWN = 3  # undocumented but shows up in autogenerated hosts in triggers
+
+
+class MaintenanceStatus(IntEnum):
+    """Host maintenance status."""
+
+    # API values are inverted here compared to monitoring status...
+    ON = 1
+    OFF = 0
+
+
+class InventoryMode(IntEnum):
+    """Host inventory mode."""
+
+    DISABLED = -1
+    MANUAL = 0
+    AUTOMATIC = 1
+
+
+class GUIAccess(IntEnum):
+    """GUI Access for a user group."""
+
+    DEFAULT = 0
+    INTERNAL = 1
+    LDAP = 2
+    DISABLE = 3
+
+
+class DataCollectionMode(IntEnum):
+    """Maintenance type."""
+
+    ON = 0
+    OFF = 1
+
+
+class TriggerPriority(IntEnum):
+    UNCLASSIFIED = 0
+    INFORMATION = 1
+    WARNING = 2
+    AVERAGE = 3
+    HIGH = 4
+    DISASTER = 5
+
+
+class InterfaceConnectionMode(IntEnum):
+    """Interface connection mode.
+
+    Controls the value of `useip` when creating interfaces in the API.
+ """ + + DNS = 0 + IP = 1 + + +class InterfaceType(IntEnum): + """Interface type.""" + + AGENT = 1 + SNMP = 2 + IPMI = 3 + JMX = 4 + + def get_port(self: InterfaceType) -> str: + """Returns the default port for the given interface type.""" + PORTS = { + InterfaceType.AGENT: "10050", + InterfaceType.SNMP: "161", + InterfaceType.IPMI: "623", + InterfaceType.JMX: "12345", + } + try: + return PORTS[self] + except KeyError: + raise ZACException(f"Unknown interface type: {self}") + + +class SNMPSecurityLevel(IntEnum): + __choice_name__ = "SNMPv3 security level" + + # Match casing from Zabbix API + NO_AUTH_NO_PRIV = 0 + AUTH_NO_PRIV = 1 + AUTH_PRIV = 2 + + +class SNMPAuthProtocol(IntEnum): + """Authentication protocol for SNMPv3.""" + + MD5 = 0 + SHA1 = 1 + # >=6.0 only: + SHA224 = 2 + SHA256 = 3 + SHA384 = 4 + SHA512 = 5 + + +class SNMPPrivProtocol(IntEnum): + """Privacy protocol for SNMPv3.""" + + DES = 0 + AES = 1 # < 6.0 only + # >=6.0 only: + AES128 = 1 # >= 6.0 + AES192 = 2 + AES256 = 3 + AES192C = 4 + AES256C = 5 + + +class MonitoredBy(IntEnum): # >= 7.0 + """Type of entity that monitors the host.""" + + SERVER = 0 + PROXY = 1 + PROXY_GROUP = 2 diff --git a/zabbix_auto_config/pyzabbix/types.py b/zabbix_auto_config/pyzabbix/types.py new file mode 100644 index 0000000..bdc2964 --- /dev/null +++ b/zabbix_auto_config/pyzabbix/types.py @@ -0,0 +1,581 @@ +"""Type definitions for Zabbix API objects. + +Since we are supporting multiple versions of the Zabbix API at the same time, +we operate with somewhat lenient model definitions; models change between versions, +and we must ensure that we support them all. + +Fields that only apply to subset of versions are marked by a comment +denoting the version they are introduced/removed in. +""" + +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING +from typing import Any +from typing import Dict +from typing import List +from typing import MutableMapping +from typing import Optional +from typing import Protocol +from typing import Sequence +from typing import Union + +from pydantic import AliasChoices +from pydantic import BaseModel +from pydantic import ConfigDict +from pydantic import Field +from pydantic import ValidationError +from pydantic import ValidationInfo +from pydantic import ValidatorFunctionWrapHandler +from pydantic import WrapValidator +from pydantic import field_serializer +from pydantic import field_validator +from pydantic_core import PydanticCustomError +from typing_extensions import Annotated +from typing_extensions import Literal +from typing_extensions import TypeAliasType +from typing_extensions import TypedDict + +from zabbix_auto_config.pyzabbix.enums import InventoryMode +from zabbix_auto_config.pyzabbix.enums import MonitoringStatus + +if TYPE_CHECKING: + from zabbix_auto_config.pyzabbix.enums import InterfaceType + +SortOrder = Literal["ASC", "DESC"] + + +# Source: https://docs.pydantic.dev/2.7/concepts/types/#named-recursive-types +def json_custom_error_validator( + value: Any, handler: ValidatorFunctionWrapHandler, _info: ValidationInfo +) -> Any: + """Simplify the error message to avoid a gross error stemming from + exhaustive checking of all union options. 
+ """ # noqa: D205 + try: + return handler(value) + except ValidationError: + raise PydanticCustomError( + "invalid_json", + "Input is not valid json", + ) from None + + +Json = TypeAliasType( + "Json", + Annotated[ + Union[ + MutableMapping[str, "Json"], + Sequence["Json"], + str, + int, + float, + bool, + None, + ], + WrapValidator(json_custom_error_validator), + ], +) + + +ParamsType = MutableMapping[str, Json] +"""Type used to construct parameters for API requests. +Can only contain native JSON-serializable types. +""" + + +class ModifyHostItem(TypedDict): + """Argument for a host ID in an API request.""" + + hostid: Union[str, int] + + +ModifyHostParams = List[ModifyHostItem] + +"""A list of host IDs in an API request. + +E.g. `[{"hostid": "123"}, {"hostid": "456"}]` +""" + + +class ModifyGroupItem(TypedDict): + """Argument for a group ID in an API request.""" + + groupid: Union[str, int] + + +ModifyGroupParams = List[ModifyGroupItem] +"""A list of host/template group IDs in an API request. + +E.g. `[{"groupid": "123"}, {"groupid": "456"}]` +""" + + +class ModifyTemplateItem(TypedDict): + """Argument for a template ID in an API request.""" + + templateid: Union[str, int] + + +ModifyTemplateParams = List[ModifyTemplateItem] +"""A list of template IDs in an API request. + +E.g. `[{"templateid": "123"}, {"templateid": "456"}]` +""" + + +class CreateUpdateHostInterfaceParams(TypedDict): + main: bool + port: str + type: InterfaceType + use_ip: bool + ip: str + dns: str + + +class ZabbixAPIError(BaseModel): + """Zabbix API error information.""" + + code: int + message: str + data: Optional[str] = None + + +class ZabbixAPIResponse(BaseModel): + """The raw response from the Zabbix API""" + + jsonrpc: str + id: int + result: Any = None # can subclass this and specify types (ie. ZabbixAPIListResponse, ZabbixAPIStrResponse, etc.) + """Result of API call, if request succeeded.""" + error: Optional[ZabbixAPIError] = None + """Error info, if request failed.""" + + +class ZabbixAPIBaseModel(BaseModel): + """Base model for Zabbix API objects.""" + + model_config = ConfigDict(validate_assignment=True, extra="ignore") + + def model_dump_api(self) -> Dict[str, Any]: + """Dump the model as a JSON-serializable dict used in API calls + where None values are removed.""" + return self.model_dump(mode="json", exclude_none=True) + + +class ZabbixRight(ZabbixAPIBaseModel): + permission: int + id: str + name: Optional[str] = None # name of group (injected by application) + + def model_dump_api(self) -> Dict[str, Any]: + return self.model_dump( + mode="json", include={"permission", "id"}, exclude_none=True + ) + + +class User(ZabbixAPIBaseModel): + userid: str + username: str = Field(..., validation_alias=AliasChoices("username", "alias")) + name: Optional[str] = None + surname: Optional[str] = None + url: Optional[str] = None + autologin: Optional[str] = None + autologout: Optional[str] = None + roleid: Optional[int] = Field( + default=None, validation_alias=AliasChoices("roleid", "type") + ) + # NOTE: Not adding properties we don't use, since Zabbix has a habit of breaking + # its own API by changing names and types of properties between versions. + + +class Usergroup(ZabbixAPIBaseModel): + name: str + usrgrpid: str + gui_access: int + users_status: int + rights: List[ZabbixRight] = [] + hostgroup_rights: List[ZabbixRight] = [] + templategroup_rights: List[ZabbixRight] = [] + users: List[User] = [] + + +class Template(ZabbixAPIBaseModel): + """A template object. 
Can contain hosts and other templates.""" + + templateid: str + host: str + hosts: List[Host] = [] + templates: List[Template] = [] + """Child templates (templates inherited from this template).""" + + parent_templates: List[Template] = Field( + default_factory=list, + validation_alias=AliasChoices("parentTemplates", "parent_templates"), + serialization_alias="parentTemplates", # match JSON output to API format + ) + """Parent templates (templates this template inherits from).""" + + name: Optional[str] = None + """The visible name of the template.""" + + +class TemplateGroup(ZabbixAPIBaseModel): + groupid: str + name: str + uuid: str + templates: List[Template] = [] + + +class HostGroup(ZabbixAPIBaseModel): + groupid: str + name: str + hosts: List[Host] = [] + flags: int = 0 + internal: Optional[int] = None # <6.2 + templates: List[Template] = [] # <6.2 + + +class HostTag(ZabbixAPIBaseModel): + tag: str + value: str + automatic: Optional[int] = Field(default=None, exclude=True) + """Tag is automatically set by host discovery. Only used for lookups.""" + + +# TODO: expand Host model with all possible fields +# Add alternative constructor to construct from API result +class Host(ZabbixAPIBaseModel): + hostid: str + host: str = "" + description: Optional[str] = None + groups: List[HostGroup] = Field( + default_factory=list, + # Compat for >= 6.2.0 + validation_alias=AliasChoices("groups", "hostgroups"), + ) + templates: List[Template] = Field(default_factory=list) + parent_templates: List[Template] = Field( + default_factory=list, + # Accept both casings + validation_alias=AliasChoices("parentTemplates", "parent_templates"), + ) + inventory: Dict[str, Any] = Field(default_factory=dict) + proxyid: Optional[str] = Field( + None, + # Compat for <7.0.0 + validation_alias=AliasChoices("proxyid", "proxy_hostid"), + ) + proxy_address: Optional[str] = None + maintenance_status: Optional[str] = None + zabbix_agent: Optional[int] = Field( + None, validation_alias=AliasChoices("available", "active_available") + ) + status: Optional[MonitoringStatus] = None + macros: List[Macro] = Field(default_factory=list) + interfaces: List[HostInterface] = Field(default_factory=list) + tags: List[HostTag] = Field(default_factory=list) + inventory_mode: InventoryMode = InventoryMode.AUTOMATIC + + def __str__(self) -> str: + return f"{self.host!r} ({self.hostid})" + + @field_validator("inventory", mode="before") + @classmethod + def _empty_list_is_empty_dict(cls, v: Any) -> Any: + """Converts empty list arg to empty dict""" + # Due to a PHP quirk, an empty associative array + # is serialized as an array (list) instead of a map + # while when it's populated it's always a map (dict) + # Note how the docs state that this is an "object", not an array (list) + # https://www.zabbix.com/documentation/current/en/manual/api/reference/host/object#host-inventory + if v == []: + return {} + return v + + +class HostInterface(ZabbixAPIBaseModel): + type: int + ip: str + dns: Optional[str] = None + port: str + useip: bool # this is an int in the API + main: int + # SNMP details + details: Dict[str, Any] = Field(default_factory=dict) + # Values not required for creation: + interfaceid: Optional[str] = None + available: Optional[int] = None + hostid: Optional[str] = None + bulk: Optional[int] = None + + @field_validator("details", mode="before") + @classmethod + def _empty_list_is_empty_dict(cls, v: Any) -> Any: + """Converts empty list arg to empty dict""" + # Due to a PHP quirk, an empty associative array + # is serialized as an 
array (list) instead of a map + # while when it's populated it's always a map (dict) + # Note how the docs state that this is an "object", not an array (list) + # https://www.zabbix.com/documentation/current/en/manual/api/reference/hostinterface/object#details + if v == []: + return {} + return v + + @field_serializer("useip", when_used="json") + def bool_to_int(self, value: bool, _info) -> int: + return int(value) + + +class CreateHostInterfaceDetails(ZabbixAPIBaseModel): + version: int + bulk: Optional[int] = None + community: Optional[str] = None + max_repetitions: Optional[int] = None + securityname: Optional[str] = None + securitylevel: Optional[int] = None + authpassphrase: Optional[str] = None + privpassphrase: Optional[str] = None + authprotocol: Optional[int] = None + privprotocol: Optional[int] = None + contextname: Optional[str] = None + + +class UpdateHostInterfaceDetails(ZabbixAPIBaseModel): + version: Optional[int] = None + bulk: Optional[int] = None + community: Optional[str] = None + max_repetitions: Optional[int] = None + securityname: Optional[str] = None + securitylevel: Optional[int] = None + authpassphrase: Optional[str] = None + privpassphrase: Optional[str] = None + authprotocol: Optional[int] = None + privprotocol: Optional[int] = None + contextname: Optional[str] = None + + +class Proxy(ZabbixAPIBaseModel): + proxyid: str + name: str = Field(..., validation_alias=AliasChoices("host", "name")) + hosts: List[Host] = Field(default_factory=list) + status: Optional[int] = None + operating_mode: Optional[int] = None + address: str = Field( + validation_alias=AliasChoices( + "address", # >=7.0.0 + "proxy_address", # <7.0.0 + ) + ) + compatibility: Optional[int] = None # >= 7.0 + version: Optional[int] = None # >= 7.0 + + def __hash__(self) -> str: + return self.proxyid # kinda hacky, but lets us use it in dicts + + +class MacroBase(ZabbixAPIBaseModel): + macro: str + value: Optional[str] = None # Optional in case secret value + type: int + """Macro type. 0 - text, 1 - secret, 2 - vault secret (>=7.0)""" + description: str + + +class Macro(MacroBase): + """Macro object. Known as 'host macro' in the Zabbix API.""" + + hostid: str + hostmacroid: str + automatic: Optional[int] = None # >= 7.0 only. 0 = user, 1 = discovery rule + hosts: List[Host] = Field(default_factory=list) + templates: List[Template] = Field(default_factory=list) + + +class GlobalMacro(MacroBase): + globalmacroid: str + + +class Item(ZabbixAPIBaseModel): + itemid: str + delay: Optional[str] = None + hostid: Optional[str] = None + interfaceid: Optional[str] = None + key: Optional[str] = Field( + default=None, validation_alias=AliasChoices("key_", "key") + ) + name: Optional[str] = None + type: Optional[int] = None + url: Optional[str] = None + value_type: Optional[int] = None + description: Optional[str] = None + history: Optional[str] = None + lastvalue: Optional[str] = None + hosts: List[Host] = [] + + +class Role(ZabbixAPIBaseModel): + roleid: str + name: str + type: int + readonly: int # 0 = read-write, 1 = read-only + + +class MediaType(ZabbixAPIBaseModel): + mediatypeid: str + name: str + type: int + description: Optional[str] = None + + +class UserMedia(ZabbixAPIBaseModel): + """Media attached to a user object.""" + + # https://www.zabbix.com/documentation/current/en/manual/api/reference/user/object#media + mediatypeid: str + sendto: str + active: int = 0 # 0 = enabled, 1 = disabled (YES REALLY!) 
+ severity: int = 63 # all (1111 in binary - all bits set) + period: str = "1-7,00:00-24:00" # 24/7 + + +class TimePeriod(ZabbixAPIBaseModel): + period: int + timeperiod_type: int + start_date: Optional[datetime] = None + start_time: Optional[int] = None + every: Optional[int] = None + dayofweek: Optional[int] = None + day: Optional[int] = None + month: Optional[int] = None + + +class ProblemTag(ZabbixAPIBaseModel): + tag: str + operator: Optional[int] + value: Optional[str] + + +class Maintenance(ZabbixAPIBaseModel): + maintenanceid: str + name: str + active_since: Optional[datetime] = None + active_till: Optional[datetime] = None + description: Optional[str] = None + maintenance_type: Optional[int] = None + tags_evaltype: Optional[int] = None + timeperiods: List[TimePeriod] = [] + tags: List[ProblemTag] = [] + hosts: List[Host] = [] + hostgroups: List[HostGroup] = Field( + default_factory=list, validation_alias=AliasChoices("groups", "hostgroups") + ) + + +class Event(ZabbixAPIBaseModel): + eventid: str + source: int + object: int + objectid: str + acknowledged: int + clock: datetime + name: str + value: Optional[int] = None # docs seem to imply this is optional + severity: int + # NYI: + # r_eventid + # c_eventid + # cause_eventid + # correlationid + # userid + # suppressed + # opdata + # urls + + +class Trigger(ZabbixAPIBaseModel): + triggerid: str + description: Optional[str] + expression: Optional[str] + event_name: str + opdata: str + comments: str + error: str + flags: int + lastchange: datetime + priority: int + state: int + templateid: Optional[str] + type: int + url: str + url_name: Optional[str] = None # >6.0 + value: int + recovery_mode: int + recovery_expression: str + correlation_mode: int + correlation_tag: str + manual_close: int + uuid: str + hosts: List[Host] = [] + # NYI: + # groups: List[HostGroup] = Field( + # default_factory=list, validation_alias=AliasChoices("groups", "hostgroups") + # ) + # items + # functions + # dependencies + # discoveryRule + # lastEvent + + +class Image(ZabbixAPIBaseModel): + imageid: str + name: str + imagetype: int + # NOTE: Optional so we can fetch an image without its data + # This lets us get the IDs of all images without keeping the data in memory + image: Optional[str] = None + + +class Map(ZabbixAPIBaseModel): + sysmapid: str + name: str + height: int + width: int + backgroundid: Optional[str] = None # will this be an empty string instead? + # Other fields are omitted. We only use this for export and import. 
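+# Example (illustrative): the alias-based fields above are what let a single
+# model accept payloads from different Zabbix versions, e.g. Host accepts both
+# the pre-7.0 "proxy_hostid" key and the 7.0+ "proxyid" key for the same field:
+#
+#   Host(hostid="10084", host="foo.example.com", proxy_hostid="2")  # < 7.0 payload
+#   Host(hostid="10084", host="foo.example.com", proxyid="2")       # >= 7.0 payload
+#
+# Both validate to a Host with proxyid == "2". The IDs and hostname above are
+# made up for illustration.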
+ + +class ImportRule(BaseModel): # does not need to inherit from ZabbixAPIBaseModel + createMissing: bool + updateExisting: Optional[bool] = None + deleteMissing: Optional[bool] = None + + +class ImportRules(ZabbixAPIBaseModel): + discoveryRules: ImportRule + graphs: ImportRule + groups: Optional[ImportRule] = None # < 6.2 + host_groups: Optional[ImportRule] = None # >= 6.2 + hosts: ImportRule + httptests: ImportRule + images: ImportRule + items: ImportRule + maps: ImportRule + mediaTypes: ImportRule + template_groups: Optional[ImportRule] = None # >= 6.2 + templateLinkage: ImportRule + templates: ImportRule + templateDashboards: ImportRule + triggers: ImportRule + valueMaps: ImportRule + templateScreens: Optional[ImportRule] = None # < 6.0 + applications: Optional[ImportRule] = None # < 6.0 + screens: Optional[ImportRule] = None # < 6.0 + + model_config = ConfigDict(validate_assignment=True) + + +class ModelWithHosts(Protocol): + hosts: List[Host] diff --git a/zabbix_auto_config/pyzabbix/utils.py b/zabbix_auto_config/pyzabbix/utils.py new file mode 100644 index 0000000..90a0749 --- /dev/null +++ b/zabbix_auto_config/pyzabbix/utils.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import random +import re +from typing import TYPE_CHECKING +from typing import Optional + +from zabbix_auto_config.exceptions import ZabbixAPICallError +from zabbix_auto_config.exceptions import ZabbixNotFoundError +from zabbix_auto_config.pyzabbix.client import ZabbixAPI + +if TYPE_CHECKING: + from zabbix_auto_config.pyzabbix.types import Proxy + + +def get_random_proxy(client: ZabbixAPI, pattern: Optional[str] = None) -> Proxy: + """Fetch a random proxy, optionally matching a regex pattern.""" + proxies = client.get_proxies() + if not proxies: + raise ZabbixNotFoundError("No proxies found") + if pattern: + try: + re_pattern = re.compile(pattern) + except re.error: + raise ZabbixAPICallError(f"Invalid proxy regex pattern: {pattern!r}") + proxies = [proxy for proxy in proxies if re_pattern.match(proxy.name)] + if not proxies: + raise ZabbixNotFoundError(f"No proxies matching pattern {pattern!r}") + return random.choice(proxies) diff --git a/zabbix_auto_config/state.py b/zabbix_auto_config/state.py index 4b62986..a6ef796 100644 --- a/zabbix_auto_config/state.py +++ b/zabbix_auto_config/state.py @@ -4,7 +4,7 @@ import types from dataclasses import asdict from multiprocessing.managers import BaseManager -from multiprocessing.managers import NamespaceProxy +from multiprocessing.managers import NamespaceProxy # type: ignore # why unexported? from typing import Any from typing import Dict from typing import Optional @@ -14,27 +14,36 @@ @dataclass class State: + """Health state of a process.""" + ok: bool = True - """True if process has not encountered an error in its most recent run.""" + """Status of the process. 
False if an error has occurred in the most recent run.""" + + # BELOW: Only applicable if ok is False + error: Optional[str] = None - """The error message if `ok` is False.""" + """Error message for most recent error.""" + error_type: Optional[str] = None - """The type of error if `ok` is False.""" - error_count: int = 0 - """The number of errors the process has encountered.""" + """Error type name for most recent error""" + error_time: Optional[float] = None - """The timestamp of the most recent error.""" + """Timestamp of the most recent error.""" + + error_count: int = 0 + """Number of errors the process has encountered since starting.""" def asdict(self) -> Dict[str, Any]: """Return dict representation of the State object.""" + # NOTE: just construct dict ourselves instead? return asdict(self) def set_ok(self) -> None: - """Set the current state to OK, clear error information. + """Set current state to OK, clear error information. NOTE ---- - This does not reset the error count. + Does not reset the error count. """ self.ok = True self.error = None @@ -42,6 +51,7 @@ def set_ok(self) -> None: self.error_time = None def set_error(self, exc: Exception) -> None: + """Set current state to error and record error information.""" self.ok = False self.error = str(exc) self.error_type = type(exc).__name__ @@ -72,10 +82,18 @@ def wrapper(*args, **kwargs): return result -Manager.register("State", State, proxytype=StateProxy) +class StateManager(BaseManager): + """Custom subclass of BaseManager with type annotations for custom types.""" + + # We need to do this to make mypy happy with calling .State() on the manager class + # This stub will be overwritten by the actual method created by register() + def State(self) -> State: ... + + +StateManager.register("State", State, proxytype=StateProxy) -def get_manager() -> Manager: - m = Manager() +def get_manager() -> StateManager: + m = StateManager() m.start() return m diff --git a/zabbix_auto_config/utils.py b/zabbix_auto_config/utils.py index 33dab6f..99b81cb 100644 --- a/zabbix_auto_config/utils.py +++ b/zabbix_auto_config/utils.py @@ -8,14 +8,17 @@ import queue import re from pathlib import Path +from typing import TYPE_CHECKING from typing import Dict -from typing import Iterable from typing import List from typing import MutableMapping -from typing import Set -from typing import Tuple from typing import Union +from zabbix_auto_config.pyzabbix.types import HostTag + +if TYPE_CHECKING: + from zabbix_auto_config._types import ZacTags + def is_valid_regexp(pattern: str): try: @@ -33,13 +36,12 @@ def is_valid_ip(ip: str): return False -def zabbix_tags2zac_tags(zabbix_tags: Iterable[Dict[str, str]]) -> Set[Tuple[str, ...]]: - return {tuple(tag.values()) for tag in zabbix_tags} +def zabbix_tags2zac_tags(zabbix_tags: List[HostTag]) -> ZacTags: + return {(tag.tag, tag.value) for tag in zabbix_tags} -def zac_tags2zabbix_tags(zac_tags: Iterable[Tuple[str, str]]) -> List[Dict[str, str]]: - zabbix_tags = [{"tag": tag[0], "value": tag[1]} for tag in zac_tags] - return zabbix_tags +def zac_tags2zabbix_tags(zac_tags: ZacTags) -> List[HostTag]: + return [HostTag(tag=tag[0], value=tag[1]) for tag in zac_tags] def read_map_file(path: Union[str, Path]) -> Dict[str, List[str]]: