Skip to content

Commit

Permalink
Restructure schema locations.
Browse files Browse the repository at this point in the history
* Move schemas from config and context into their own files under the
  antsibull/schemas directory.
* Move doc parsing related schemas to antsibull/schemas/docs.
* Add a deprecated antsibull.schemas.ansible_doc module for backwards
  compatibility.  The new location is antsibull.schemas.docs.ansible_doc.
* Extract validators for converting booleans and None from strings to a
  schemas/validators.py file.  These were used by both config and
  context so it makes sense to put them in one place.
* Move ContextDict into utils.collections to avoid circular import.
  The app_context.py code requires the context schemas and the context
  schemas require the ContextDict data structure.  ContextDict was
  living in app_context.py which would lead to a circular import.
  Moving it to utils/collections.py breaks the cycle.
* Update documentation and tests for the new location of the docs schemas.
  • Loading branch information
abadger committed Jul 12, 2021
1 parent e411747 commit 76b8933
Show file tree
Hide file tree
Showing 16 changed files with 534 additions and 500 deletions.
184 changes: 4 additions & 180 deletions antsibull/app_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,9 @@ def run(args):
import functools
import sys
import typing as t
from collections.abc import Container, Mapping, Sequence, Set
from contextlib import contextmanager

import pydantic as p

from .config import DEFAULT_LOGGING_CONFIG, LoggingModel
from .schemas.context import AppContext, LibContext
from .vendored.collections import ImmutableDict

if sys.version_info < (3, 7):
Expand Down Expand Up @@ -111,179 +108,6 @@ def run(args):
app_ctx = contextvars.ContextVar('app_ctx')


def _make_contained_containers_immutable(obj):
    """
    Make contained containers into immutable containers.

    This is a helper for :func:`_make_immutable`.  It takes an iterable container and
    returns a list in which every contained container has been converted to an immutable
    one.  Be careful what containers you pass in.  Mappings, for instance, will be
    processed without error but the results are likely not what you want because Mappings
    have both a key and a value.
    """
    return [_make_immutable(item) if isinstance(item, Container) else item
            for item in obj]


def _make_immutable(obj: t.Any) -> t.Any:
    """Recursively convert a container and objects inside of it into immutable data types."""
    # str and bytes are Sequences; return them unchanged before the Sequence check below.
    if isinstance(obj, (str, bytes)):
        return obj

    if isinstance(obj, Mapping):
        # Keys are left alone; only container values are converted.
        return ImmutableDict({
            key: _make_immutable(value) if isinstance(value, Container) else value
            for key, value in obj.items()})

    if isinstance(obj, Set):
        return frozenset(_make_contained_containers_immutable(obj))

    if isinstance(obj, Sequence):
        return tuple(_make_contained_containers_immutable(obj))

    # Non-container objects pass through unchanged.
    return obj


class ContextDict(ImmutableDict):
    """An ImmutableDict whose values are recursively converted to immutable containers."""

    def __init__(self, *args, **kwargs) -> None:
        if len(args) == 1 and not kwargs and isinstance(args[0], Mapping):
            # Fast path: a single Mapping argument needs no intermediate dict.
            source = args[0]
        else:
            # Otherwise let the dict constructor normalize the arguments for us.
            source = dict(*args, **kwargs)

        super().__init__({key: _make_immutable(value) for key, value in source.items()})

    @classmethod
    def __get_validators__(cls):
        # pydantic hook: yields the callables pydantic uses to validate/coerce field values.
        yield cls.validate_and_convert

    @classmethod
    def validate_and_convert(cls, value: t.Mapping) -> 'ContextDict':
        """Return *value* as a :class:`ContextDict`, converting it if necessary."""
        if isinstance(value, ContextDict):
            # Optimization: already recursively immutable, no need to convert again.
            return value

        # Typically this converts a plain dict into a ContextDict.
        return cls(value)


class BaseModel(p.BaseModel):
    """
    Base class for all Context object models.

    Provides the shared pydantic configuration via the nested :class:`Config`.

    :cvar Config: Sets the following information:

        :cvar allow_mutation: ``False``.  Prevents setattr on the contexts.
        :cvar extra: ``p.Extra.forbid``.  Prevents extra fields on the contexts.
        :cvar validate_all: ``True``.  Validates default values as well as user supplied ones.
    """

    class Config:
        """
        Set default configuration for building the context models.

        :cvar allow_mutation: ``False``.  Prevents setattr on the contexts.
        :cvar extra: ``p.Extra.forbid``.  Prevents extra fields on the contexts.
        :cvar validate_all: ``True``.  Validates default values as well as user supplied ones.
        """

        allow_mutation = False
        extra = p.Extra.forbid
        validate_all = True


class AppContext(BaseModel):
    """
    Structure and defaults of the app_ctx.

    :ivar extra: A mapping of arg/config keys to values.  Anything in here is unchecked by
        a schema.  These are usually leftover command line arguments and config entries.
        If values stored in extras need default values, they need to be set outside of the
        context, or the entries can be given an actual entry in the AppContext to take
        advantage of the schema's checking, normalization, and default setting.
    :ivar ansible_base_url: Url to the ansible-core git repo.
    :ivar breadcrumbs: If True, build with breadcrumbs on the plugin pages (this takes more
        memory).
    :ivar galaxy_url: URL of the galaxy server to get collection info from.
    :ivar indexes: If True, create index pages for all collections and all plugins in a
        collection.
    :ivar logging_cfg: Configuration of the application logging.
    :ivar pypi_url: URL of the pypi server to query for information.
    """

    extra: ContextDict = ContextDict()
    # pyre-ignore[8]: https://github.com/samuelcolvin/pydantic/issues/1684
    ansible_base_url: p.HttpUrl = 'https://github.com/ansible/ansible/'
    breadcrumbs: p.StrictBool = True
    # pyre-ignore[8]: https://github.com/samuelcolvin/pydantic/issues/1684
    galaxy_url: p.HttpUrl = 'https://galaxy.ansible.com/'
    indexes: p.StrictBool = True
    logging_cfg: LoggingModel = LoggingModel.parse_obj(DEFAULT_LOGGING_CONFIG)
    # pyre-ignore[8]: https://github.com/samuelcolvin/pydantic/issues/1684
    pypi_url: p.HttpUrl = 'https://pypi.org/'

    @p.validator('breadcrumbs', 'indexes', pre=True)
    def convert_to_bool(cls, value):
        """Coerce string and integer spellings of booleans into real bools."""
        if isinstance(value, str):
            # Any string other than an explicit "false" spelling counts as True.
            return value.lower() not in ('0', 'false', 'no', 'n', 'f', '')
        if isinstance(value, int):
            return value != 0
        return value


class LibContext(BaseModel):
    """
    Structure and defaults of the lib_ctx.

    :ivar chunksize: Number of bytes to read or write at one time for network or file IO.
    :ivar process_max: Maximum number of worker processes for parallel operations.
    :ivar thread_max: Maximum number of helper threads for parallel operations.
    :ivar max_retries: Maximum number of times to retry an http request (in case of
        timeouts and other transient problems).
    :ivar doc_parsing_backend: The backend to use for parsing the documentation strings
        from plugins.  'ansible-internal' is the fastest.  'ansible-doc' exists in case of
        problems with the ansible-internal backend.
    """

    chunksize: int = 4096
    doc_parsing_backend: str = 'ansible-internal'
    max_retries: int = 10
    process_max: t.Optional[int] = None
    thread_max: int = 64

    @p.validator('process_max', pre=True)
    def convert_to_none(cls, value):
        """
        Convert process_max "null" and "none" to None.

        When this is set in a config file, it could be the string "None" or "Null" to
        mean, use all available CPU cores.  The :python:mod:`multiprocessing` API that
        this is sent to needs a Python None, though.  So convert the string into an
        actual None in this validator.
        """
        if isinstance(value, str) and value.lower() in ('none', 'null'):
            return None
        return value


class ContextReturn(t.NamedTuple):
"""
NamedTuple for the return value of :func:`create_contexts`.
Expand Down Expand Up @@ -461,9 +285,9 @@ def app_and_lib_context(context_data: ContextReturn):
with app_and_lib_context(context_data):
do_something()
"""
with lib_context(context_data.lib_ctx) as lib_ctx:
with app_context(context_data.app_ctx) as app_ctx:
yield (app_ctx, lib_ctx)
with lib_context(context_data.lib_ctx) as new_lib_ctx:
with app_context(context_data.app_ctx) as new_app_ctx:
yield (new_app_ctx, new_lib_ctx)


#
Expand Down
10 changes: 5 additions & 5 deletions antsibull/cli/doc_commands/stable.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,11 +110,11 @@ def normalize_plugin_info(plugin_type: str,
:arg plugin_type: The type of plugins that we're getting docs for.
:arg plugin_info: Mapping of plugin_info. The toplevel keys are plugin names.
See the schema in :mod:`antsibull.schemas` for what the data should look like and just how
much conversion we can perform on it.
See the schema in :mod:`antsibull.schemas.docs` for what the data should look like and just
how much conversion we can perform on it.
:returns: A tuple containing a "copy" of plugin_info with all of the data normalized and a list
of nonfatal errors. The plugin_info dict will follow the structure expressed in the schemas
in :mod:`antsibull.schemas`. The nonfatal errors are strings representing the problems
in :mod:`antsibull.schemas.docs`. The nonfatal errors are strings representing the problems
encountered.
"""
new_info = {}
Expand Down Expand Up @@ -149,8 +149,8 @@ async def normalize_all_plugin_info(plugin_info: t.Mapping[str, t.Mapping[str, t
Normalize the data in plugin_info so that it is ready to be passed to the templates.
:arg plugin_info: Mapping of information about plugins. This contains information about all of
the plugins that are to be documented. See the schema in :mod:`antsibull.schemas` for the
structure of the information.
the plugins that are to be documented. See the schema in :mod:`antsibull.schemas.docs` for
the structure of the information.
:returns: A tuple of plugin_info (this is a "copy" of the input plugin_info with all of the
data normalized) and a mapping of errors. The plugin_info may have less records than the
input plugin_info if there were plugin records which failed to validate. The mapping of
Expand Down
142 changes: 1 addition & 141 deletions antsibull/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,160 +9,20 @@
import typing as t

import perky
import pydantic as p
import twiggy.formats
import twiggy.outputs

from .logging import log
from .schemas.config import ConfigModel


mlog = log.fields(mod=__name__)

#: Valid choices for a logging level field
LEVEL_CHOICES_F = p.Field(..., regex='^(CRITICAL|ERROR|WARNING|NOTICE|INFO|DEBUG|DISABLED)$')

#: Valid choices for a logging level field
DOC_PARSING_BACKEND_CHOICES_F = p.Field(
'ansible-internal', regex='^(ansible-doc|ansible-internal)$')

#: Valid choice of the logging version field
VERSION_CHOICES_F = p.Field(..., regex=r'1\.0')

#: System config file location.
SYSTEM_CONFIG_FILE = '/etc/antsibull.cfg'

#: Per-user config file location.
USER_CONFIG_FILE = '~/.antsibull.cfg'


#
# Configuration file schema
#

class BaseModel(p.BaseModel):
    """Base class for the config file models; shares the pydantic configuration."""

    class Config:
        # Prevent setattr on config instances once they are constructed.
        allow_mutation = False
        # Reject unknown fields instead of silently ignoring them.
        extra = p.Extra.forbid
        # Validate default values as well as user supplied ones.
        validate_all = True


# pyre-ignore[13]: BaseModel initializes attributes when data is loaded
class LogFiltersModel(BaseModel):
    """Schema for one entry in a log emitter's ``filters`` list."""

    # Filter callable or the dotted-path name of one.
    filter: t.Union[str, t.Callable]
    # NOTE(review): presumably positional/keyword args handed to the filter when it is
    # constructed — confirm against the twiggy documentation.
    args: t.Sequence[t.Any] = []
    kwargs: t.Mapping[str, t.Any] = {}


# pyre-ignore[13]: BaseModel initializes attributes when data is loaded
class LogEmitterModel(BaseModel):
    """Schema for one entry in the logging config's ``emitters`` mapping."""

    # Name of the output (a key of LoggingModel.outputs) this emitter sends to.
    output_name: str
    # Log level name; constrained by the LEVEL_CHOICES_F regex.
    level: str = LEVEL_CHOICES_F
    # Optional filters applied to records before they are emitted.
    filters: t.List[LogFiltersModel] = []


# pyre-ignore[13]: BaseModel initializes attributes when data is loaded
class LogOutputModel(BaseModel):
    """
    Schema for one entry in the logging config's ``outputs`` mapping.

    For ``twiggy.outputs.FileOutput`` entries, tildes in the filename (the first
    positional arg, or the ``name`` keyword arg) are expanded to the user's home
    directory.
    """

    # Output callable or the dotted-path name of one.
    output: t.Union[str, t.Callable]
    args: t.Sequence[t.Any] = []
    format: t.Union[str, t.Callable] = twiggy.formats.line_format
    kwargs: t.Mapping[str, t.Any] = {}

    @p.validator('args')
    def expand_home_dir_args(cls, args_field: t.MutableSequence,
                             values: t.Mapping) -> t.MutableSequence:
        """Expand tilde in the arguments of specific outputs."""
        # Use .get(): if the output field itself failed validation it is absent from
        # values, and subscripting would raise a spurious KeyError out of this validator.
        if values.get('output') in ('twiggy.outputs.FileOutput', twiggy.outputs.FileOutput):
            if args_field:
                # The first positional argument of FileOutput is the filename.
                args_field[0] = os.path.expanduser(args_field[0])
        return args_field

    @p.validator('kwargs')
    def expand_home_dir_kwargs(cls, kwargs_field: t.MutableMapping,
                               values: t.Mapping) -> t.MutableMapping:
        """Expand tilde in the keyword arguments of specific outputs."""
        # Same .get() rationale as expand_home_dir_args above.
        if values.get('output') in ('twiggy.outputs.FileOutput', twiggy.outputs.FileOutput):
            if 'name' in kwargs_field:
                kwargs_field['name'] = os.path.expanduser(kwargs_field['name'])
        return kwargs_field


class LoggingModel(BaseModel):
    """Schema for a complete logging configuration (version, outputs, emitters)."""

    # Mapping of emitter name -> emitter config.
    emitters: t.Optional[t.Dict[str, LogEmitterModel]] = {}
    # NOTE(review): presumably twiggy's incremental dict-config flag (merge instead of
    # replace existing config) — confirm against the twiggy documentation.
    incremental: bool = False
    # Mapping of output name -> output config; referenced by emitters via output_name.
    outputs: t.Optional[t.Dict[str, LogOutputModel]] = {}
    # Config format version; constrained to '1.0' by VERSION_CHOICES_F.
    version: str = VERSION_CHOICES_F


#: Default logging configuration: everything at INFO and above to ~/antsibull.log,
#: warnings and above additionally to the 'stderr' stream output.
DEFAULT_LOGGING_CONFIG = LoggingModel.parse_obj(
    {'version': '1.0',
     'outputs': {
         # File output; the tilde is expanded by LogOutputModel's validators.
         'logfile': {
             'output': 'twiggy.outputs.FileOutput',
             'args': [
                 '~/antsibull.log'
             ]
         },
         'stderr': {
             'output': 'twiggy.outputs.StreamOutput',
             'format': 'twiggy.formats.shell_format'
         },
     },
     'emitters': {
         # All records at INFO and above go to the log file.
         'all': {
             'level': 'INFO',
             'output_name': 'logfile',
             'filters': []
         },
         # Warnings and above also go to the 'stderr' output.
         'problems': {
             'level': 'WARNING',
             'output_name': 'stderr',
             'filters': []
         },
     }
     })


class ConfigModel(BaseModel):
    """
    Schema for the antsibull configuration file.

    Field names and defaults mirror the AppContext/LibContext schemas so values loaded
    from a config file can seed those contexts.
    """

    # pyre-ignore[8]: https://github.com/samuelcolvin/pydantic/issues/1684
    ansible_base_url: p.HttpUrl = 'https://github.com/ansible/ansible'
    breadcrumbs: p.StrictBool = True
    chunksize: int = 4096
    doc_parsing_backend: str = DOC_PARSING_BACKEND_CHOICES_F
    # pyre-ignore[8]: https://github.com/samuelcolvin/pydantic/issues/1684
    galaxy_url: p.HttpUrl = 'https://galaxy.ansible.com/'
    indexes: p.StrictBool = True
    logging_cfg: LoggingModel = DEFAULT_LOGGING_CONFIG
    max_retries: int = 10
    process_max: t.Optional[int] = None
    # pyre-ignore[8]: https://github.com/samuelcolvin/pydantic/issues/1684
    pypi_url: p.HttpUrl = 'https://pypi.org/'
    thread_max: int = 80

    @p.validator('process_max', pre=True)
    def convert_to_none(cls, value):
        """
        Convert process_max "null" and "none" (any case) to an actual None.

        Guard with isinstance so non-string values (None, or an int read from a config
        file) pass through unchanged instead of raising AttributeError on .lower().
        This also matches the equivalent validator on the lib context schema.
        """
        if isinstance(value, str) and value.lower() in ('none', 'null'):
            return None
        return value

    @p.validator('breadcrumbs', 'indexes', pre=True)
    def convert_to_bool(cls, value):
        """Coerce string and integer spellings of booleans into real bools."""
        if isinstance(value, str):
            # Any string other than an explicit "false" spelling counts as True.
            return value.lower() not in ('0', 'false', 'no', 'n', 'f', '')
        if isinstance(value, int):
            return value != 0
        return value


def find_config_files(conf_files: t.Iterable[str]) -> t.List[str]:
"""
Find all config files that exist.
Expand Down
Loading

0 comments on commit 76b8933

Please sign in to comment.