Skip to content

Commit

Permalink
Support Python checks defined by a pyproject.toml file
Browse files Browse the repository at this point in the history
  • Loading branch information
ofek committed Jan 28, 2022
1 parent 9a4cf06 commit 01c78dc
Show file tree
Hide file tree
Showing 7 changed files with 256 additions and 70 deletions.
51 changes: 34 additions & 17 deletions datadog_checks_dev/datadog_checks/dev/tooling/commands/dep.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,15 @@
from packaging.markers import InvalidMarker, Marker
from packaging.specifiers import SpecifierSet

from ...fs import read_file_lines, write_file_lines
from ...fs import basepath, read_file_lines, write_file_lines
from ..constants import get_agent_requirements
from ..dependencies import read_agent_dependencies, read_check_dependencies
from ..utils import get_check_req_file, get_valid_checks
from ..dependencies import (
read_agent_dependencies,
read_check_dependencies,
update_check_dependencies,
update_check_dependencies_at,
)
from ..utils import get_check_req_file, get_valid_checks, has_project_file
from .console import CONTEXT_SETTINGS, abort, echo_failure, echo_info
from .validate.licenses import extract_classifier_value

Expand Down Expand Up @@ -78,21 +83,27 @@ def pin(package, version, marker):
files_to_update[dependency_definition.file_path].append(dependency_definition)

for file_path, dependency_definitions in sorted(files_to_update.items()):
old_lines = read_file_lines(file_path)

new_lines = old_lines.copy()

for dependency_definition in dependency_definitions:
requirement = dependency_definition.requirement
if marker != requirement.marker:
continue

requirement.specifier = SpecifierSet(f'=={version}')
new_lines[dependency_definition.line_number] = f'{requirement}\n'

if new_lines != old_lines:
files_updated += 1
write_file_lines(file_path, new_lines)
if basepath(file_path) == 'pyproject.toml':
if update_check_dependencies_at(file_path, dependency_definitions):
files_updated += 1
else:
old_lines = read_file_lines(file_path)

new_lines = old_lines.copy()

for dependency_definition in dependency_definitions:
new_lines[dependency_definition.line_number] = f'{dependency_definition.requirement}\n'

if new_lines != old_lines:
files_updated += 1
write_file_lines(file_path, new_lines)

if not files_updated:
abort('No dependency definitions to update')
Expand Down Expand Up @@ -167,15 +178,21 @@ def sync():

if deps_to_update:
files_updated += 1
check_req_file = get_check_req_file(check_name)
old_lines = read_file_lines(check_req_file)
new_lines = old_lines.copy()

for dependency_definition, new_version in deps_to_update.items():
dependency_definition.requirement.specifier = new_version
new_lines[dependency_definition.line_number] = f'{dependency_definition.requirement}\n'

write_file_lines(check_req_file, new_lines)
if has_project_file(check_name):
update_check_dependencies(check_name, list(deps_to_update))
else:
check_req_file = get_check_req_file(check_name)
old_lines = read_file_lines(check_req_file)
new_lines = old_lines.copy()

for dependency_definition, new_version in deps_to_update.items():
dependency_definition.requirement.specifier = new_version
new_lines[dependency_definition.line_number] = f'{dependency_definition.requirement}\n'

write_file_lines(check_req_file, new_lines)

if not files_updated:
echo_info('All dependencies synced.')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from ...constants import get_agent_requirements, get_root
from ...dependencies import read_agent_dependencies, read_check_base_dependencies, read_check_dependencies
from ...testing import process_checks_option
from ...utils import complete_valid_checks
from ...utils import complete_valid_checks, get_project_file, has_project_file
from ..console import CONTEXT_SETTINGS, abort, annotate_error, annotate_errors, echo_failure


Expand Down Expand Up @@ -46,7 +46,13 @@ def verify_base_dependency(source, name, base_versions, force_pinned=True, min_b
failed = False
for specifier_set, dependency_definitions in base_versions.items():
checks = sorted(dep.check_name for dep in dependency_definitions)
file = os.path.join(get_root(), format_check_usage(checks, source), 'setup.py')
files = []
for check_name in checks:
if has_project_file(check_name):
files.append(get_project_file(check_name))
else:
files.append(os.path.join(get_root(), check_name, 'setup.py'))
file = ','.join(files)
if not specifier_set and force_pinned:
message = f'Unspecified version found for dependency `{name}`: {format_check_usage(checks, source)}'
echo_failure(message)
Expand Down Expand Up @@ -170,31 +176,36 @@ def dep(check, require_base_check_version, min_base_check_version):
abort()

for check_name in checks:
req_file = os.path.join(root, check_name, 'requirements.in')
if has_project_file(check_name):
req_source = get_project_file(check_name)
base_req_source = req_source
else:
req_source = os.path.join(root, check_name, 'requirements.in')
base_req_source = os.path.join(root, check_name, 'setup.py')

check_dependencies, check_errors = read_check_dependencies(check_name)
annotate_errors(req_file, check_errors)
annotate_errors(req_source, check_errors)
if check_errors:
for check_error in check_errors:
echo_failure(check_error)
abort()

check_base_dependencies, check_base_errors = read_check_base_dependencies(check_name)
base_req_file = os.path.join(root, check_name, 'setup.py')
annotate_errors(base_req_file, check_base_errors)
annotate_errors(base_req_source, check_base_errors)
if check_base_errors:
for check_error in check_base_errors:
echo_failure(check_error)
abort()

for name, versions in sorted(check_dependencies.items()):
if not verify_dependency('Checks', name, versions, req_file):
if not verify_dependency('Checks', name, versions, req_source):
failed = True

if name not in agent_dependencies:
failed = True
message = f'Dependency needs to be synced: {name}'
echo_failure(message)
annotate_error(req_file, message)
annotate_error(req_source, message)

check_base_dependencies, check_base_errors = read_check_base_dependencies(checks)
check_dependencies, check_errors = read_check_dependencies(checks)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,58 +5,76 @@

import click

from ....fs import basepath
from ...testing import process_checks_option
from ...utils import complete_valid_checks, get_setup_file, normalize_package_name, read_setup_file
from ...utils import (
complete_valid_checks,
get_project_file,
get_setup_file,
has_project_file,
load_project_file_cached,
normalize_package_name,
normalize_project_name,
read_setup_file,
)
from ..console import CONTEXT_SETTINGS, abort, annotate_display_queue, echo_failure, echo_info, echo_success

# Some integrations aren't installable via the integration install command, so exclude them from the name requirements
EXCLUDE_CHECKS = ["datadog_checks_downloader", "datadog_checks_dev", "datadog_checks_base"]


@click.command('package', context_settings=CONTEXT_SETTINGS, short_help='Validate `setup.py` files')
def read_project_name(check_name):
    """Return a ``(source file path, declared package name)`` pair for a check.

    Prefers the ``pyproject.toml`` project metadata when the check has one;
    otherwise scans ``setup.py`` for a ``name=...`` keyword argument. Falls
    through (implicitly returning ``None``) when no name can be located.
    """
    if has_project_file(check_name):
        declared_name = load_project_file_cached(check_name)['project']['name']
        return get_project_file(check_name), declared_name

    # Legacy path: grep the setup.py source for the package name
    for _, source_line in read_setup_file(check_name):
        match = re.search("name=['\"](.*)['\"]", source_line)
        if match is not None:
            return get_setup_file(check_name), match.group(1)


@click.command('package', context_settings=CONTEXT_SETTINGS, short_help='Validate Python package metadata')
@click.argument('check', autocompletion=complete_valid_checks, required=False)
def package(check):
"""Validate all `setup.py` files.
"""Validate all files for Python package metadata.
If `check` is specified, only the check will be validated, if check value is 'changed' will only apply to changed
checks, an 'all' or empty `check` value will validate all README files.
checks, an 'all' or empty `check` value will validate all files.
"""

checks = process_checks_option(check, source='valid_checks', validate=True)
echo_info(f"Validating setup.py files for {len(checks)} checks ...")
echo_info(f'Validating files for {len(checks)} checks ...')

failed_checks = 0
ok_checks = 0

for check in checks:
display_queue = []
file_failed = False
setup_file_path = get_setup_file(check)
if check in EXCLUDE_CHECKS:
continue

lines = read_setup_file(check)
for _, line in lines:
# The name field must match the pattern: `datadog-<folder_name>`
match = re.search("name=['\"](.*)['\"]", line)
if match:
group = match.group(1)
# Following PEP 503, lets normalize the groups and validate those
# https://www.python.org/dev/peps/pep-0503/#normalized-names
group = normalize_package_name(group)
normalized_package_name = normalize_package_name(f"datadog-{check}")
if group != normalized_package_name:
file_failed = True
display_queue.append(
(echo_failure, f" The name in setup.py: {group} must be: `{normalized_package_name}`")
)
source, project_name = read_project_name(check)
normalization_function = normalize_project_name if has_project_file(check) else normalize_package_name
project_name = normalization_function(project_name)
normalized_project_name = normalization_function(f'datadog-{check}')
# The name field must match the pattern: `datadog-<folder_name>`
if project_name != normalized_project_name:
file_failed = True
display_queue.append(
(
echo_failure,
f' The name in {basepath(source)}: {project_name} must be: `{normalized_project_name}`',
)
)

if file_failed:
failed_checks += 1
# Display detailed info if file is invalid
echo_info(f'{check}... ', nl=False)
echo_failure(' FAILED')
annotate_display_queue(setup_file_path, display_queue)
annotate_display_queue(source, display_queue)
for display_func, message in display_queue:
display_func(message)
else:
Expand Down
94 changes: 86 additions & 8 deletions datadog_checks_dev/datadog_checks/dev/tooling/dependencies.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,19 @@
import os
from collections import defaultdict
from copy import deepcopy

from packaging.requirements import InvalidRequirement, Requirement

from ..fs import stream_file_lines
from .constants import get_agent_requirements, get_root
from .utils import get_valid_checks
from .utils import (
get_project_file,
get_valid_checks,
has_project_file,
load_project_file_at_cached,
load_project_file_cached,
write_project_file_at,
)


class DependencyDefinition:
Expand Down Expand Up @@ -38,7 +46,22 @@ def create_dependency_data():
return defaultdict(lambda: defaultdict(lambda: []))


def load_dependency_data(req_file, dependencies, errors, check_name=None):
def load_dependency_data_from_metadata(check_name, dependencies, errors):
    """Collect a check's dependencies declared in its ``pyproject.toml``.

    Reads the ``deps`` optional-dependency group and records every valid
    requirement in ``dependencies`` (keyed by normalized name, then specifier);
    malformed entries are reported through ``errors`` instead of raising.
    """
    project_metadata = load_project_file_cached(check_name)
    declared_dependencies = project_metadata['project'].get('optional-dependencies', {}).get('deps', [])

    for check_dependency in declared_dependencies:
        try:
            requirement = Requirement(check_dependency)
        except InvalidRequirement as e:
            errors.append(f'File `pyproject.toml` has an invalid dependency: `{check_dependency}`\n{e}')
            continue

        # PEP 503-style normalization so lookups are spelling-insensitive
        normalized_name = requirement.name.lower().replace('_', '-')
        dependencies[normalized_name][requirement.specifier].append(
            DependencyDefinition(normalized_name, requirement, get_project_file(check_name), None, check_name)
        )


def load_dependency_data_from_requirements(req_file, dependencies, errors, check_name=None):
for i, line in enumerate(stream_file_lines(req_file)):
line = line.strip()
if not line or line.startswith('#'):
Expand All @@ -55,7 +78,26 @@ def load_dependency_data(req_file, dependencies, errors, check_name=None):
dependency.append(DependencyDefinition(name, req, req_file, i, check_name))


def load_base_check(req_file, dependencies, errors, check_name=None):
def load_base_check(check_name, dependencies, errors):
    """Record the ``datadog-checks-base`` requirement from a check's ``pyproject.toml``.

    Scans the project's ``dependencies`` list for the base check package and
    stores its definition in ``dependencies``. Appends an error when the base
    check dependency is missing or when an entry cannot be parsed.
    """
    project_metadata = load_project_file_cached(check_name)
    declared_dependencies = project_metadata['project'].get('dependencies', [])

    base_check_found = False
    for check_dependency in declared_dependencies:
        try:
            requirement = Requirement(check_dependency)
        except InvalidRequirement as e:
            errors.append(f'File `pyproject.toml` has an invalid dependency: `{check_dependency}`\n{e}')
            continue

        normalized_name = requirement.name.lower().replace('_', '-')
        if normalized_name == 'datadog-checks-base':
            dependencies[normalized_name][requirement.specifier].append(
                DependencyDefinition(normalized_name, requirement, get_project_file(check_name), None, check_name)
            )
            base_check_found = True
            break

    if not base_check_found:
        errors.append(f'File `{check_name}/pyproject.toml` missing base check dependency `datadog-checks-base`')


def load_base_check_legacy(req_file, dependencies, errors, check_name=None):
for i, line in enumerate(stream_file_lines(req_file)):
line = line.strip()
if line.startswith('CHECKS_BASE_REQ'):
Expand Down Expand Up @@ -86,8 +128,11 @@ def read_check_dependencies(check=None):
checks = sorted(get_valid_checks()) if check is None else [check]

for check_name in checks:
req_file = os.path.join(root, check_name, 'requirements.in')
load_dependency_data(req_file, dependencies, errors, check_name)
if has_project_file(check_name):
load_dependency_data_from_metadata(check_name, dependencies, errors)
else:
req_file = os.path.join(root, check_name, 'requirements.in')
load_dependency_data_from_requirements(req_file, dependencies, errors, check_name)

return dependencies, errors

Expand All @@ -105,16 +150,49 @@ def read_check_base_dependencies(check=None):
for check_name in checks:
if check_name.startswith('datadog_checks_'):
continue
req_file = os.path.join(root, check_name, 'setup.py')
load_base_check(req_file, dependencies, errors, check_name)

if has_project_file(check_name):
load_base_check(check_name, dependencies, errors)
else:
req_file = os.path.join(root, check_name, 'setup.py')
load_base_check_legacy(req_file, dependencies, errors, check_name)

return dependencies, errors


def update_check_dependencies_at(path, dependency_definitions):
    """Apply updated requirements to the ``deps`` group of a ``pyproject.toml`` file.

    For every existing entry in ``project.optional-dependencies.deps`` whose
    project name matches one of ``dependency_definitions``, replace it with the
    new requirement string. When anything changed, the list is re-sorted and
    the file is rewritten via ``write_project_file_at``.

    Returns True if the file was modified, False otherwise.
    """
    # Deep copy so the cached project data is never mutated in place
    project_data = deepcopy(load_project_file_at_cached(path))
    dependencies = project_data['project'].setdefault('optional-dependencies', {}).setdefault('deps', [])

    def normalize(name):
        # Same PEP 503-style normalization the reader functions use, so that
        # e.g. `Foo_Bar` and `foo-bar` are recognized as the same project
        return name.lower().replace('_', '-')

    updated = False
    for i, old_dependency in enumerate(dependencies):
        old_requirement = Requirement(old_dependency)

        for dependency_definition in dependency_definitions:
            new_requirement = dependency_definition.requirement
            if normalize(new_requirement.name) == normalize(old_requirement.name):
                if str(new_requirement) != str(old_requirement):
                    dependencies[i] = str(new_requirement)
                    updated = True

                # Only the first matching definition applies to this entry
                break

    if updated:
        # sort, and prevent backslash escapes since strings are written using double quotes
        dependencies[:] = sorted(str(dependency).replace('"', "'") for dependency in dependencies)
        write_project_file_at(path, project_data)

    return updated


def update_check_dependencies(check_name, dependency_definitions):
    """Update the named check's ``pyproject.toml`` dependencies.

    Thin wrapper around ``update_check_dependencies_at`` that resolves the
    check's project file path. Returns True if the file was modified, False
    otherwise (previously the result was discarded; propagating it keeps the
    wrapper consistent with the underlying helper and lets callers react).
    """
    return update_check_dependencies_at(get_project_file(check_name), dependency_definitions)


def read_agent_dependencies():
    """Parse every dependency pinned in the agent requirements file.

    Returns a ``(dependencies, errors)`` pair: ``dependencies`` maps
    normalized package name -> specifier set -> dependency definitions,
    and ``errors`` lists any invalid requirement lines encountered.
    """
    errors = []
    dependencies = create_dependency_data()

    load_dependency_data_from_requirements(get_agent_requirements(), dependencies, errors)

    return dependencies, errors
Loading

0 comments on commit 01c78dc

Please sign in to comment.