
Commit

WIP
ewjoachim committed Jan 1, 2024
1 parent 055b09f commit fb8f291
Showing 19 changed files with 1,196 additions and 643 deletions.
9 changes: 9 additions & 0 deletions action.yml
@@ -70,6 +70,15 @@ inputs:
the badge will be orange. Otherwise it will be red.
default: 70
required: false
MAX_FILES_IN_COMMENT:
description: >
Maximum number of files to display in the comment. If there are more
files than this number, they will only appear in the workflow summary.
The selected files are the ones with the most new uncovered lines. The
closer this number gets to 35, the higher the risk that it reaches
GitHub's maximum comment size limit of 65536 characters.
default: 25
required: false
MERGE_COVERAGE_FILES:
description: >
If true, will run `coverage combine` before reading the `.coverage` file.
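The new MAX_FILES_IN_COMMENT input above trades completeness for staying under GitHub's 65536-character comment limit: only the files with the most new uncovered lines are shown in the PR comment, and the rest appear only in the workflow summary. As a rough illustration of that selection rule, a minimal Python sketch follows; the helper name and the (path, new uncovered lines) input shape are assumptions made for the example, not the action's actual implementation.

# Hypothetical sketch of the selection rule described in action.yml:
# keep the files with the most new uncovered lines, up to max_files,
# and report the total count so readers know files were omitted.
def select_files_for_comment(
    files: list[tuple[str, int]],  # (path, new uncovered lines) -- assumed shape
    max_files: int = 25,
) -> tuple[list[tuple[str, int]], int]:
    ranked = sorted(files, key=lambda item: item[1], reverse=True)
    return ranked[:max_files], len(files)

# With max_files=2, only the two worst offenders make it into the comment.
selected, total = select_files_for_comment(
    [("a.py", 4), ("b.py", 12), ("c.py", 1)], max_files=2
)
assert selected == [("b.py", 12), ("a.py", 4)]
assert total == 3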
8 changes: 5 additions & 3 deletions coverage_comment/badge.py
@@ -26,8 +26,8 @@ def get_badge_color(

def get_evolution_badge_color(
delta: decimal.Decimal | int,
up_is_good: bool,
neutral_color: str = "grey",
up_is_good: bool = True,
neutral_color: str = "lightgrey",
) -> str:
if delta == 0:
return neutral_color
@@ -67,8 +67,10 @@ def compute_badge_image(


def get_static_badge_url(label: str, message: str, color: str) -> str:
if not color or not message:
raise ValueError("color and message are required")
code = "-".join(
e.replace("_", "__").replace("-", "--") for e in (label, message, color)
e.replace("_", "__").replace("-", "--") for e in (label, message, color) if e
)
return "https://img.shields.io/badge/" + urllib.parse.quote(f"{code}.svg")

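The changes to badge.py above make get_static_badge_url reject an empty message or color, while an empty label is now simply dropped from the joined code. For reference, the shields.io escaping doubles underscores and dashes inside each part before the parts are joined with single dashes, and urllib.parse.quote percent-encodes the rest. A small self-contained check of that behaviour, reproducing the function as it appears in the diff:

import urllib.parse

def get_static_badge_url(label: str, message: str, color: str) -> str:
    # Copied from the diff above: an empty message or color is an error,
    # and an empty label is skipped when joining.
    if not color or not message:
        raise ValueError("color and message are required")
    code = "-".join(
        e.replace("_", "__").replace("-", "--") for e in (label, message, color) if e
    )
    return "https://img.shields.io/badge/" + urllib.parse.quote(f"{code}.svg")

# "%" is percent-encoded; dashes inside a part are doubled so they are not
# mistaken for the label/message/color delimiter.
assert (
    get_static_badge_url("Coverage", "85%", "brightgreen")
    == "https://img.shields.io/badge/Coverage-85%25-brightgreen.svg"
)
assert (
    get_static_badge_url("", "up-to-date", "green")
    == "https://img.shields.io/badge/up--to--date-green.svg"
)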
28 changes: 5 additions & 23 deletions coverage_comment/coverage.py
@@ -3,11 +3,9 @@
import dataclasses
import datetime
import decimal
import functools
import itertools
import json
import pathlib
from collections.abc import Iterable, Sequence
from collections.abc import Sequence

from coverage_comment import log, subprocess

@@ -74,10 +72,6 @@ class FileDiffCoverage:
def violation_lines(self) -> list[int]:
return self.missing_statements

@functools.cached_property
def violation_lines_collapsed(self):
return list(collapse_lines(self.violation_lines))


@dataclasses.dataclass
class DiffCoverage:
@@ -201,10 +195,8 @@ def extract_info(data: dict, coverage_path: pathlib.Path) -> Coverage:
covered_lines=file_data["summary"]["covered_lines"],
num_statements=file_data["summary"]["num_statements"],
percent_covered=compute_coverage(
file_data["summary"]["covered_lines"]
+ file_data["summary"].get("covered_branches", 0),
file_data["summary"]["num_statements"]
+ file_data["summary"].get("num_branches", 0),
file_data["summary"]["covered_lines"],
file_data["summary"]["num_statements"],
),
missing_lines=file_data["summary"]["missing_lines"],
excluded_lines=file_data["summary"]["excluded_lines"],
@@ -222,10 +214,8 @@ def extract_info(data: dict, coverage_path: pathlib.Path) -> Coverage:
covered_lines=data["totals"]["covered_lines"],
num_statements=data["totals"]["num_statements"],
percent_covered=compute_coverage(
data["totals"]["covered_lines"]
+ data["totals"].get("covered_branches", 0),
data["totals"]["num_statements"]
+ data["totals"].get("num_branches", 0),
data["totals"]["covered_lines"],
data["totals"]["num_statements"],
),
missing_lines=data["totals"]["missing_lines"],
excluded_lines=data["totals"]["excluded_lines"],
@@ -328,11 +318,3 @@ def parse_line_number_diff_line(line: str) -> Sequence[int]:
"""
start, length = (int(i) for i in (line.split()[2][1:] + ",1").split(",")[:2])
return range(start, start + length)


def collapse_lines(lines: list[int]) -> Iterable[tuple[int, int]]:
# All consecutive line numbers have the same difference between their list index and their value.
# Grouping by this difference therefore leads to buckets of consecutive numbers.
for _, it in itertools.groupby(enumerate(lines), lambda x: x[1] - x[0]):
t = list(it)
yield t[0][1], t[-1][1]
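The collapse_lines helper removed above turned a sorted list of line numbers into (start, end) ranges of consecutive lines, as its comment explains; the new coverage_comment/groups.py further down provides the more general compute_contiguous_groups for this kind of grouping. For reference, its behaviour, reproduced from the removed code:

import itertools
from collections.abc import Iterable

def collapse_lines(lines: list[int]) -> Iterable[tuple[int, int]]:
    # Consecutive line numbers share the same (value - index) difference,
    # so grouping by that difference yields runs of consecutive numbers.
    for _, it in itertools.groupby(enumerate(lines), lambda x: x[1] - x[0]):
        t = list(it)
        yield t[0][1], t[-1][1]

# 1-3 and 7-8 are consecutive runs; 12 stands alone.
assert list(collapse_lines([1, 2, 3, 7, 8, 12])) == [(1, 3), (7, 8), (12, 12)]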
69 changes: 5 additions & 64 deletions coverage_comment/diff_grouper.py
@@ -1,77 +1,17 @@
from __future__ import annotations

import dataclasses
import functools
import itertools
import pathlib
from collections.abc import Iterable

from coverage_comment import coverage as coverage_module
from coverage_comment import groups

MAX_ANNOTATION_GAP = 3


@dataclasses.dataclass(frozen=True)
class Group:
file: pathlib.Path
line_start: int
line_end: int


def compute_contiguous_groups(
values: list[int], separators: set[int], joiners: set[int]
) -> list[tuple[int, int]]:
"""
Given a list of (sorted) values, a list of separators and a list of
joiners, return a list of ranges (start, included end) describing groups of
values.
Groups are created by joining contiguous values together, and in some cases
by merging groups, enclosing a gap of values between them. Gaps that may be
enclosed are small gaps (<= MAX_ANNOTATION_GAP values after removing all
joiners) where no line is a "separator"
"""
contiguous_groups: list[tuple[int, int]] = []
for _, contiguous_group in itertools.groupby(
zip(values, itertools.count(1)), lambda x: x[1] - x[0]
):
grouped_values = (e[0] for e in contiguous_group)
first = next(grouped_values)
try:
*_, last = grouped_values
except ValueError:
last = first
contiguous_groups.append((first, last))

def reducer(
acc: list[tuple[int, int]], group: tuple[int, int]
) -> list[tuple[int, int]]:
if not acc:
return [group]

last_group = acc[-1]
last_start, last_end = last_group
next_start, next_end = group

gap = set(range(last_end + 1, next_start)) - joiners

gap_is_small = len(gap) <= MAX_ANNOTATION_GAP
gap_contains_separators = gap & separators

if gap_is_small and not gap_contains_separators:
acc[-1] = (last_start, next_end)
return acc

acc.append(group)
return acc

return functools.reduce(reducer, contiguous_groups, [])


def get_diff_missing_groups(
coverage: coverage_module.Coverage,
diff_coverage: coverage_module.DiffCoverage,
) -> Iterable[Group]:
) -> Iterable[groups.Group]:
for path, diff_file in diff_coverage.files.items():
coverage_file = coverage.files[path]

@@ -87,12 +27,13 @@ def get_diff_missing_groups(
# they are separators.
joiners = set(diff_file.added_lines) - separators

for start, end in compute_contiguous_groups(
for start, end in groups.compute_contiguous_groups(
values=diff_file.missing_statements,
separators=separators,
joiners=joiners,
max_gap=MAX_ANNOTATION_GAP,
):
yield Group(
yield groups.Group(
file=path,
line_start=start,
line_end=end,
63 changes: 63 additions & 0 deletions coverage_comment/groups.py
@@ -0,0 +1,63 @@
from __future__ import annotations

import dataclasses
import functools
import itertools
import pathlib


@dataclasses.dataclass(frozen=True)
class Group:
file: pathlib.Path
line_start: int
line_end: int


def compute_contiguous_groups(
values: list[int], separators: set[int], joiners: set[int], max_gap: int
) -> list[tuple[int, int]]:
"""
Given a list of (sorted) values, a list of separators and a list of
joiners, return a list of ranges (start, included end) describing groups of
values.
Groups are created by joining contiguous values together, and in some cases
by merging groups, enclosing a gap of values between them. Gaps that may be
enclosed are small gaps (<= max_gap values after removing all joiners)
where no line is a "separator".
"""
contiguous_groups: list[tuple[int, int]] = []
for _, contiguous_group in itertools.groupby(
zip(values, itertools.count(1)), lambda x: x[1] - x[0]
):
grouped_values = (e[0] for e in contiguous_group)
first = next(grouped_values)
try:
*_, last = grouped_values
except ValueError:
last = first
contiguous_groups.append((first, last))

def reducer(
acc: list[tuple[int, int]], group: tuple[int, int]
) -> list[tuple[int, int]]:
if not acc:
return [group]

last_group = acc[-1]
last_start, last_end = last_group
next_start, next_end = group

gap = set(range(last_end + 1, next_start)) - joiners

gap_is_small = len(gap) <= max_gap
gap_contains_separators = gap & separators

if gap_is_small and not gap_contains_separators:
acc[-1] = (last_start, next_end)
return acc

acc.append(group)
return acc

return functools.reduce(reducer, contiguous_groups, [])
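To make the docstring concrete, here is a small usage example of the new helper (assuming the package is importable as coverage_comment, as the import in diff_grouper.py above suggests): consecutive values collapse into ranges, and a gap between two ranges is bridged only when, after removing joiners, it is at most max_gap values long and contains no separator.

from coverage_comment.groups import compute_contiguous_groups

# 1-3 and 8-9 are contiguous runs. The gap between them is {4, 5, 6, 7};
# removing the joiners {4, 5} leaves {6, 7}, which is within max_gap=3,
# so without a separator in the gap the two runs merge into one group.
assert compute_contiguous_groups(
    values=[1, 2, 3, 8, 9], separators=set(), joiners={4, 5}, max_gap=3
) == [(1, 9)]

# With line 6 marked as a separator, the same gap blocks the merge.
assert compute_contiguous_groups(
    values=[1, 2, 3, 8, 9], separators={6}, joiners={4, 5}, max_gap=3
) == [(1, 3), (8, 9)]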
10 changes: 10 additions & 0 deletions coverage_comment/main.py
@@ -156,12 +156,22 @@ def process_pr(
)

marker = template.get_marker(marker_id=config.SUBPROJECT_ID)

files_info, count_files = template.select_files(
coverage=coverage,
diff_coverage=diff_coverage,
previous_coverage=previous_coverage,
max_files=config.MAX_FILES_IN_COMMENT,
)
try:
comment = template.get_comment_markdown(
coverage=coverage,
diff_coverage=diff_coverage,
previous_coverage=previous_coverage,
previous_coverage_rate=previous_coverage_rate,
files=files_info,
count_files=count_files,
max_files=config.MAX_FILES_IN_COMMENT,
minimum_green=config.MINIMUM_GREEN,
minimum_orange=config.MINIMUM_ORANGE,
repo_name=config.GITHUB_REPOSITORY,
1 change: 1 addition & 0 deletions coverage_comment/settings.py
@@ -60,6 +60,7 @@ class Config:
MERGE_COVERAGE_FILES: bool = False
ANNOTATE_MISSING_LINES: bool = False
ANNOTATION_TYPE: str = "warning"
MAX_FILES_IN_COMMENT: int = 25
VERBOSE: bool = False
# Only for debugging, not exposed in the action:
FORCE_WORKFLOW_RUN: bool = False