Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use snapshot ID if defined #77

Merged
merged 31 commits into from
Sep 9, 2024
Merged
Show file tree
Hide file tree
Changes from 26 commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
56ebeb7
Use snapshot if defined
JokeWaumans Aug 21, 2024
a45550e
Test get_defects with snapshot ID
JokeWaumans Aug 21, 2024
d01961b
Add COVERITY_SNAPSHOT to env
JokeWaumans Aug 21, 2024
be7f2df
Fix flake8
JokeWaumans Aug 22, 2024
80a3a31
Merge branch 'REST-API' into snapshots
JokeWaumans Aug 22, 2024
a8b1bcc
Use the same fake_checkers
JokeWaumans Aug 22, 2024
6943161
Add fake_snapshot variable to test
JokeWaumans Aug 22, 2024
d392e50
Make oneliner from multiliner
JokeWaumans Aug 22, 2024
bd68645
Set logging level to WARNING in conf.py
JokeWaumans Aug 22, 2024
0676444
Merge branch 'REST-API' into snapshots
JokeWaumans Aug 23, 2024
1a141f4
Use if-else statement to avoid problems with static analysis checker
JokeWaumans Aug 23, 2024
72a6cb1
Move new arguments to the end in case it is already used
JokeWaumans Aug 23, 2024
43e2bdc
Fix argument order of assert_called_with
JokeWaumans Aug 23, 2024
b55fc05
Merge branch 'master' of https://github.com/melexis/sphinx-coverity-e…
JasperCraeghs Aug 23, 2024
0c9e1c2
Use one-liner for if-else statement
JokeWaumans Sep 2, 2024
aedbafc
Fix typo
JokeWaumans Sep 3, 2024
cc4d1cc
Fix typo in test
JokeWaumans Sep 3, 2024
7807ab4
Fix f-string
JokeWaumans Sep 3, 2024
1c82f68
Take global LOGGER from coverity_logging to set logging level
JokeWaumans Sep 3, 2024
b08f5c8
Fix typo to set default value if not exists
JokeWaumans Sep 3, 2024
26b647a
Update docstring
JokeWaumans Sep 5, 2024
cc38b89
Snapshot defaults to empty string for backwards compatibility
JokeWaumans Sep 5, 2024
d63a705
Merge branch 'snapshots' of github.com:melexis/sphinx-coverity-extens…
JokeWaumans Sep 5, 2024
3346010
Continue with the latest snapshot if the snapshot does not exist
JokeWaumans Sep 5, 2024
6eb40c2
Refactor loggings
JokeWaumans Sep 5, 2024
b8793b8
Delete done in perform_replacement
JokeWaumans Sep 5, 2024
f9b6491
Return valid snapshot to use it later
JokeWaumans Sep 9, 2024
40f075f
Add trailing comma to add data easily
JokeWaumans Sep 9, 2024
eadbb08
Update test with snapshot="last()" if snapshot is not defined/valid
JokeWaumans Sep 9, 2024
113dff9
Merge branch 'snapshots' of github.com:melexis/sphinx-coverity-extens…
JokeWaumans Sep 9, 2024
e15dd57
Merge branch 'master' of https://github.com/melexis/sphinx-coverity-e…
JasperCraeghs Sep 9, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ env:
COVERITY_PASSWORD: dummy
COVERITY_STREAM: dummy
COVERITY_USERNAME: dummy
COVERITY_SNAPSHOT: dummy

jobs:
test:
Expand Down
1 change: 1 addition & 0 deletions example/.env.example
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
COVERITY_USERNAME = 'yourusername'
COVERITY_PASSWORD = 'yourpassword'
COVERITY_STREAM = 'yourstream'
COVERITY_SNAPSHOT = ''

2 changes: 1 addition & 1 deletion example/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ BUILDDIR ?= _build

# logging variables
DEBUG ?= 0
LOGLEVEL =? WARNING
LOGLEVEL ?= WARNING

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
Expand Down
7 changes: 3 additions & 4 deletions example/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,10 @@
import sys

import mlx.coverity
from mlx.coverity import __version__
from mlx.coverity import __version__, coverity_logging
import mlx.traceability
from decouple import config
import logging
from sphinx.util.logging import getLogger

pkg_version = __version__

Expand Down Expand Up @@ -315,6 +314,7 @@
"username": config("COVERITY_USERNAME"),
"password": config("COVERITY_PASSWORD"),
"stream": config("COVERITY_STREAM"),
"snapshot": config("COVERITY_SNAPSHOT")
JokeWaumans marked this conversation as resolved.
Show resolved Hide resolved
}

TRACEABILITY_ITEM_ID_REGEX = r"([A-Z_]+-[A-Z0-9_]+)"
Expand All @@ -324,8 +324,7 @@
if log_level:
try:
numeric_level = getattr(logging, log_level.upper(), None)
logger = getLogger("mlx.coverity_logging")
logger.setLevel(level=numeric_level)
coverity_logging.LOGGER.setLevel(level=numeric_level)
except:
raise ValueError(f"Invalid log level: {log_level}")

25 changes: 17 additions & 8 deletions mlx/coverity/coverity.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def initialize_environment(self, app):
\\makeatother"""

self.stream = app.config.coverity_credentials["stream"]

self.snapshot = app.config.coverity_credentials.get("snapshot", "")
# Login to Coverity and obtain stream information
try:
self.input_credentials(app.config.coverity_credentials)
Expand All @@ -58,15 +58,19 @@ def initialize_environment(self, app):
app.config.coverity_credentials["username"], app.config.coverity_credentials["password"]
)
report_info("done")
report_info("Verify the given stream name... ", True)
report_info("Verify the given stream name... ")
self.coverity_service.validate_stream(self.stream)
report_info("done")
if self.snapshot:
report_info("Verify the given snapshot ID and obtain all enabled checkers... ")
self.coverity_service.validate_snapshot(self.snapshot)
report_info("done")
# Get all column keys
report_info("obtaining all column keys... ", True)
report_info("obtaining all column keys... ")
self.coverity_service.retrieve_column_keys()
report_info("done")
# Get all checkers
report_info("obtaining all checkers... ", True)
report_info("obtaining all checkers... ")
self.coverity_service.retrieve_checkers()
report_info("done")
except (URLError, HTTPError, Exception, ValueError) as error_info: # pylint: disable=broad-except
Expand Down Expand Up @@ -100,7 +104,13 @@ def process_coverity_nodes(self, app, doctree, fromdocname):
# Get items from server
try:
defects = self.get_filtered_defects(node)
node.perform_replacement(defects, self, app, fromdocname)
if defects["totalRows"] == -1:
error_message = "There are no defects with the specified filters"
report_warning(error_message, fromdocname, lineno=node["line"])
else:
report_info("building defects table and/or chart... ", True)
node.perform_replacement(defects, self, app, fromdocname)
report_info("done")
except (URLError, AttributeError, Exception) as err: # pylint: disable=broad-except
error_message = f"failed to process coverity-list with {err!r}"
report_warning(error_message, fromdocname, lineno=node["line"])
Expand Down Expand Up @@ -138,13 +148,12 @@ def get_filtered_defects(self, node):
"rows": [list of dictionaries {"key": <key>, "value": <value>}]
}
"""
report_info("obtaining defects... ", True)
report_info("obtaining defects... ")
column_names = set(node["col"])
if "chart_attribute" in node and node["chart_attribute"].upper() in node.column_map:
column_names.add(node["chart_attribute"])
defects = self.coverity_service.get_defects(self.stream, node["filters"], column_names)
defects = self.coverity_service.get_defects(self.stream, node["filters"], column_names, self.snapshot)
report_info("%d received" % (defects["totalRows"]))
report_info("building defects table and/or chart... ", True)
return defects


Expand Down
1 change: 0 additions & 1 deletion mlx/coverity/coverity_directives/coverity_defect_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ def perform_replacement(self, defects, connector, app, fromdocname):
self._prepare_labels_and_values(combined_labels, defects["totalRows"])
top_node += self.build_pie_chart(env)

report_info("done")
self.replace_self(top_node)

def initialize_table(self):
Expand Down
51 changes: 36 additions & 15 deletions mlx/coverity/coverity_services.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import requests
from sphinx.util.logging import getLogger

from mlx.coverity import report_info
from mlx.coverity import report_info, report_warning

# Coverity built in Impact statuses
IMPACT_LIST = ["High", "Medium", "Low"]
Expand Down Expand Up @@ -53,6 +53,7 @@ def __init__(self, hostname):
self._checkers = []
self._columns = {}
self.logger = getLogger("mlx.coverity_logging")
self.valid_snapshot = False

@property
def base_url(self):
Expand Down Expand Up @@ -125,6 +126,23 @@ def validate_stream(self, stream):
url = f"{self.api_endpoint}/streams/{stream}"
self._request(url)

def validate_snapshot(self, snapshot):
"""Validate snapshot by retrieving the specified snapshot.
When the request fails, the snapshot does not exist or the user does not have access to it.
In this case a warning is logged and the latest snapshot is used instead.

Args:
snapshot (str): The snapshot ID
"""
url = f"{self.api_endpoint}/snapshots/{snapshot}"
response = self.session.get(url)
if response.ok:
self.valid_snapshot = True
report_info(f"Snapshot ID {snapshot} is valid")
else:
report_warning(f"No snapshot found for ID {snapshot}; Continue with using the latest snapshot.", "")
self.valid_snapshot = False
JasperCraeghs marked this conversation as resolved.
Show resolved Hide resolved

def retrieve_issues(self, filters):
"""Retrieve issues from the server (Coverity Connect).

Expand Down Expand Up @@ -200,7 +218,7 @@ def _request(self, url, data=None):
err_msg = response.json()["message"]
except (requests.exceptions.JSONDecodeError, KeyError):
err_msg = response.content.decode()
self.logger.warning(err_msg)
self.logger.error(err_msg)
return response.raise_for_status()

def assemble_query_filter(self, column_name, filter_values, matcher_type):
Expand Down Expand Up @@ -236,8 +254,10 @@ def assemble_query_filter(self, column_name, filter_values, matcher_type):
"matchers": matchers
}

def get_defects(self, stream, filters, column_names):
"""Gets a list of defects for given stream, filters and column names.
def get_defects(self, stream, filters, column_names, snapshot):
"""Gets a list of defects for the given stream, filters and column names.

If no snapshot ID is given, the last snapshot is taken.
If a column name does not match the name of the `columns` property, the column cannot be obtained because
it needs the correct corresponding column key.
Column key `cid` is always obtained to use later in other functions.
Expand All @@ -246,6 +266,7 @@ def get_defects(self, stream, filters, column_names):
stream (str): Name of the stream to query
filters (dict): Dictionary with attribute names as keys and CSV lists of attribute values to query as values
column_names (list[str]): The column names
snapshot (str): The snapshot ID; If empty the last snapshot is taken.

Returns:
dict: The content of the request. This has a structure like:
Expand All @@ -256,7 +277,7 @@ def get_defects(self, stream, filters, column_names):
"rows": list of [list of dictionaries {"key": <key>, "value": <value>}]
}
"""
report_info(f"Querying Coverity for defects in stream [{stream}] ...",)
report_info(f"Querying Coverity for defects in stream [{stream}] ...")
query_filters = [
{
"columnKey": "streams",
Expand Down Expand Up @@ -291,23 +312,23 @@ def get_defects(self, stream, filters, column_names):
if (filter := filters["component"]) and (filter_values := self.handle_component_filter(filter)):
query_filters.append(self.assemble_query_filter("Component", filter_values, "nameMatcher"))

scope = snapshot if snapshot and self.valid_snapshot else "last()"

data = {
"filters": query_filters,
"columns": list(self.column_keys(column_names)),
"snapshotScope": {
"show": {
"scope": "last()",
"includeOutdatedSnapshots": False
},
"compareTo": {
"scope": "last()",
"scope": scope,
"includeOutdatedSnapshots": False
}
}
}

report_info("Running Coverity query...")
return self.retrieve_issues(data)
defects_data = self.retrieve_issues(data)
report_info("done")

return defects_data

def handle_attribute_filter(self, attribute_values, name, valid_attributes, allow_regex=False):
"""Process the given CSV list of attribute values by filtering out the invalid ones while logging an error.
Expand All @@ -322,11 +343,11 @@ def handle_attribute_filter(self, attribute_values, name, valid_attributes, allo
Returns:
set[str]: The attributes values to query with
"""
report_info(f"Using {name} filter [{attribute_values}]")
report_info(f"Using {name!r} filter [{attribute_values}]")
filter_values = set()
for field in attribute_values.split(","):
if not valid_attributes or field in valid_attributes:
report_info("Classification [{field}] is valid")
report_info(f"Classification [{field}] is valid")
filter_values.add(field)
elif allow_regex:
pattern = re.compile(field)
Expand All @@ -346,7 +367,7 @@ def handle_component_filter(self, attribute_values):
Returns:
list[str]: The list of attributes
"""
report_info(f"Using Component filter [{attribute_values}]")
report_info(f"Using 'Component' filter [{attribute_values}]")
parser = csv.reader([attribute_values])
filter_values = []
for fields in parser:
Expand Down
58 changes: 50 additions & 8 deletions tests/filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,7 @@
],
"columns": ["cid"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)
Expand Down Expand Up @@ -74,8 +73,7 @@
],
"columns": ["cid", "checker", "lastTriageComment", "classification"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)
Expand All @@ -102,8 +100,7 @@
],
"columns": ["status", "cid", "checker", "lastTriageComment"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)
Expand Down Expand Up @@ -135,8 +132,53 @@
],
"columns": ["cid", "classification", "action"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)

test_snapshot = Filter(
{
"checker": "MISRA",
"impact": None,
"kind": None,
"classification": "Intentional,Bug,Pending,Unclassified",
"action": None,
"component": None,
"cwe": None,
"cid": None,
},
["CID", "Classification", "Checker", "Comment"],
{
"filters": [
{
"columnKey": "streams",
"matchMode": "oneOrMoreMatch",
"matchers": [{"class": "Stream", "name": "test_stream", "type": "nameMatcher"}],
},
{
"columnKey": "checker",
"matchMode": "oneOrMoreMatch",
"matchers": [
{"type": "keyMatcher", "key": "MISRA 2 KEY"},
{"type": "keyMatcher", "key": "MISRA 1"},
{"type": "keyMatcher", "key": "MISRA 3"},
],
},
{
"columnKey": "classification",
"matchMode": "oneOrMoreMatch",
"matchers": [
{"type": "keyMatcher", "key": "Bug"},
{"type": "keyMatcher", "key": "Pending"},
{"type": "keyMatcher", "key": "Unclassified"},
{"type": "keyMatcher", "key": "Intentional"},
],
},
],
"columns": ["cid", "checker", "lastTriageComment", "classification"],
"snapshotScope": {
"show": {"scope": "123", "includeOutdatedSnapshots": False}
},
},
)
Loading
Loading