Skip to content

Commit

Permalink
test: adding tokens and previous exercise validation
Browse files Browse the repository at this point in the history
  • Loading branch information
Marko MITROVIC authored and Marko MITROVIC committed Jan 24, 2025
1 parent 66fae10 commit 6768828
Show file tree
Hide file tree
Showing 6 changed files with 246 additions and 104 deletions.
214 changes: 135 additions & 79 deletions github_tests_validator_app/bin/github_repo_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,15 @@
GH_WORKFLOWS_FOLDER_NAME,
commit_ref_path,
default_message,
base_tokens,
required_checks
)
from github_tests_validator_app.lib.utils import pull_requested_test_results

from github_tests_validator_app.lib.utils import (
pull_requested_test_results,
validate_and_assign_token
)

from github_tests_validator_app.lib.connectors.github_client import GitHubConnector
from github_tests_validator_app.lib.connectors.sqlalchemy_client import SQLAlchemyConnector, User

Expand Down Expand Up @@ -162,11 +169,7 @@ def validate_github_repo(
github_event=event,
user_github_connector=user_github_connector
)
logging.info(f"failed_test : {failed_tests[1]}")
pytest_result_conclusion = "failure" if failed_tests[1] > 0 else "success"
logging.info(f"pytest_result_conclusion 01 = {pytest_result_conclusion}")

logging.info(f"pytest_result_conclusion = {pytest_result_conclusion}")


sql_client.add_new_repository_validation(
Expand All @@ -185,89 +188,142 @@ def validate_github_repo(
default_message["valid_repository"]["tests"][str(tests_havent_changed)],
)


if event == "pull_request":
# Create a Check Run with detailed test results in case of failure
user_github_connector.repo.create_check_run(
name="[Integrity] Test Folder Validation",
head_sha=payload["pull_request"]["head"]["sha"],
status="completed",
conclusion=tests_conclusion,
output={
"title": "Test Folder Validation Result",
"summary": tests_message,
}
)
user_github_connector.repo.create_check_run(
name="[Integrity] Workflow Folder Validation",
head_sha=payload["pull_request"]["head"]["sha"],
status="completed",
conclusion=workflows_conclusion,
output={
"title": "Workflow Folder Validation Result",
"summary": workflows_message,
}
)
pytest_result_message = pull_requested_test_results(
tests_results_json=pytests_results_json,
payload=payload,
github_event=event,
user_github_connector=user_github_connector
)
user_github_connector.repo.create_check_run(
name="[Pytest] Pytest Result Validation",
head_sha=payload["pull_request"]["head"]["sha"],
status="completed",
conclusion=pytest_result_conclusion,
output={
"title": "Pytest Validation Result",
"summary": pytest_result_message[0],
}
)
try :
# Create a Check Run with detailed test results in case of failure
user_github_connector.repo.create_check_run(
name="[Integrity] Test Folder Validation",
head_sha=payload["pull_request"]["head"]["sha"],
status="completed",
conclusion=tests_conclusion,
output={
"title": "Test Folder Validation Result",
"summary": tests_message,
}
)
user_github_connector.repo.create_check_run(
name="[Integrity] Workflow Folder Validation",
head_sha=payload["pull_request"]["head"]["sha"],
status="completed",
conclusion=workflows_conclusion,
output={
"title": "Workflow Folder Validation Result",
"summary": workflows_message,
}
)
pytest_result_message = pull_requested_test_results(
tests_results_json=pytests_results_json,
payload=payload,
github_event=event,
user_github_connector=user_github_connector
)
user_github_connector.repo.create_check_run(
name="[Pytest] Pytest Result Validation",
head_sha=payload["pull_request"]["head"]["sha"],
status="completed",
conclusion=pytest_result_conclusion,
output={
"title": "Pytest Validation Result",
"summary": pytest_result_message[0],
}
)
except Exception as e :
logging.error(f"Error creating check run: {e}")

# All exercice need to be validated for -> Token
part, token = validate_and_assign_token(sha=payload["pull_request"]["head"]["sha"],
tokens=base_tokens,
user_github_connector=user_github_connector,
required_checks=required_checks)

if part and token:
pr_number = payload["pull_request"]["number"]
comment_message = (
f"🎉 Congratulations! You've validated all exercises for {part}. "
f"Here is your token: `{token}`"
)
# Post a comment in the PR
user_github_connector.repo.get_pull(pr_number).create_issue_comment(comment_message)
logging.info(f"Posted comment to PR #{pr_number}: {comment_message}")
else:
logging.info("Not all exercises are validated. No token assigned.")

elif event == "pusher":
# Check if there is already an open PR
gh_branch = payload["ref"].replace("refs/heads/", "")
gh_prs = user_github_connector.repo.get_pulls(
state="open",
head=f"{user_github_connector.repo.owner.login}:{gh_branch}"
)
logging.info(f"HEAD SHA for check runs: {payload['after']}")

if gh_prs.totalCount > 0:
gh_pr = gh_prs[0] # Get first matching PR
if gh_pr.head.sha == payload["after"]:
return
logging.info("SHA matches an open PR; skipping duplicate processing.")

user_github_connector.repo.create_check_run(
name="[Integrity] Test Folder Validation",
head_sha=payload["after"],
status="completed",
conclusion=tests_conclusion,
output={
"title": "Test Folder Validation Result",
"summary": tests_message,
}
)
user_github_connector.repo.create_check_run(
name="[Integrity] Workflow Folder Validation",
head_sha=payload["after"],
status="completed",
conclusion=workflows_conclusion,
output={
"title": "Workflow Folder Validation Result",
"summary": workflows_message,
}
)
pytest_result_message = pull_requested_test_results(
tests_results_json=pytests_results_json,
payload=payload,
github_event=event,
user_github_connector=user_github_connector
)
user_github_connector.repo.create_check_run(
name="[Pytest] Pytest Result Validation",
head_sha=payload["after"],
status="completed",
conclusion=pytest_result_conclusion,
output={
"title": "Pytest Validation Result",
"summary": pytest_result_message[0],
}
)
try :
user_github_connector.repo.create_check_run(
name="[Integrity] Test Folder Validation",
head_sha=payload["after"],
status="completed",
conclusion=tests_conclusion,
output={
"title": "Test Folder Validation Result",
"summary": tests_message,
}
)
user_github_connector.repo.create_check_run(
name="[Integrity] Workflow Folder Validation",
head_sha=payload["after"],
status="completed",
conclusion=workflows_conclusion,
output={
"title": "Workflow Folder Validation Result",
"summary": workflows_message,
}
)
pytest_result_message = pull_requested_test_results(
tests_results_json=pytests_results_json,
payload=payload,
github_event=event,
user_github_connector=user_github_connector
)
user_github_connector.repo.create_check_run(
name="[Pytest] Pytest Result Validation",
head_sha=payload["after"],
status="completed",
conclusion=pytest_result_conclusion,
output={
"title": "Pytest Validation Result",
"summary": pytest_result_message[0],
}
)
except Exception as e:
logging.error(f"Error creating check run: {e}")

# All exercice need to be validated for -> Token
part, token = validate_and_assign_token(sha=payload["ref"],
tokens=base_tokens,
user_github_connector=user_github_connector,
required_checks=required_checks)

if part and token:
branch_ref = payload["ref"] # e.g., 'refs/heads/branch_name'

# Retrieve the pull request associated with the branch
open_prs = user_github_connector.repo.get_pulls(state="open", head=f"{user_github_connector.repo.owner.login}:{branch_ref}")
if open_prs.totalCount == 1:
pr_number = open_prs[0].number
comment_message = (
f"🎉 Congratulations! You've validated all exercises for {part}. "
f"Here is your token: `{token}`"
)
# Post a comment in the PR
open_prs[0].create_issue_comment(comment_message)
logging.info(f"Posted comment to PR #{pr_number}: {comment_message}")
else:
logging.warning(f"Could not determine a single open PR for branch {branch_ref}.")
else:
logging.info("Not all exercises are validated. No token assigned.")
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def send_user_pytest_summaries(

# Get user artifact
artifact = get_user_artifact(user_github_connector, sql_client, all_user_artifact, payload)
logging.info(f"User artifact: {artifact}")
# logging.info(f"User artifact: {artifact}")
if not artifact:
logging.info("[ERROR]: Cannot get user artifact.")
return
Expand Down
47 changes: 47 additions & 0 deletions github_tests_validator_app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,3 +58,50 @@
},
},
}

# Maps each course part to the validation test files that make it up.
# A part is considered complete when all of its listed tests pass
# (consumed by validate_and_assign_token together with required_checks).
part_map = {
    "Part 1": [
        "validation_tests/test_01_git_101.py",
        "validation_tests/test_02_notebooks_to_scripts.py",
        "validation_tests/test_03_linting_and_formatting.py",
        "validation_tests/test_04_continuous_integration.py",
        "validation_tests/test_05_unit_testing.py",
        "validation_tests/test_06_code_documentation.py",
        "validation_tests/test_07_dependencies_and_venv.py"
        # "validation_tests/test_08_packaging.py"
    ],
    "Part 2": [
        "validation_tests/test_09_ci_cd_pipelines.py",
        "validation_tests/test_10_monitoring_and_alerting.py",
        "validation_tests/test_11_logging.py",
        "validation_tests/test_12_security_testing.py",
        "validation_tests/test_13_performance_optimization.py"
    ]
}

# Pool of one-time reward tokens per part, handed out when a student
# validates every exercise of that part.
# NOTE(review): these are secrets committed to source control — anyone with
# read access to the repo can claim a token without passing the tests.
# Consider loading them from an environment variable or a secret store
# instead; TODO confirm with the course maintainers.
base_tokens = {
    "Part 1": [
        "FsQyRcFCNNzlUZpZ", "FyxfmqtAc8HCRLpx", "4VPhvLsrhJwfU3ee",
        "V9D2DaQgesfMs9Fc", "CJCIPgYQud6Io1jD", "mLFjjkXsxbTb0VCw",
        "9B8rMKEeR0p3gsJD", "i4M9CZuJQiwf8TKL", "JDn5CficECTa4JBN",
        "AjMJBlYQyA2bxuXg", "NZ2BNJDcUQ8BZYxX", "eb8YEgo8yoTenrVS",
        "WJbcGDT2Y7VjxNrZ", "oTzPvOupEY1eA3O9", "M0zAppk75VZEWAIx",
        "HjzUp5L9IzYhRzdj", "A04FC2reSxdIgHaK", "DXM297sx4alfByVx",
        "1G03TRqIamYRRNTF", "q78NrY9cESJESCBL"
    ],
    "Part 2": [
        "Kh6MYcXAaQtWjKqn", "Diu0KzPzOU6Reced", "GB8DrumrkguJYDbm",
        "9Saz9603Gv7fQxh9", "4toXjiNOa2jQmveY", "S798d7fOXpExDtpR",
        "l3H2IALb5PziNqwZ", "1kaPHmA1I5o6fjyd", "032IKBCVNiWQdRYT",
        "TL2kMMZL5aK8j6rW", "gPdv1ahPY1Pd8Q9T", "4XdS6r53tJ01vFOa",
        "afHAgMIzQQI0HyDX", "SmINslEsh2OgAGGu", "RFFR6Z0Fmvsu5poQ",
        "hTfmdvk9uilnlkIN", "chyPPvztjsZiYgYE", "bxp7NA9uFwCtdtRL",
        "CzeYlSWdW2PgPpzk", "bcaTLaZQaCZx1zyJ"
    ]
}

# Names of the GitHub check runs that must all report success on a commit
# before a token is released. These strings must match the `name=` values
# passed to repo.create_check_run in github_repo_validation.py exactly.
required_checks = [
    "[Pytest] Pytest Result Validation",
    "[Integrity] Test Folder Validation",
    "[Integrity] Workflow Folder Validation"
]
8 changes: 4 additions & 4 deletions github_tests_validator_app/lib/connectors/github_client.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from typing import Any, Dict, List, Union

import os
import io
import json
import time
Expand Down Expand Up @@ -113,9 +114,9 @@ def get_all_artifacts(self) -> Union[requests.models.Response, Any]:
for attempt in range(max_retries):
try:
response = self._request_data(url, headers=headers)
logging.info(f"Artifacts response: {response} from {url}")
# logging.info(f"Artifacts response: {response} from {url}")
if response and response.get("artifacts"):
logging.info(f"Artifacts fetched successfully on attempt {attempt+1}: {response}")
logging.info(f"Artifacts fetched successfully on attempt {attempt+1}")
return response
logging.warning(f"No artifacts found on attempt {attempt+1}/{max_retries}. Retrying in {delay}s...")
time.sleep(delay)
Expand Down Expand Up @@ -163,8 +164,7 @@ def get_artifact(self, artifact_info: Dict[str, Any]) -> Union[requests.models.R


def _get_headers(self) -> Dict[str, str]:
if not self.ACCESS_TOKEN:
self.set_access_token(self.REPO_NAME)
self.ACCESS_TOKEN = os.getenv("GH_PAT")

return {
"Accept": "application/vnd.github+json",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def add_new_pytest_summary(
branch: str,
info: str,
) -> None:
logging.info(f"Adding new pytest summary: {artifact}")
logging.info(f"Adding new pytest summary ...")
pytest_summary = WorkflowRun(
id=workflow_run_id,
organization_or_user=user_data["organization_or_user"],
Expand Down Expand Up @@ -147,7 +147,7 @@ def add_new_pytest_detail(
results: List[Dict[str, Any]],
workflow_run_id: int,
) -> None:
logging.info(f"Adding new pytest details: {results}")
logging.info(f"Adding new pytest details...")
with Session(self.engine) as session:
try:
for test in results:
Expand Down
Loading

0 comments on commit 6768828

Please sign in to comment.