10 changes: 10 additions & 0 deletions .evergreen.yml
@@ -1131,6 +1131,15 @@ task_groups:
- e2e_om_ops_manager_prometheus
<<: *teardown_group

# Tests features only supported on OM80
- name: e2e_ops_manager_kind_8_0_only_task_group
max_hosts: -1
<<: *setup_group
<<: *setup_and_teardown_task
tasks:
- e2e_search_enterprise_tls
<<: *teardown_group

# Tests features only supported on OM70 and OM80; it's the only upgrade test, as we test upgrading from 6 to 7 or from 7 to 8
- name: e2e_ops_manager_upgrade_only_task_group
max_hosts: -1
@@ -1341,6 +1350,7 @@ buildvariants:
- name: e2e_ops_manager_kind_5_0_only_task_group_without_queryable_backup
- name: e2e_ops_manager_kind_6_0_only_task_group
- name: e2e_ops_manager_upgrade_only_task_group
- name: e2e_ops_manager_kind_8_0_only_task_group

- name: e2e_static_om80_kind_ubi
display_name: e2e_static_om80_kind_ubi
1 change: 0 additions & 1 deletion docker/mongodb-kubernetes-tests/kubetester/__init__.py
@@ -131,7 +131,6 @@ def create_or_update_configmap(
data: Dict[str, str],
api_client: Optional[kubernetes.client.ApiClient] = None,
) -> str:
print("Logging inside create_or_update configmap")
try:
create_configmap(namespace, name, data, api_client)
except kubernetes.client.ApiException as e:
10 changes: 0 additions & 10 deletions docker/mongodb-kubernetes-tests/kubetester/kubetester.py
@@ -978,16 +978,6 @@ def get_automation_status(group_id=None, group_name=None):

return response.json()

@staticmethod
def get_automation_status(group_id=None, group_name=None):
if group_id is None:
group_id = KubernetesTester.get_om_group_id(group_name=group_name)

url = build_automation_status_endpoint(KubernetesTester.get_om_base_url(), group_id)
response = KubernetesTester.om_request("get", url)

return response.json()

@staticmethod
def get_monitoring_config(group_id=None):
if group_id is None:
39 changes: 38 additions & 1 deletion docker/mongodb-kubernetes-tests/kubetester/mongodb.py
@@ -230,7 +230,18 @@ def __repr__(self):

def configure(
self,
om: MongoDBOpsManager,
om: Optional[MongoDBOpsManager],
project_name: str,
api_client: Optional[client.ApiClient] = None,
) -> MongoDB:
if om is not None:
return self.configure_ops_manager(om, project_name, api_client=api_client)
else:
return self.configure_cloud_qa(project_name, api_client=api_client)

def configure_ops_manager(
self,
om: Optional[MongoDBOpsManager],
project_name: str,
api_client: Optional[client.ApiClient] = None,
) -> MongoDB:
@@ -247,6 +258,29 @@ def configure(
self["spec"]["credentials"] = om.api_key_secret(self.namespace, api_client=api_client)
return self

def configure_cloud_qa(
self,
project_name: str,
api_client: Optional[client.ApiClient] = None,
) -> MongoDB:
if "opsManager" in self["spec"]:
del self["spec"]["opsManager"]

src_project_config_map_name = "my-project"
if "cloudManager" in self["spec"]:
src_project_config_map_name = self["spec"]["cloudManager"]["configMapRef"]["name"]

src_cm = read_configmap(self.namespace, src_project_config_map_name, api_client=api_client)

new_project_config_map_name = f"{self.name}-project-config"
ensure_nested_objects(self, ["spec", "cloudManager", "configMapRef"])
self["spec"]["cloudManager"]["configMapRef"]["name"] = new_project_config_map_name

src_cm.update({"projectName": f"{self.namespace}-{project_name}"})
create_or_update_configmap(self.namespace, new_project_config_map_name, src_cm, api_client=api_client)

return self

def configure_backup(self, mode: str = "enabled") -> MongoDB:
ensure_nested_objects(self, ["spec", "backup"])
self["spec"]["backup"]["mode"] = mode
@@ -449,6 +483,9 @@ def get_external_domain(self):
def config_map_name(self) -> str:
if "opsManager" in self["spec"]:
return self["spec"]["opsManager"]["configMapRef"]["name"]
elif "cloudManager" in self["spec"]:
return self["spec"]["cloudManager"]["configMapRef"]["name"]

return self["spec"]["project"]

def shard_replicaset_names(self) -> List[str]:
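With this dispatch in place, a test can stay agnostic about the backing project: passing a MongoDBOpsManager wires the resource to Ops Manager, while passing None makes configure_cloud_qa clone the source project ConfigMap (defaulting to my-project), prefix the project name with the namespace, and point spec.cloudManager.configMapRef at the copy. A minimal usage sketch — the fixture file name and surrounding scaffolding are illustrative, not part of this PR:

    # Sketch: one deployment helper that works against both backends.
    # `om` is a MongoDBOpsManager when targeting OM, or None for Cloud QA.
    def deploy_replica_set(om: Optional[MongoDBOpsManager], namespace: str) -> MongoDB:
        resource = MongoDB.from_yaml(yaml_fixture("replica-set.yaml"), namespace=namespace)
        # configure() dispatches to configure_ops_manager() or configure_cloud_qa()
        resource.configure(om, project_name="search-project")
        resource.update()
        return resource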
57 changes: 47 additions & 10 deletions docker/mongodb-kubernetes-tests/kubetester/omtester.py
@@ -15,21 +15,22 @@
import requests
import semver
from kubetester.automation_config_tester import AutomationConfigTester
from kubetester.kubetester import build_agent_auth, build_auth, run_periodically
from kubetester.kubetester import (
KubernetesTester,
build_agent_auth,
build_auth,
run_periodically,
)
from kubetester.mongotester import BackgroundHealthChecker
from kubetester.om_queryable_backups import OMQueryableBackup
from opentelemetry import trace
from requests.adapters import HTTPAdapter, Retry
from tests import test_logger
from tests.common.ops_manager.cloud_manager import is_cloud_qa

from .kubetester import get_env_var_or_fail
skip_if_cloud_manager = pytest.mark.skipif(is_cloud_qa(), reason="Do not run in Cloud Manager")


def running_cloud_manager():
"Determines if the current test is running against Cloud Manager"
return get_env_var_or_fail("OM_HOST") == "https://cloud-qa.mongodb.com"


skip_if_cloud_manager = pytest.mark.skipif(running_cloud_manager(), reason="Do not run in Cloud Manager")
logger = test_logger.get_test_logger(__name__)


class BackupStatus(str, Enum):
@@ -421,7 +422,7 @@ def om_request():
span.set_attribute(key=f"mck.om.request.retries", value=retries - retry_count)
return resp
except Exception as e:
print(f"Encountered exception: {e} on retry number {retries-retry_count}")
print(f"Encountered exception: {e} on retry number {retries - retry_count}")
span.set_attribute(key=f"mck.om.request.exception", value=str(e))
last_exception = e
time.sleep(1)
@@ -685,6 +686,42 @@ def api_update_version_manifest(self, major_version: str = "8.0"):
body = requests.get(url=f"https://opsmanager.mongodb.com/static/version_manifest/{major_version}.json").json()
self.om_request("put", "/versionManifest", json_object=body)

def api_get_automation_status(self) -> dict:
return self.om_request("get", f"/groups/{self.context.project_id}/automationStatus").json()

def wait_agents_ready(self, timeout: Optional[int] = 600):
"""Waits until all the agents reached the goal automation config version."""
log_prefix = f"[{self.context.group_name}/{self.context.project_id}] "

def agents_are_ready():
auto_status = self.api_get_automation_status()
goal_version = auto_status.get("goalVersion")

logger.info(f"{log_prefix}Checking if all agent processes have reached goal version: {goal_version}")
processes_not_ready = []
for process in auto_status.get("processes", []):
process_name = process.get("name", "unknown")
process_version = process.get("lastGoalVersionAchieved")
if process_version != goal_version:
logger.info(
f"{log_prefix}Process {process_name} at version {process_version}, expected {goal_version}"
)
processes_not_ready.append(process_name)

all_processes_ready = len(processes_not_ready) == 0
if all_processes_ready:
logger.info(f"{log_prefix}All agent processes have reached the goal version")
else:
logger.info(f"{log_prefix}{len(processes_not_ready)} processes have not yet reached the goal version")

return all_processes_ready

KubernetesTester.wait_until(
agents_are_ready,
timeout=timeout,
sleep_time=3,
)


class OMBackgroundTester(BackgroundHealthChecker):
"""
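wait_agents_ready builds on the new api_get_automation_status helper, polling until every process reports lastGoalVersionAchieved equal to the plan's goalVersion. A trimmed sketch of the payload shape the check consumes (field names as used above, values illustrative):

    # Trimmed /automationStatus payload as consumed by agents_are_ready():
    {
        "goalVersion": 42,
        "processes": [
            {"name": "my-replica-set-0", "lastGoalVersionAchieved": 42},  # ready
            {"name": "my-replica-set-1", "lastGoalVersionAchieved": 41},  # still converging
        ],
    }

A test would typically call wait_agents_ready() right after pushing a spec change, so later assertions don't race a half-applied automation config.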
5 changes: 5 additions & 0 deletions docker/mongodb-kubernetes-tests/tests/common/ops_manager/cloud_manager.py
@@ -0,0 +1,5 @@
import os


def is_cloud_qa() -> bool:
return os.getenv("ops_manager_version", "cloud_qa") == "cloud_qa"
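
The helper keys off the ops_manager_version environment variable and treats an unset value as Cloud QA. Besides driving the MongoDB.configure dispatch, it replaces the old OM_HOST-based check behind the skip_if_cloud_manager marker in omtester.py; gating an OM-only test looks like this (test body illustrative):

    # Sketch: collection-time skip for tests that need a real Ops Manager.
    import pytest
    from tests.common.ops_manager.cloud_manager import is_cloud_qa

    @pytest.mark.skipif(is_cloud_qa(), reason="Do not run in Cloud Manager")
    def test_requires_ops_manager():
        ...  # exercises OM-only behavior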
@@ -15,3 +15,16 @@ spec:

backup:
enabled: false

# added just to avoid the setup wizard when opening the OM UI
configuration:
automation.versions.source: mongodb
mms.adminEmailAddr: cloud-manager-support@mongodb.com
mms.fromEmailAddr: cloud-manager-support@mongodb.com
mms.ignoreInitialUiSetup: "true"
mms.mail.hostname: email-smtp.us-east-1.amazonaws.com
mms.mail.port: "465"
mms.mail.ssl: "true"
mms.mail.transport: smtp
mms.minimumTLSVersion: TLSv1.2
mms.replyToEmailAddr: cloud-manager-support@mongodb.com
@@ -19,16 +19,15 @@ spec:
- SCRAM
agent:
logLevel: DEBUG
statefulSet:
spec:
template:
spec:
containers:
- name: mongodb-enterprise-database
resources:
limits:
cpu: "2"
memory: 2Gi
requests:
cpu: "1"
memory: 1Gi
podSpec:
podTemplate:
spec:
containers:
- name: mongodb-enterprise-database
resources:
limits:
cpu: "2"
memory: 2Gi
requests:
cpu: "1"
memory: 1Gi
30 changes: 30 additions & 0 deletions docker/mongodb-kubernetes-tests/tests/search/om_deployment.py
@@ -0,0 +1,30 @@
from typing import Optional

from kubetester import try_load
from kubetester.kubetester import fixture as yaml_fixture
from kubetester.kubetester import is_multi_cluster
from kubetester.opsmanager import MongoDBOpsManager
from pytest import fixture
from tests.common.ops_manager.cloud_manager import is_cloud_qa
from tests.conftest import get_custom_appdb_version, get_custom_om_version
from tests.opsmanager.withMonitoredAppDB.conftest import enable_multi_cluster_deployment


def get_ops_manager(namespace: str) -> Optional[MongoDBOpsManager]:
if is_cloud_qa():
return None

resource: MongoDBOpsManager = MongoDBOpsManager.from_yaml(
yaml_fixture("om_ops_manager_basic.yaml"), namespace=namespace
)

if try_load(resource):
return resource

resource.set_version(get_custom_om_version())
resource.set_appdb_version(get_custom_appdb_version())

if is_multi_cluster():
enable_multi_cluster_deployment(resource)

return resource
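
get_ops_manager is the glue between the two modes: under Cloud QA it returns None, which MongoDB.configure turns into a configure_cloud_qa call; otherwise it loads (or reuses, via try_load) the basic OM fixture with the pinned versions. A hedged sketch of how a search test module might consume it, reusing this module's imports — the fixture name is an assumption, not from this PR:

    # Sketch: conditionally deploy OM, then hand it (or None) to MongoDB.configure().
    @fixture(scope="module")
    def ops_manager(namespace: str) -> Optional[MongoDBOpsManager]:
        om = get_ops_manager(namespace)
        if om is not None:
            om.update()  # only deploy OM when not running against Cloud QA
        return om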