Skip to content

Commit

Permalink
Add dind test to validate docker build (#94)
Browse files Browse the repository at this point in the history
* Add dind test to validate docker build

* Updates to see if we can get this to work

* Add fake model for testing

* Fix all the things, move tests to a separate file

* Whoops

* Rename

* Fix

* Fix name collision in tests
  • Loading branch information
JoshuaC215 authored Nov 18, 2024
1 parent 5f27dc5 commit aaf914d
Show file tree
Hide file tree
Showing 6 changed files with 115 additions and 2 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,13 @@ jobs:
if: github.repository == 'JoshuaC215/agent-service-toolkit'
uses: ./.github/workflows/test.yml

test-docker:
if: github.repository == 'JoshuaC215/agent-service-toolkit'
uses: ./.github/workflows/test-docker.yml

build:
runs-on: ubuntu-latest
needs: test
needs: [test, test-docker]

steps:
- uses: actions/checkout@v4
Expand Down
67 changes: 66 additions & 1 deletion .github/workflows/test-docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,20 +11,35 @@ permissions:
contents: read

jobs:
docker-build:
test-docker:
runs-on: ubuntu-latest

services:
dind:
image: docker:dind
ports:
- 2375:2375
options: >-
--privileged
--health-cmd "docker info"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
driver-opts: network=host

- name: Build service image
uses: docker/build-push-action@v3
with:
context: .
push: false
load: true
tags: agent-service-toolkit.service:${{ github.sha }}
file: docker/Dockerfile.service

Expand All @@ -33,5 +48,55 @@ jobs:
with:
context: .
push: false
load: true
tags: agent-service-toolkit.app:${{ github.sha }}
file: docker/Dockerfile.app

- name: Start service container
run: docker run -d --name service-container --network host -e USE_FAKE_MODEL=true agent-service-toolkit.service:${{ github.sha }}

- name: Confirm service starts correctly
run: |
timeout 30 bash -c '
while ! curl -s http://localhost/health; do
echo "Waiting for service to be ready..."
docker logs service-container
sleep 2
done
'
- name: Run app container
run: docker run -d --name app-container --network host -e AGENT_URL=http://localhost agent-service-toolkit.app:${{ github.sha }}

- name: Confirm app starts correctly
run: |
timeout 30 bash -c '
while ! curl -s http://localhost:8501/healthz; do
echo "Waiting for app to be ready..."
docker logs app-container
sleep 2
done
'
- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
- name: Install uv
uses: astral-sh/setup-uv@v2
with:
version: "0.4.12"
- name: Install ONLY CLIENT dependencies with uv
run: |
uv pip install -r requirements.txt
uv pip install pytest pytest-env
env:
UV_SYSTEM_PYTHON: 1
- name: Run integration tests
run: pytest tests/integration -v --run-docker

- name: Clean up containers
if: always()
run: |
docker stop service-container app-container || true
docker rm service-container app-container || true
4 changes: 4 additions & 0 deletions src/agents/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from langchain_anthropic import ChatAnthropic
from langchain_aws import ChatBedrock
from langchain_community.chat_models import FakeListChatModel
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
Expand All @@ -10,6 +11,7 @@
# NOTE: models with streaming=True will send tokens as they are generated
# if the /stream endpoint is called with stream_tokens=True (the default)
models: dict[str, BaseChatModel] = {}

if os.getenv("OPENAI_API_KEY") is not None:
models["gpt-4o-mini"] = ChatOpenAI(model="gpt-4o-mini", temperature=0.5, streaming=True)
if os.getenv("GROQ_API_KEY") is not None:
Expand All @@ -26,6 +28,8 @@
models["bedrock-haiku"] = ChatBedrock(
model_id="anthropic.claude-3-5-haiku-20241022-v1:0", temperature=0.5
)
if os.getenv("USE_FAKE_MODEL") == "true":
models["fake"] = FakeListChatModel(responses=["This is a test response from the fake model."])

if not models:
print("No LLM available. Please set environment variables to enable at least one LLM.")
Expand Down
6 changes: 6 additions & 0 deletions src/service/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,4 +237,10 @@ def history(input: ChatHistoryInput) -> ChatHistory:
raise HTTPException(status_code=500, detail="Unexpected error")


@app.get("/health")
async def health_check():
    """Report that the service is up.

    Returns a static payload; presumably polled by container health
    checks — confirm against the deployment workflow.
    """
    return dict(status="ok")


app.include_router(router)
19 changes: 19 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import pytest


def pytest_addoption(parser):
    """Register the ``--run-docker`` command-line flag (off by default).

    When absent, docker-marked integration tests are skipped by
    ``pytest_collection_modifyitems``.
    """
    parser.addoption(
        "--run-docker",
        action="store_true",
        default=False,
        help="run docker integration tests",
    )


def pytest_configure(config):
    """Declare the custom ``docker`` marker so pytest recognizes it."""
    marker_spec = "docker: mark test as requiring docker containers"
    config.addinivalue_line("markers", marker_spec)


def pytest_collection_modifyitems(config, items):
    """Skip tests marked ``docker`` unless ``--run-docker`` was passed.

    With the flag set, collection is left untouched; otherwise every
    docker-marked item gets a skip marker attached.
    """
    if config.getoption("--run-docker"):
        return
    skip_marker = pytest.mark.skip(reason="need --run-docker option to run")
    for test_item in items:
        if "docker" in test_item.keywords:
            test_item.add_marker(skip_marker)
15 changes: 15 additions & 0 deletions tests/integration/test_service_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import pytest

from client import AgentClient


@pytest.mark.docker
def test_service_with_fake_model():
    """Exercise the running service end-to-end via the fake model.

    Requires the service container to be running with
    USE_FAKE_MODEL=true (run with ``pytest --run-docker``).
    """
    agent_client = AgentClient("http://localhost", agent="chatbot")
    reply = agent_client.invoke("Tell me a joke?", model="fake")
    assert reply.type == "ai"
    assert reply.content == "This is a test response from the fake model."

0 comments on commit aaf914d

Please sign in to comment.