Skip to content

Commit

Permalink
Skip containerized tests in GHA on M-series mac
Browse files Browse the repository at this point in the history
  • Loading branch information
benjamincburns committed Oct 29, 2024
1 parent f8842f7 commit 1889c31
Show file tree
Hide file tree
Showing 2 changed files with 184 additions and 144 deletions.
295 changes: 151 additions & 144 deletions libs/checkpoint-mongodb/src/tests/migrations/1_object_metadata.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,159 +18,166 @@ import {
uuid6,
} from "@langchain/langgraph-checkpoint";
import { Migration1ObjectMetadata } from "../../migrations/1_object_metadata.js";
import { isSkippedCIEnvironment } from "../utils.js";

describe("1_object_metadata", () => {
const dbName = "test_db";
let container: StartedMongoDBContainer;
let client: MongoClient;

beforeAll(async () => {
container = await new MongoDBContainer("mongo:6.0.1").start();
const connectionString = `mongodb://127.0.0.1:${container.getMappedPort(
27017
)}/${dbName}?directConnection=true`;
client = new MongoClient(connectionString);
});

afterAll(async () => {
await client.close();
await container.stop();
});

describe("isApplicable", () => {
// MongoDBSaver handles this automatically in initializeSchemaVersion
it("should want to apply on empty database", async () => {
// ensure database is empty
const db = client.db(dbName);
await db.dropDatabase();

const migration = new Migration1ObjectMetadata({
client,
dbName,
checkpointCollectionName: "checkpoints",
checkpointWritesCollectionName: "checkpoint_writes",
schemaVersionCollectionName: "schema_version",
serializer: new JsonPlusSerializer(),
currentSchemaVersion: 1,
});
expect(await migration.isApplicable()).toBe(true);
if (!isSkippedCIEnvironment()) {
const dbName = "test_db";
let container: StartedMongoDBContainer;
let client: MongoClient;

beforeAll(async () => {
container = await new MongoDBContainer("mongo:6.0.1").start();
const connectionString = `mongodb://127.0.0.1:${container.getMappedPort(
27017
)}/${dbName}?directConnection=true`;
client = new MongoClient(connectionString);
});

it("should not want to apply on database with schema version of 1", async () => {
const db = client.db(dbName);
await db.dropDatabase();
await db.createCollection("schema_version");
await db.collection("schema_version").insertOne({ version: 1 });

const migration = new Migration1ObjectMetadata({
client,
dbName,
checkpointCollectionName: "checkpoints",
checkpointWritesCollectionName: "checkpoint_writes",
schemaVersionCollectionName: "schema_version",
serializer: new JsonPlusSerializer(),
currentSchemaVersion: 1,
});
expect(await migration.isApplicable()).toBe(false);
afterAll(async () => {
await client.close();
await container.stop();
});
});

describe("apply", () => {
const expectedCheckpoints: Record<
string,
{
parent_checkpoint_id?: string;
checkpoint: Binary;
type: string;
metadata: CheckpointMetadata;
thread_id: string;
checkpoint_ns: string;
checkpoint_id: string;
}
> = {};

beforeEach(async () => {
const serde = new JsonPlusSerializer();
const dropDb = client.db(dbName);
await dropDb.dropDatabase();
const db = client.db(dbName);
await db.createCollection("checkpoints");
await db.createCollection("schema_version");

for (let i = 0; i < 10; i += 1) {
const checkpoint_id = uuid6(-3);
const thread_id = uuid6(-3);
const checkpoint_ns = "";

const checkpoint: Checkpoint = {
v: 1,
id: checkpoint_id,
ts: new Date().toISOString(),
channel_values: {},
channel_versions: {},
versions_seen: {},
pending_sends: [],
};

const metadata: CheckpointMetadata = {
source: "update",
step: -1,
writes: {},
parents: {},
};

const [checkpointType, serializedCheckpoint] =
serde.dumpsTyped(checkpoint);
const serializedMetadata = serde.dumpsTyped(metadata)[1];

await db.collection("checkpoints").insertOne({
type: checkpointType,
checkpoint: serializedCheckpoint,
metadata: serializedMetadata,
thread_id,
checkpoint_ns,
checkpoint_id,

describe("isApplicable", () => {
// MongoDBSaver handles this automatically in initializeSchemaVersion
it("should want to apply on empty database", async () => {
// ensure database is empty
const db = client.db(dbName);
await db.dropDatabase();

const migration = new Migration1ObjectMetadata({
client,
dbName,
checkpointCollectionName: "checkpoints",
checkpointWritesCollectionName: "checkpoint_writes",
schemaVersionCollectionName: "schema_version",
serializer: new JsonPlusSerializer(),
currentSchemaVersion: 1,
});
expect(await migration.isApplicable()).toBe(true);
});

expectedCheckpoints[checkpoint_id] = {
checkpoint: new Binary(serializedCheckpoint),
type: checkpointType,
metadata,
thread_id,
checkpoint_ns,
checkpoint_id,
};
}
it("should not want to apply on database with schema version of 1", async () => {
const db = client.db(dbName);
await db.dropDatabase();
await db.createCollection("schema_version");
await db.collection("schema_version").insertOne({ version: 1 });

const migration = new Migration1ObjectMetadata({
client,
dbName,
checkpointCollectionName: "checkpoints",
checkpointWritesCollectionName: "checkpoint_writes",
schemaVersionCollectionName: "schema_version",
serializer: new JsonPlusSerializer(),
currentSchemaVersion: 1,
});
expect(await migration.isApplicable()).toBe(false);
});
});

it("should migrate all checkpoints", async () => {
const migration = new Migration1ObjectMetadata({
client,
dbName,
checkpointCollectionName: "checkpoints",
checkpointWritesCollectionName: "checkpoint_writes",
schemaVersionCollectionName: "schema_version",
serializer: new JsonPlusSerializer(),
currentSchemaVersion: 1,
describe("apply", () => {
const expectedCheckpoints: Record<
string,
{
parent_checkpoint_id?: string;
checkpoint: Binary;
type: string;
metadata: CheckpointMetadata;
thread_id: string;
checkpoint_ns: string;
checkpoint_id: string;
}
> = {};

beforeEach(async () => {
const serde = new JsonPlusSerializer();
const dropDb = client.db(dbName);
await dropDb.dropDatabase();
const db = client.db(dbName);
await db.createCollection("checkpoints");
await db.createCollection("schema_version");

for (let i = 0; i < 10; i += 1) {
const checkpoint_id = uuid6(-3);
const thread_id = uuid6(-3);
const checkpoint_ns = "";

const checkpoint: Checkpoint = {
v: 1,
id: checkpoint_id,
ts: new Date().toISOString(),
channel_values: {},
channel_versions: {},
versions_seen: {},
pending_sends: [],
};

const metadata: CheckpointMetadata = {
source: "update",
step: -1,
writes: {},
parents: {},
};

const [checkpointType, serializedCheckpoint] =
serde.dumpsTyped(checkpoint);
const serializedMetadata = serde.dumpsTyped(metadata)[1];

await db.collection("checkpoints").insertOne({
type: checkpointType,
checkpoint: serializedCheckpoint,
metadata: serializedMetadata,
thread_id,
checkpoint_ns,
checkpoint_id,
});

expectedCheckpoints[checkpoint_id] = {
checkpoint: new Binary(serializedCheckpoint),
type: checkpointType,
metadata,
thread_id,
checkpoint_ns,
checkpoint_id,
};
}
});

it("should migrate all checkpoints", async () => {
const migration = new Migration1ObjectMetadata({
client,
dbName,
checkpointCollectionName: "checkpoints",
checkpointWritesCollectionName: "checkpoint_writes",
schemaVersionCollectionName: "schema_version",
serializer: new JsonPlusSerializer(),
currentSchemaVersion: 1,
});
await migration.apply();

const db = client.db(dbName);
const cursor = await db.collection("checkpoints").find({});

let docCount = 0;
for await (const actual of cursor) {
docCount += 1;
const expected = expectedCheckpoints[actual.checkpoint_id];
expect(actual.parent_checkpoint_id).toBe(
expected.parent_checkpoint_id
);
expect(actual.type).toBe(expected.type);
expect(actual.checkpoint).toEqual(expected.checkpoint);
expect(actual.metadata).toEqual(expected.metadata);
expect(actual.thread_id).toBe(expected.thread_id);
expect(actual.checkpoint_ns).toBe(expected.checkpoint_ns);
expect(actual.checkpoint_id).toBe(expected.checkpoint_id);
}
expect(docCount).toBe(10);
});
await migration.apply();

const db = client.db(dbName);
const cursor = await db.collection("checkpoints").find({});

let docCount = 0;
for await (const actual of cursor) {
docCount += 1;
const expected = expectedCheckpoints[actual.checkpoint_id];
expect(actual.parent_checkpoint_id).toBe(expected.parent_checkpoint_id);
expect(actual.type).toBe(expected.type);
expect(actual.checkpoint).toEqual(expected.checkpoint);
expect(actual.metadata).toEqual(expected.metadata);
expect(actual.thread_id).toBe(expected.thread_id);
expect(actual.checkpoint_ns).toBe(expected.checkpoint_ns);
expect(actual.checkpoint_id).toBe(expected.checkpoint_id);
}
expect(docCount).toBe(10);
});
});
} else {
it.skip("GitHub can't run containers on M-Series macOS runners due to lack of support for nested virtualization.", () => {});
}
});
33 changes: 33 additions & 0 deletions libs/checkpoint-mongodb/src/tests/utils.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import { platform, arch } from "node:os";

/** True when running natively on Apple Silicon (darwin + arm64). */
function isMSeriesMac() {
  const onDarwin = platform() === "darwin";
  const onArm64 = arch() === "arm64";
  return onDarwin && onArm64;
}

/** True when the current OS platform is Windows. */
function isWindows() {
  const WINDOWS_PLATFORM = "win32";
  return platform() === WINDOWS_PLATFORM;
}

/** True when the CI environment variable is set to "true" (case-insensitive). */
function isCI() {
  // eslint-disable-next-line no-process-env
  const ciFlag = process.env.CI;
  if (ciFlag === undefined) {
    return false;
  }
  return ciFlag.toLowerCase() === "true";
}

/**
 * Detects CI environments where container-based tests cannot run and must be skipped.
 *
 * GitHub Actions doesn't support containers on M-series macOS due to a lack of hypervisor support for nested
 * virtualization.
 *
 * For details, see https://github.com/actions/runner-images/issues/9460#issuecomment-1981203045
 *
 * GitHub Actions also doesn't support Linux containers on Windows, and may never do so. This is in part due to Docker
 * Desktop licensing restrictions, and the complexity of setting up Moby or similar without Docker Desktop.
 * Unfortunately, TestContainers doesn't support Windows containers, so we can't run the tests on Windows either.
 *
 * For details, see https://github.com/actions/runner/issues/904 and
 * https://java.testcontainers.org/supported_docker_environment/windows/#windows-container-on-windows-wcow
 *
 * @returns true when running in CI on Windows or on an M-series (arm64) macOS host; false otherwise
 * (including all local, non-CI runs).
 */
export function isSkippedCIEnvironment() {
  return isCI() && (isWindows() || isMSeriesMac());
}
}

0 comments on commit 1889c31

Please sign in to comment.