Merge pull request #3672 from 0xbbjoker/0xbbjoker/fix-pglite
fix: pglite & migrations
wtfsayo authored Feb 25, 2025
2 parents 13224d6 + 3c2fb1c commit ae790e3
Showing 4 changed files with 14 additions and 81 deletions.
@@ -182,25 +182,4 @@ CREATE INDEX "idx_memories_document_id" ON "memories" USING btree (((metadata->>
CREATE INDEX "idx_fragments_order" ON "memories" USING btree (((metadata->>'documentId')),((metadata->>'position')));--> statement-breakpoint
CREATE INDEX "idx_participants_user" ON "participants" USING btree ("userId");--> statement-breakpoint
CREATE INDEX "idx_participants_room" ON "participants" USING btree ("roomId");--> statement-breakpoint
CREATE INDEX "idx_relationships_users" ON "relationships" USING btree ("userA","userB");

CREATE EXTENSION IF NOT EXISTS vector;
--> statement-breakpoint

CREATE EXTENSION IF NOT EXISTS fuzzystrmatch;
--> statement-breakpoint

-- Custom SQL migration file, put your code below! --
CREATE INDEX IF NOT EXISTS idx_embeddings_dim384 ON embeddings USING hnsw ("dim_384" vector_cosine_ops);
--> statement-breakpoint

CREATE INDEX IF NOT EXISTS idx_embeddings_dim512 ON embeddings USING hnsw ("dim_512" vector_cosine_ops);
--> statement-breakpoint

CREATE INDEX IF NOT EXISTS idx_embeddings_dim768 ON embeddings USING hnsw ("dim_768" vector_cosine_ops);
--> statement-breakpoint

CREATE INDEX IF NOT EXISTS idx_embeddings_dim1024 ON embeddings USING hnsw ("dim_1024" vector_cosine_ops);
--> statement-breakpoint

CREATE INDEX IF NOT EXISTS idx_embeddings_dim1536 ON embeddings USING hnsw ("dim_1536" vector_cosine_ops);
CREATE INDEX "idx_relationships_users" ON "relationships" USING btree ("userA","userB");
@@ -1,5 +1,5 @@
{
"id": "1265cc44-23ec-4e51-9ea3-8ea569f783d8",
"id": "9c756016-9f8e-4607-99f0-4a5c3970389d",
"prevId": "00000000-0000-0000-0000-000000000000",
"version": "7",
"dialect": "postgresql",
4 changes: 2 additions & 2 deletions packages/plugin-sql/drizzle/migrations/meta/_journal.json
@@ -5,8 +5,8 @@
{
"idx": 0,
"version": "7",
"when": 1740454059938,
"tag": "20250225032739_init",
"when": 1740511703191,
"tag": "20250225192823_init_schema",
"breakpoints": true
}
]
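
For reference, the journal entry updated above follows drizzle-kit's meta/_journal.json layout. A TypeScript description of that shape, inferred from the fields visible in the diff (the interface name is ours, not from the repository):

// Shape of one entry in meta/_journal.json as emitted by drizzle-kit,
// inferred from the diff above.
interface JournalEntry {
  idx: number;          // position of the migration in the sequence
  version: string;      // journal format version, "7" here
  when: number;         // generation time as a Unix timestamp in milliseconds
  tag: string;          // base name of the companion .sql file, e.g. "20250225192823_init_schema"
  breakpoints: boolean; // whether --> statement-breakpoint markers are used to split statements
}
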
66 changes: 10 additions & 56 deletions packages/plugin-sql/src/pg-lite/manager.ts
@@ -1,14 +1,12 @@
import { PGlite } from "@electric-sql/pglite";
import type { PGliteOptions } from "@electric-sql/pglite";
import { PGlite, type PGliteOptions } from "@electric-sql/pglite";
import { vector } from "@electric-sql/pglite/vector";
import { fuzzystrmatch } from "@electric-sql/pglite/contrib/fuzzystrmatch";
import { logger } from "@elizaos/core";
import type { IDatabaseClientManager } from "../types";
import { fileURLToPath } from 'node:url';
import path from "node:path";
import { IDatabaseClientManager } from "../types";
import { migrate } from "drizzle-orm/pglite/migrator";
import { fileURLToPath } from 'url';
import path from "path";
import { drizzle } from "drizzle-orm/pglite";
import fs from 'node:fs/promises';


const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
@@ -112,56 +110,12 @@ export class PGliteClientManager implements IDatabaseClientManager<PGlite> {

async runMigrations(): Promise<void> {
try {
// Wait for client to be ready
await this.client.waitReady;

// Ensure extensions exist before running migrations
await this.ensureExtensions();


// Get all SQL migration files
const migrationsDir = path.resolve(__dirname, "../drizzle/migrations");
const files = await fs.readdir(migrationsDir);
const migrationFiles = files.filter(f => f.endsWith('.sql'));

// Sort migration files to ensure correct order
migrationFiles.sort();

// Process each migration file
for (const migrationFile of migrationFiles) {
const migrationPath = path.join(migrationsDir, migrationFile);
const migrationContent = await fs.readFile(migrationPath, 'utf8');

// Split the content into individual SQL statements
const statements = migrationContent
.split(/(?<!--);\s*(?=(?:[^']*'[^']*')*[^']*$)/g)
.map(stmt => stmt.trim())
.filter(stmt => {
// Filter out empty statements and statement-breakpoint comments
return stmt.length > 0 && !stmt.includes('--> statement-breakpoint');
});

// Execute each statement separately
for (const statement of statements) {
try {
if (statement.toLowerCase().includes('create extension')) {
// Skip extension creation as we handle it separately
continue;
}
await this.client.query(statement);
} catch (error) {
// Ignore errors about existing tables/constraints
if (error instanceof Error &&
!error.message.includes('already exists')) {
throw error;
}
}
}

logger.info(`Migration ${migrationFile} completed`);
}

logger.info("All migrations completed successfully!");
const db = drizzle(this.client);
await migrate(db, {
migrationsFolder: path.resolve(__dirname, "../drizzle/migrations"),
});
logger.info("Migrations completed successfully!");
} catch (error) {
logger.error("Failed to run database migrations:", error);
throw error;
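
Taken together, the new runMigrations body delegates everything to drizzle's PGlite migrator. A standalone sketch of that path, assuming an illustrative migrations directory (the imports and the migrate call mirror the ones added in this diff; the wrapper function itself is hypothetical):

import { PGlite } from "@electric-sql/pglite";
import { drizzle } from "drizzle-orm/pglite";
import { migrate } from "drizzle-orm/pglite/migrator";
import path from "node:path";

// Wrap the PGlite client in a drizzle instance and let drizzle's migrator
// apply the generated SQL files, using meta/_journal.json to order them and
// its own bookkeeping to skip migrations that have already run.
async function applyMigrations(client: PGlite, migrationsDir: string): Promise<void> {
  await client.waitReady;
  const db = drizzle(client);
  await migrate(db, {
    migrationsFolder: path.resolve(migrationsDir),
  });
}

Compared with the removed hand-rolled loop, this drops the manual statement splitting and the "already exists" error filtering, since the migrator only applies migrations it has not yet recorded.
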
