
Commit

Merge pull request #774 from ai16z/shaw-add-llms
Integrate more LLMs, fix case issue in switch
lalalune authored Dec 2, 2024
2 parents 67ca455 + eb12038 commit a62c8f4
Showing 5 changed files with 61 additions and 8 deletions.
3 changes: 3 additions & 0 deletions .env.example
@@ -14,6 +14,9 @@ GROQ_API_KEY= # Starts with gsk_
OPENROUTER_API_KEY=
GOOGLE_GENERATIVE_AI_API_KEY= # Gemini API key

ALI_BAILIAN_API_KEY= # Ali Bailian API Key
VOLENGINE_API_KEY= # VolEngine API Key

# Speech Synthesis
ELEVENLABS_XI_API_KEY= # API key from elevenlabs

10 changes: 10 additions & 0 deletions agent/src/index.ts
@@ -248,6 +248,16 @@ export function getTokenForProvider(
return (
character.settings?.secrets?.FAL_API_KEY || settings.FAL_API_KEY
);
case ModelProviderName.ALI_BAILIAN:
return (
character.settings?.secrets?.ALI_BAILIAN_API_KEY ||
settings.ALI_BAILIAN_API_KEY
);
case ModelProviderName.VOLENGINE:
return (
character.settings?.secrets?.VOLENGINE_API_KEY ||
settings.VOLENGINE_API_KEY
);
}
}

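The two new cases follow the same pattern as the existing providers: a key stored in the character's own secrets takes precedence, and the process-level setting loaded from .env is the fallback. Below is a minimal standalone sketch of that precedence; the character shape is an assumption for illustration only, not the project's full Character type.

    type CharacterLike = { settings?: { secrets?: Record<string, string> } };

    // Mirrors the lookup order above: character-scoped secret first, then global setting.
    function resolveVolengineKey(
        character: CharacterLike,
        settings: Record<string, string | undefined>
    ): string | undefined {
        return (
            character.settings?.secrets?.VOLENGINE_API_KEY ||
            settings.VOLENGINE_API_KEY
        );
    }

    // When both are defined, the character-scoped secret wins:
    const key = resolveVolengineKey(
        { settings: { secrets: { VOLENGINE_API_KEY: "character-scoped-key" } } },
        { VOLENGINE_API_KEY: "env-scoped-key" }
    ); // -> "character-scoped-key"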
7 changes: 6 additions & 1 deletion packages/core/src/generation.ts
@@ -118,6 +118,8 @@ export async function generateText({
// OPENAI & LLAMACLOUD share the same structure.
case ModelProviderName.OPENAI:
case ModelProviderName.ETERNALAI:
case ModelProviderName.ALI_BAILIAN:
case ModelProviderName.VOLENGINE:
case ModelProviderName.LLAMACLOUD: {
elizaLogger.debug("Initializing OpenAI model.");
const openai = createOpenAI({ apiKey, baseURL: endpoint });
@@ -373,7 +375,7 @@ export async function generateText({
elizaLogger.debug("Received response from Heurist model.");
break;
}
- case ModelProviderName.GAIANET:
+ case ModelProviderName.GAIANET: {
elizaLogger.debug("Initializing GAIANET model.");
const openai = createOpenAI({ apiKey, baseURL: endpoint });

@@ -393,6 +395,7 @@ export async function generateText({
response = openaiResponse;
elizaLogger.debug("Received response from GAIANET model.");
break;
}

case ModelProviderName.GALADRIEL: {
elizaLogger.debug("Initializing Galadriel model.");
@@ -1151,6 +1154,8 @@ export async function handleProvider(
switch (provider) {
case ModelProviderName.OPENAI:
case ModelProviderName.ETERNALAI:
case ModelProviderName.ALI_BAILIAN:
case ModelProviderName.VOLENGINE:
case ModelProviderName.LLAMACLOUD:
return await handleOpenAI(options);
case ModelProviderName.ANTHROPIC:
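The braces added around the GAIANET case are the "case issue in switch" from the commit message. Without braces, a const declared inside a case is scoped to the entire switch body rather than to that case, so sibling cases declaring the same name (more than one branch here declares const openai) can collide with a "Cannot redeclare block-scoped variable" error, and linters flag the pattern via no-case-declarations. Wrapping the case body in braces gives it its own block scope. A small self-contained illustration, not the provider code itself:

    function describe(kind: "a" | "b"): string {
        let response = "";
        switch (kind) {
            case "a": {
                // The braces give this case its own block scope...
                const client = { name: "client-a" };
                response = client.name;
                break;
            }
            case "b": {
                // ...so `client` can be declared again here; without the braces,
                // both declarations would share the switch scope and collide.
                const client = { name: "client-b" };
                response = client.name;
                break;
            }
        }
        return response;
    }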
45 changes: 38 additions & 7 deletions packages/core/src/models.ts
@@ -314,16 +314,47 @@ export const models: Models = {
},
endpoint: settings.GAIANET_SERVER_URL || "http://localhost:8080/v1",
model: {
- [ModelClass.SMALL]:
-     settings.GAIANET_MODEL || "llama3.2",
- [ModelClass.MEDIUM]:
-     settings.GAIANET_MODEL || "llama3.2",
- [ModelClass.LARGE]:
-     settings.GAIANET_MODEL || "llama3.2",
+ [ModelClass.SMALL]: settings.GAIANET_MODEL || "llama3.2",
+ [ModelClass.MEDIUM]: settings.GAIANET_MODEL || "llama3.2",
+ [ModelClass.LARGE]: settings.GAIANET_MODEL || "llama3.2",
[ModelClass.EMBEDDING]:
settings.GAIANET_EMBEDDING_MODEL || "nomic-embed",
},
- }
+ },
[ModelProviderName.ALI_BAILIAN]: {
endpoint: "https://dashscope.aliyuncs.com/compatible-mode/v1",
settings: {
stop: [],
maxInputTokens: 128000,
maxOutputTokens: 8192,
frequency_penalty: 0.4,
presence_penalty: 0.4,
temperature: 0.6,
},
model: {
[ModelClass.SMALL]: "qwen-turbo",
[ModelClass.MEDIUM]: "qwen-plus",
[ModelClass.LARGE]: "qwen-max",
[ModelClass.IMAGE]: "wanx-v1",
},
},
[ModelProviderName.VOLENGINE]: {
endpoint: "https://open.volcengineapi.com/api/v3/",
settings: {
stop: [],
maxInputTokens: 128000,
maxOutputTokens: 8192,
frequency_penalty: 0.4,
presence_penalty: 0.4,
temperature: 0.6,
},
model: {
[ModelClass.SMALL]: "doubao-lite-128k",
[ModelClass.MEDIUM]: "doubao-pro-128k",
[ModelClass.LARGE]: "doubao-pro-128k",
[ModelClass.EMBEDDING]: "doubao-embedding",
},
},
};

export function getModel(provider: ModelProviderName, type: ModelClass) {
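Both new providers are wired through the OpenAI-compatible path in generation.ts, so a direct call against the Ali Bailian endpoint registered above would look roughly like the sketch below. This is a standalone illustration: it assumes the createOpenAI used in the diff is the export from @ai-sdk/openai and that the API key is available in the environment.

    import { createOpenAI } from "@ai-sdk/openai";
    import { generateText } from "ai";

    async function demoAliBailian(): Promise<string> {
        // Endpoint and model name are taken from the ALI_BAILIAN entry above.
        const aliBailian = createOpenAI({
            apiKey: process.env.ALI_BAILIAN_API_KEY,
            baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1",
        });

        // "qwen-plus" is the MEDIUM-class model registered for this provider.
        const { text } = await generateText({
            model: aliBailian("qwen-plus"),
            prompt: "Say hello in one short sentence.",
        });
        return text;
    }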
4 changes: 4 additions & 0 deletions packages/core/src/types.ts
@@ -202,6 +202,8 @@ export type Models = {
[ModelProviderName.GALADRIEL]: Model;
[ModelProviderName.FAL]: Model;
[ModelProviderName.GAIANET]: Model;
[ModelProviderName.ALI_BAILIAN]: Model;
[ModelProviderName.VOLENGINE]: Model;
};

/**
@@ -224,6 +226,8 @@ export enum ModelProviderName {
GALADRIEL = "galadriel",
FAL = "falai",
GAIANET = "gaianet",
ALI_BAILIAN = "ali_bailian",
VOLENGINE = "volengine",
}

/**
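With the Models map and the enum extended, a character can opt into one of the new providers by its string value. A hedged sketch of what that could look like; the modelProvider field name is an assumption based on the surrounding project rather than something shown in this diff, while the settings.secrets path matches the getTokenForProvider change above.

    // Illustrative fragment only -- field names other than settings.secrets are assumed.
    const doubaoCharacter = {
        name: "doubao-agent",
        modelProvider: "volengine", // the string value of ModelProviderName.VOLENGINE
        settings: {
            secrets: {
                // Checked first by getTokenForProvider; settings.VOLENGINE_API_KEY
                // loaded from .env is the fallback.
                VOLENGINE_API_KEY: "<your VolEngine API key>",
            },
        },
    };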
