Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add deepseek #378

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified bun.lockb
Binary file not shown.
12 changes: 7 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,10 @@
"huggingface",
"ai",
"genai",
"llama"
"llama",
"deepseek",
"deepseek-chat",
"deepseek-R1"
],
"author": "Matt Carey",
"license": "MIT",
Expand All @@ -55,7 +58,8 @@
"tinyglobby": "^0.2.10",
"tslog": "^4.8.2",
"yargs": "^17.7.2",
"zod": "^3.24.1"
"zod": "^3.24.1",
"openai": "^4.79.4"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
Expand All @@ -66,7 +70,5 @@
"npm-dts": "^1.3.12",
"typescript": "^5.1.6"
},
"files": [
"dist/*"
]
"files": ["dist/*"]
}
2 changes: 1 addition & 1 deletion setup.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ You can now run `code-review-gpt review` in the root directory of any git-enable

### Options

- `--ci` - Used with the `review` command. Options are --ci=("github" | "gitlab"). Defaults to "github" if no option is specified. Runs the application in CI mode. This will use the BASE_SHA and GITHUB_SHA environment variables to determine which files to review. It will also use the GITHUB_TOKEN environment variable to create a comment on the pull request with the review results.
- `--ci` - Used with the `review` command. Options are --ci=("github" | "gitlab" | "azdev"). Defaults to "github" if no option is specified. Runs the application in CI mode. This will use the BASE_SHA and GITHUB_SHA environment variables to determine which files to review. It will also use the GITHUB_TOKEN environment variable to create a comment on the pull request with the review results.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this DeepSeek related?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this DeepSeek related?

no


- `--reviewType` - Used with the 'review' command. The options are --reviewType=("changed" | "full" | "costOptimized"). Defaults to "changed" if no option is specified. Specifies whether the review is for the full file or just the changed lines. costOptimized limits the context surrounding the changed lines to 5 lines.

Expand Down
9 changes: 5 additions & 4 deletions src/args.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import rawlist from '@inquirer/rawlist';
import dotenv from 'dotenv';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';

import { modelsNames } from './review/constants';
import type { ReviewArgs } from './common/types';

dotenv.config();
Expand All @@ -22,7 +22,7 @@ const handleNoCommand = async (): Promise<string | number> => {
return command;
};

export const getYargs = async (): Promise<ReviewArgs> => {
export const getYargs = async () => {
return yargs(hideBin(process.argv))
.command('configure', 'Configure the tool')
.command('review', 'Review code changes')
Expand All @@ -49,6 +49,7 @@ export const getYargs = async (): Promise<ReviewArgs> => {
})
.option('model', {
description: 'The model to use for generating the review.',
choices: modelsNames,
type: 'string',
default: 'gpt-4o-mini',
})
Expand Down Expand Up @@ -82,10 +83,10 @@ export const getYargs = async (): Promise<ReviewArgs> => {
})
.option('provider', {
description: 'Provider to use for AI',
choices: ['openai', 'azureai', 'bedrock'],
choices: ['openai', 'azureai', 'bedrock', 'deepseek'],
type: 'string',
default: 'openai',
})
.help()
.parse();
.parse() as ReviewArgs;
};
90 changes: 71 additions & 19 deletions src/common/model/AIModel.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,22 @@
import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';

import { OpenAI as DeepSeekAI } from 'openai';
import type { ZodType } from 'zod';
import type { IFeedback } from '../types';
import { logger } from '../utils/logger';
import type { AIModelName } from '../../review/constants';
import type { ProviderOptions } from '../../common/types';

interface IAIModel {
modelName: string;
provider: string;
modelName: AIModelName;
provider: ProviderOptions;
temperature: number;
apiKey: string;
retryCount?: number;
organization: string | undefined;
organization?: string;
}

export class AIModel {
private model: ChatOpenAI;
private model: ChatOpenAI | ReturnType<typeof createDeepSeekModel>;

constructor(options: IAIModel) {
switch (options.provider) {
Expand All @@ -26,11 +28,19 @@ export class AIModel {
modelName: options.modelName,
});
break;
case "azureai":
case 'azureai':
this.model = new AzureChatOpenAI({
temperature: options.temperature,
});
break;
case 'deepseek':
this.model = createDeepSeekModel({
apiKey: options.apiKey,
baseURL: 'https://api.deepseek.com',
temperature: options.temperature,
model: options.modelName as Extract<AIModelName, `deepseek-${string}`>,
});
break;
case 'bedrock':
throw new Error('Bedrock provider not implemented');
default:
Expand All @@ -39,25 +49,33 @@ export class AIModel {
}

public async callModel(prompt: string): Promise<string> {
const message = await this.model.invoke(prompt);
return message.content[0] as string;
if ('callModel' in this.model) {
return this.model.callModel(prompt);
} else {
const message = await this.model.invoke(prompt);
return message.content[0] as string;
}
}

public async callStructuredModel(prompt: string, schema: ZodType): Promise<IFeedback[]> {
const modelWithStructuredOutput = this.model.withStructuredOutput(schema, {
method: 'jsonSchema',
strict: true,
includeRaw: true,
});
const res = await modelWithStructuredOutput.invoke(prompt);
if ('callStructuredModel' in this.model) {
return this.model.callStructuredModel(prompt);
} else {
const modelWithStructuredOutput = this.model.withStructuredOutput(schema, {
method: 'jsonSchema',
strict: true,
includeRaw: true,
});
const res = await modelWithStructuredOutput.invoke(prompt);

logger.debug('LLm response', res);
logger.debug('LLm response', res);

if (res.parsed) {
return res.parsed;
}
if (res.parsed) {
return res.parsed;
}

return parseJson(res.raw.content[0] as string);
return parseJson(res.raw.content[0] as string);
}
}
}

Expand All @@ -80,3 +98,37 @@ const parseJson = (json: string) => {
logger.debug('Escaped JSON', jsonString);
return JSON.parse(jsonString);
};

/**
 * Configuration for the DeepSeek chat-completions client created by
 * `createDeepSeekModel`.
 */
interface DeepSeekOptions {
  /** DeepSeek API key (passed through from the `--apiKey`/env configuration). */
  apiKey: string;
  /** API endpoint, e.g. 'https://api.deepseek.com'. */
  baseURL: string;
  /** Sampling temperature. Recommended values per use case: https://api-docs.deepseek.com/quick_start/parameter_settings 0.0 | 1.0 | 1.3 | 1.5 */
  temperature: number;
  /** Restricted to the `deepseek-*` entries of `AIModelName` (see src/review/constants.ts). */
  model: Extract<AIModelName, `deepseek-${string}`>;
}

/**
 * Creates a lightweight DeepSeek client exposing the call surface the
 * `AIModel` wrapper dispatches to (`callModel` / `callStructuredModel`).
 *
 * DeepSeek serves an OpenAI-compatible API, so the `openai` SDK is reused,
 * pointed at the DeepSeek base URL.
 */
function createDeepSeekModel(options: DeepSeekOptions) {
  // NOTE: `temperature` is a required property, so the previous destructuring
  // default (`temperature = 1.0`) was dead code and has been dropped.
  const { apiKey, baseURL, temperature, model } = options;

  const client = new DeepSeekAI({ baseURL, apiKey });

  // The API may return `null` content; fail loudly with context instead of
  // casting the null to `string` and letting it crash downstream (e.g. inside
  // `parseJson`) with a confusing message.
  const contentOrThrow = (content: string | null | undefined): string => {
    if (content == null) {
      throw new Error(`DeepSeek model "${model}" returned an empty response`);
    }
    return content;
  };

  return {
    /** Plain completion: send a single user prompt, return the raw text reply. */
    callModel: async (prompt: string): Promise<string> => {
      const completion = await client.chat.completions.create({
        messages: [{ role: 'user', content: prompt }],
        model,
        temperature,
      });
      return contentOrThrow(completion.choices[0]?.message.content);
    },

    /**
     * Structured completion: same request, but the reply is expected to be
     * JSON feedback, which is parsed via the file-level `parseJson` helper.
     */
    callStructuredModel: async (prompt: string): Promise<IFeedback[]> => {
      const completion = await client.chat.completions.create({
        messages: [{ role: 'user', content: prompt }],
        model,
        temperature,
      });
      return parseJson(contentOrThrow(completion.choices[0]?.message.content));
    },
  };
}
17 changes: 12 additions & 5 deletions src/common/types.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { z } from 'zod';
import type { feedbackSchema, reviewSchema } from '../review/prompt/schemas';
import type { AIModelName } from '../review/constants';

export type AskAIResponse = {
markdownReport: string;
Expand Down Expand Up @@ -35,17 +36,23 @@ export enum PlatformOptions {
AZDEV = 'azdev',
}

/** CI platforms the tool can run against; also the valid `--setupTarget` values. */
export type CIOptions = 'github' | 'gitlab' | 'azdev';

/** AI backends selectable via `--provider` (see src/args.ts). */
export type ProviderOptions = 'openai' | 'azureai' | 'bedrock' | 'deepseek';

/** Review scope: whole files, changed lines only, or cost-optimized (trimmed context). */
export type ReviewType = 'full' | 'changed' | 'costOptimized';

/** Parsed CLI arguments as produced by yargs in `getYargs` (src/args.ts). */
export type ReviewArgs = {
  // yargs allows arbitrary extra flags; they land here untyped.
  [x: string]: unknown;
  ci: CIOptions | undefined;
  setupTarget: CIOptions;
  commentPerFile: boolean;
  model: AIModelName;
  reviewType: ReviewType;
  reviewLanguage: string | undefined;
  org: string | undefined;
  remote: string | undefined;
  provider: ProviderOptions;
  // yargs bookkeeping: positional arguments and the invoked script name.
  _: (string | number)[];
  $0: string;
};
18 changes: 15 additions & 3 deletions src/review/constants.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
export const signOff =
'#### Powered by [Code Review GPT](https://github.com/mattzcarey/code-review-gpt)';

export type AIModelName = (typeof modelInfo)[number]['model'];

export const modelInfo = [
{
model: 'gpt-4o-mini',
Expand Down Expand Up @@ -34,9 +36,19 @@ export const modelInfo = [
model: 'gpt-3.5-turbo-16k',
maxPromptLength: 45000, //16k tokens
},
]; // Response needs about 1k tokens ~= 3k characters
{
model: 'deepseek-chat',
maxPromptLength: 180000, //64k tokens
},
{
model: 'deepseek-reasoner',
maxPromptLength: 180000, //64k tokens
},
] as const; // Response needs about 1k tokens ~= 3k characters

export const modelsNames = modelInfo.map((item) => item.model);

export const languageMap: { [key: string]: string } = {
export const languageMap = {
'.js': 'JavaScript',
'.ts': 'TypeScript',
'.py': 'Python',
Expand All @@ -61,7 +73,7 @@ export const languageMap: { [key: string]: string } = {
'.tf': 'Terraform',
'.hcl': 'Terraform',
'.swift': 'Swift',
};
} as const;

export const supportedFiles = new Set(Object.keys(languageMap));

Expand Down
7 changes: 5 additions & 2 deletions src/review/llm/askAI.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,16 @@ import type { AskAIResponse } from '../../common/types';
import { logger } from '../../common/utils/logger';
import { processFeedbacks } from './feedbackProcessor';
import { markdownReport } from './generateMarkdownReport';
import { AIModelName } from '../constants';

import type { ProviderOptions } from '../../common/types';

export const askAI = async (
prompts: string[],
modelName: string,
modelName: AIModelName,
openAIApiKey: string,
organization: string | undefined,
provider: string
provider: ProviderOptions
): Promise<AskAIResponse> => {
logger.info('Asking the experts...');

Expand Down
10 changes: 6 additions & 4 deletions src/test/run/runTest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import { logger } from '../../common/utils/logger';
import { askAI } from '../../review/llm/askAI';
import { constructPromptsArray } from '../../review/prompt/constructPrompt/constructPrompt';
import type { TestCase } from '../types';
import type { AIModelName } from '../../review/constants';
import type { ReviewType } from '../../common/types';
import {
generateTestReport,
generateTestResultsSummary,
Expand All @@ -25,10 +27,10 @@ import {
const runTest = async (
openAIApiKey: string,
testCase: TestCase,
modelName: string,
modelName: AIModelName,
maxPromptLength: number,
vectorStore: MemoryVectorStore,
reviewType: string,
reviewType: ReviewType,
reviewLanguage?: string
// eslint-disable-next-line max-params
): Promise<testResult> => {
Expand Down Expand Up @@ -88,10 +90,10 @@ const runTest = async (
export const runTests = async (
openAIApiKey: string,
testCases: TestCase[],
modelName: string,
modelName: AIModelName,
maxPromptLength: number,
vectorStore: MemoryVectorStore,
reviewType: string,
reviewType: ReviewType,
reviewLanguage?: string
// eslint-disable-next-line max-params
): Promise<string> => {
Expand Down