fix: price page init data;perf: usage code;fix: reasoning tokens;fix: workflow basic node cannot upgrade (#3816)

* fix: img read

* fix: price page init data

* perf: ai model avatar

* perf: refresh in change team

* perf: null checker

* perf: usage code

* fix: reasoning tokens

* fix: workflow basic node cannot upgrade

* perf: model refresh

* perf: icon refresh
c121914yu authored Feb 18, 2025
1 parent ccf28d8 commit 09205e4
Showing 32 changed files with 374 additions and 249 deletions.
14 changes: 10 additions & 4 deletions docSite/content/zh-cn/docs/development/upgrading/4822.md
@@ -11,7 +11,13 @@ weight: 802
## Complete update list

1. New - The AI chat node now parses <think></think> tag content, making it easier for all kinds of models to output chains of thought (a small parsing sketch follows this list).
2. Fix - When streaming chain-of-thought output, it sometimes got out of order with the main answer text.
3. Fix - When a workflow is called via API, images that do not support HEAD detection were filtered out. Detection for this class of error has been added so such images are no longer dropped by mistake.
4. Fix - Errors in some template-market templates.
5. Fix - The login-free window could not correctly determine whether speech recognition is enabled.
2. Improvement - Clearer prompt when no model is configured, reducing conflicting messages.
3. Improvement - Usage-record code.
4. Fix - Reasoning content was not counted toward output tokens.
5. Fix - When streaming chain-of-thought output, it sometimes got out of order with the main answer text.
6. Fix - When a workflow is called via API, images that do not support HEAD detection were filtered out. Detection for this class of error has been added so such images are no longer dropped by mistake.
7. Fix - Errors in some template-market templates.
8. Fix - The login-free window could not correctly determine whether speech recognition is enabled.
9. Fix - Chat log export did not support deployments under a sub path.
10. Fix - The list API could hit a null pointer when joining member data.
11. Fix - Workflow basic nodes could not be upgraded.
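Item 1 above describes the AI chat node pulling <think></think> content out of model output. Below is a minimal, illustrative sketch of that kind of tag splitting in TypeScript; the helper name and regex are assumptions, not the exact FastGPT implementation (which also has to handle streamed chunks).

```ts
// Hypothetical helper: separate reasoning wrapped in <think></think> from the answer text.
export const splitThinkContent = (text: string): { reasoning: string; answer: string } => {
  const match = text.match(/<think>([\s\S]*?)<\/think>/);
  if (!match) return { reasoning: '', answer: text };

  return {
    reasoning: match[1].trim(),
    answer: text.replace(match[0], '').trim()
  };
};

// splitThinkContent('<think>Check the units first.</think>The answer is 42.')
// => { reasoning: 'Check the units first.', answer: 'The answer is 42.' }
```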
2 changes: 1 addition & 1 deletion packages/global/common/file/constants.ts
@@ -20,4 +20,4 @@ export const ReadFileBaseUrl = `${process.env.FILE_DOMAIN || process.env.FE_DOMA

export const documentFileType = '.txt, .docx, .csv, .xlsx, .pdf, .md, .html, .pptx';
export const imageFileType =
'.jpg, .jpeg, .png, .gif, .bmp, .webp, .svg, .tiff, .tif, .ico, .heic, .heif, .avif';
'.jpg, .jpeg, .png, .gif, .bmp, .webp, .svg, .tiff, .tif, .ico, .heic, .heif, .avif, .raw, .cr2, .nef, .arw, .dng, .psd, .ai, .eps, .emf, .wmf, .jfif, .exif, .pgm, .ppm, .pbm, .jp2, .j2k, .jpf, .jpx, .jpm, .mj2, .xbm, .pcx';
1 change: 1 addition & 0 deletions packages/global/core/ai/type.d.ts
@@ -46,6 +46,7 @@ export type ChatCompletionMessageParam = (
| CustomChatCompletionToolMessageParam
| CustomChatCompletionAssistantMessageParam
) & {
reasoning_text?: string;
dataId?: string;
hideInUI?: boolean;
};
20 changes: 10 additions & 10 deletions packages/global/core/chat/adapt.ts
@@ -46,7 +46,16 @@ export const chats2GPTMessages = ({

messages.forEach((item) => {
const dataId = reserveId ? item.dataId : undefined;
if (item.obj === ChatRoleEnum.Human) {
if (item.obj === ChatRoleEnum.System) {
const content = item.value?.[0]?.text?.content;
if (content) {
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.System,
content
});
}
} else if (item.obj === ChatRoleEnum.Human) {
const value = item.value
.map((item) => {
if (item.type === ChatItemValueTypeEnum.text) {
@@ -80,15 +89,6 @@
role: ChatCompletionRequestMessageRoleEnum.User,
content: simpleUserContentPart(value)
});
} else if (item.obj === ChatRoleEnum.System) {
const content = item.value?.[0]?.text?.content;
if (content) {
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.System,
content
});
}
} else {
const aiResults: ChatCompletionMessageParam[] = [];

11 changes: 7 additions & 4 deletions packages/service/common/file/image/controller.ts
@@ -26,15 +26,18 @@ export async function uploadMongoImg({
const [base64Mime, base64Data] = base64Img.split(',');
// Check if mime type is valid
if (!base64MimeRegex.test(base64Mime)) {
return Promise.reject('Invalid image mime type');
return Promise.reject('Invalid image base64');
}

const mime = `image/${base64Mime.match(base64MimeRegex)?.[1] ?? 'image/jpeg'}`;
const binary = Buffer.from(base64Data, 'base64');
const extension = mime.split('/')[1];
let extension = mime.split('/')[1];
if (extension.startsWith('x-')) {
extension = extension.substring(2); // Remove 'x-' prefix
}

if (!imageFileType.includes(`.${extension}`)) {
return Promise.reject('Invalid image file type');
if (!extension || !imageFileType.includes(`.${extension}`)) {
return Promise.reject(`Invalid image file type: ${mime}`);
}

const { _id } = await MongoImage.create({
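The hunk above strips a leading `x-` from the mime subtype before checking it against the imageFileType whitelist, so vendor-prefixed types such as image/x-png are accepted. A self-contained sketch of that validation path follows; base64MimeRegex here is an assumed shape, and the whitelist is abbreviated for illustration.

```ts
// Assumed regex shape; the real base64MimeRegex lives elsewhere in the FastGPT source.
const base64MimeRegex = /^data:image\/([^;]+);base64$/;
// Abbreviated whitelist for illustration; the real imageFileType string is much longer.
const imageFileType = '.jpg, .jpeg, .png, .gif, .webp, .ico, .tiff, .tif';

const parseImageExtension = (base64Mime: string): string | undefined => {
  const subtype = base64Mime.match(base64MimeRegex)?.[1];
  if (!subtype) return undefined; // corresponds to the "Invalid image base64" rejection

  // Vendor prefixes: 'image/x-png' would otherwise be checked as '.x-png', which is not whitelisted.
  const extension = subtype.startsWith('x-') ? subtype.substring(2) : subtype;

  // undefined here corresponds to the "Invalid image file type" rejection
  return imageFileType.includes(`.${extension}`) ? extension : undefined;
};

// parseImageExtension('data:image/x-png;base64')     => 'png'
// parseImageExtension('data:application/pdf;base64') => undefined (not an image data URL)
```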
2 changes: 1 addition & 1 deletion packages/service/common/string/tiktoken/index.ts
@@ -25,7 +25,7 @@ export const countGptMessagesTokens = async (
number
>({
name: WorkerNameEnum.countGptMessagesTokens,
maxReservedThreads: global.systemEnv?.tokenWorkers || 50
maxReservedThreads: global.systemEnv?.tokenWorkers || 30
});

const total = await workerController.run({ messages, tools, functionCall });
3 changes: 2 additions & 1 deletion packages/service/core/workflow/dispatch/chat/oneapi.ts
@@ -270,7 +270,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const AIMessages: ChatCompletionMessageParam[] = [
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: answerText
content: answerText,
reasoning_text: reasoningText // reasoning_text is only recorded for response, but not for request
}
];

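The comment added above says reasoning_text is kept on the assistant message only so the response and usage records can see it; it is not meant to be sent back to the model. A hedged sketch of stripping it before a follow-up request; the helper name is illustrative and the import path is assumed.

```ts
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';

// Illustrative helper: drop reasoning_text so stored reasoning is never replayed as model context.
export const stripReasoningForRequest = (
  messages: ChatCompletionMessageParam[]
): ChatCompletionMessageParam[] =>
  messages.map(({ reasoning_text, ...rest }) => rest as ChatCompletionMessageParam);
```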
9 changes: 7 additions & 2 deletions packages/service/core/workflow/dispatch/index.ts
@@ -232,9 +232,14 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
}

if (toolResponses !== undefined) {
if (toolResponses !== undefined && toolResponses !== null) {
if (Array.isArray(toolResponses) && toolResponses.length === 0) return;
if (typeof toolResponses === 'object' && Object.keys(toolResponses).length === 0) return;
if (
!Array.isArray(toolResponses) &&
typeof toolResponses === 'object' &&
Object.keys(toolResponses).length === 0
)
return;
toolRunResponse = toolResponses;
}

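The new guard above treats undefined, null, empty arrays, and empty plain objects as "no tool response" while still letting primitives through. Restated as a standalone predicate (the name is illustrative, not from the source):

```ts
// Illustrative predicate matching the guard above: undefined/null, [], and {} are "empty".
const isEmptyToolResponse = (value: unknown): boolean => {
  if (value === undefined || value === null) return true;
  if (Array.isArray(value)) return value.length === 0;
  if (typeof value === 'object') return Object.keys(value as object).length === 0;
  return false; // strings, numbers, booleans count as real responses
};

// isEmptyToolResponse(undefined)   => true
// isEmptyToolResponse([])          => true
// isEmptyToolResponse({})          => true
// isEmptyToolResponse(0)           => false
// isEmptyToolResponse([{ ok: 1 }]) => false
```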
116 changes: 112 additions & 4 deletions packages/service/support/wallet/usage/controller.ts
@@ -1,6 +1,114 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { MongoUsage } from './schema';
import { ClientSession } from '../../../common/mongo';
import { ClientSession, Types } from '../../../common/mongo';
import { addLog } from '../../../common/system/log';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { ConcatUsageProps, CreateUsageProps } from '@fastgpt/global/support/wallet/usage/api';
import { i18nT } from '../../../../web/i18n/utils';
import { pushConcatBillTask, pushReduceTeamAiPointsTask } from './utils';

import { POST } from '../../../common/api/plusRequest';
import { FastGPTProUrl } from '../../../common/system/constants';

export async function createUsage(data: CreateUsageProps) {
try {
// In FastGPT server
if (FastGPTProUrl) {
await POST('/support/wallet/usage/createUsage', data);
} else if (global.reduceAiPointsQueue) {
// In FastGPT pro server
await MongoUsage.create(data);
pushReduceTeamAiPointsTask({ teamId: data.teamId, totalPoints: data.totalPoints });

if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
}
} catch (error) {
addLog.error('createUsage error', error);
}
}
export async function concatUsage(data: ConcatUsageProps) {
try {
// In FastGPT server
if (FastGPTProUrl) {
await POST('/support/wallet/usage/concatUsage', data);
} else if (global.reduceAiPointsQueue) {
const {
teamId,
billId,
totalPoints = 0,
listIndex,
inputTokens = 0,
outputTokens = 0
} = data;

// billId is required and valid
if (!billId || !Types.ObjectId.isValid(billId)) return;

// In FastGPT pro server
pushConcatBillTask([
{
billId,
listIndex,
inputTokens,
outputTokens,
totalPoints
}
]);
pushReduceTeamAiPointsTask({ teamId, totalPoints });

if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
}
} catch (error) {
addLog.error('concatUsage error', error);
}
}

export const createChatUsage = ({
appName,
appId,
pluginId,
teamId,
tmbId,
source,
flowUsages
}: {
appName: string;
appId?: string;
pluginId?: string;
teamId: string;
tmbId: string;
source: UsageSourceEnum;
flowUsages: ChatNodeUsageType[];
}) => {
const totalPoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);

createUsage({
teamId,
tmbId,
appName,
appId,
pluginId,
totalPoints,
source,
list: flowUsages.map((item) => ({
moduleName: item.moduleName,
amount: item.totalPoints || 0,
model: item.model,
inputTokens: item.inputTokens,
outputTokens: item.outputTokens
}))
});
addLog.debug(`Create chat usage`, {
source,
teamId,
totalPoints
});
return { totalPoints };
};

export const createTrainingUsage = async ({
teamId,
@@ -29,21 +137,21 @@ export const createTrainingUsage = async ({
totalPoints: 0,
list: [
{
moduleName: 'support.wallet.moduleName.index',
moduleName: i18nT('common:support.wallet.moduleName.index'),
model: vectorModel,
amount: 0,
inputTokens: 0,
outputTokens: 0
},
{
moduleName: 'support.wallet.moduleName.qa',
moduleName: i18nT('common:support.wallet.moduleName.qa'),
model: agentModel,
amount: 0,
inputTokens: 0,
outputTokens: 0
},
{
moduleName: 'core.dataset.training.Auto mode',
moduleName: i18nT('common:core.dataset.training.Auto mode'),
model: agentModel,
amount: 0,
inputTokens: 0,
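createChatUsage above sums totalPoints across the flow's node usages, fires createUsage asynchronously, and returns the total. A usage sketch follows; the ids, model name, and the UsageSourceEnum.api member are placeholders/assumptions, and the import paths mirror the monorepo layout rather than a verified public entry point.

```ts
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';

// Hypothetical ids; in FastGPT these come from the authenticated request context.
const { totalPoints } = createChatUsage({
  appName: 'Demo app',
  appId: '65f000000000000000000001',
  teamId: '65f000000000000000000002',
  tmbId: '65f000000000000000000003',
  source: UsageSourceEnum.api, // assumed enum member; use whichever source applies
  flowUsages: [
    {
      moduleName: 'AI Chat',
      totalPoints: 1.5,
      model: 'gpt-4o-mini',
      inputTokens: 320,
      outputTokens: 180
    }
  ]
});

console.log(totalPoints); // 1.5, the summed points that are also written to the usage record
```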
12 changes: 12 additions & 0 deletions packages/service/support/wallet/usage/type.d.ts
@@ -0,0 +1,12 @@
export type ConcatBillQueueItemType = {
billId: string;
listIndex?: number;
totalPoints: number;
inputTokens: number;
outputTokens: number;
};

declare global {
var reduceAiPointsQueue: { teamId: string; totalPoints: number }[];
var concatBillQueue: ConcatBillQueueItemType[];
}
18 changes: 18 additions & 0 deletions packages/service/support/wallet/usage/utils.ts
@@ -1,5 +1,6 @@
import { findAIModel } from '../../../core/ai/model';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { ConcatBillQueueItemType } from './type';

export const formatModelChars2Points = ({
model,
@@ -34,3 +35,20 @@ export const formatModelChars2Points = ({
totalPoints
};
};

export const pushReduceTeamAiPointsTask = ({
teamId,
totalPoints
}: {
teamId: string;
totalPoints: number;
}) => {
global.reduceAiPointsQueue.push({
teamId: String(teamId),
totalPoints
});
};

export const pushConcatBillTask = (data: ConcatBillQueueItemType[]) => {
global.concatBillQueue.push(...data);
};
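pushConcatBillTask and pushReduceTeamAiPointsTask only enqueue work onto the global arrays declared in type.d.ts above; some other task, not shown in this commit, is expected to drain them. A purely hypothetical drain loop, just to illustrate the producer/consumer split ('./type' refers to the type.d.ts file in this diff):

```ts
import type { ConcatBillQueueItemType } from './type';

// Hypothetical consumer, for illustration only: the real drain worker is not part of this diff.
export const drainConcatBillQueue = async (
  persist: (items: ConcatBillQueueItemType[]) => Promise<void>
) => {
  if (!global.concatBillQueue?.length) return;

  // splice(0) empties the queue in one step, so producers can keep pushing new items meanwhile.
  const items = global.concatBillQueue.splice(0);
  try {
    await persist(items);
  } catch (error) {
    // On failure, put the items back so a later run can retry them.
    global.concatBillQueue.unshift(...items);
  }
};
```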
10 changes: 8 additions & 2 deletions packages/service/worker/countGptMessagesTokens/index.ts
@@ -72,7 +72,7 @@ parentPort?.on(
};

const total =
messages.reduce((sum, item) => {
messages.reduce((sum, item, index) => {
// Evaluates the text of toolcall and functioncall
const functionCallPrompt = (() => {
let prompt = '';
@@ -100,7 +100,13 @@ parentPort?.on(
.join('');
})();

return sum + countPromptTokens(`${contentPrompt}${functionCallPrompt}`, item.role);
// Only the last message computed reasoning_text
const reasoningText = index === messages.length - 1 ? item.reasoning_text || '' : '';

return (
sum +
countPromptTokens(`${reasoningText}${contentPrompt}${functionCallPrompt}`, item.role)
);
}, 0) +
countToolsTokens(tools) +
countToolsTokens(functionCall);
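With the change above, reasoning_text contributes to the token count only for the final message in the array, which matches changelog item 4 (reasoning content now counted toward output tokens). A trimmed sketch of that reduce, with a stand-in countPromptTokens; the real worker uses tiktoken here.

```ts
type CountableMessage = { role: string; content?: string; reasoning_text?: string };

// Stand-in tokenizer for illustration only; FastGPT's worker counts with tiktoken instead.
const countPromptTokens = (text: string, _role: string) => Math.ceil(text.length / 4);

const countMessagesTokens = (messages: CountableMessage[]) =>
  messages.reduce((sum, item, index) => {
    const contentPrompt = item.content ?? '';
    // Only the last message contributes its reasoning_text to the total.
    const reasoningText = index === messages.length - 1 ? item.reasoning_text || '' : '';
    return sum + countPromptTokens(`${reasoningText}${contentPrompt}`, item.role);
  }, 0);
```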
12 changes: 7 additions & 5 deletions packages/web/components/common/Icon/index.tsx
@@ -1,17 +1,17 @@
import React, { useEffect, useState } from 'react';
import React, { useEffect } from 'react';
import type { IconProps } from '@chakra-ui/react';
import { Box, Icon } from '@chakra-ui/react';
import { iconPaths } from './constants';
import type { IconNameType } from './type.d';
import { useRefresh } from '../../../hooks/useRefresh';

const iconCache: Record<string, any> = {};

const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconNameType } & IconProps) => {
const [IconComponent, setIconComponent] = useState<any>(null);
const { refresh } = useRefresh();

useEffect(() => {
if (iconCache[name]) {
setIconComponent(iconCache[name]);
return;
}

@@ -20,11 +20,13 @@ const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconNameType
const component = { as: icon.default };
// Store in cache
iconCache[name] = component;
setIconComponent(component);
refresh();
})
.catch((error) => console.log(error));
}, [name]);

const IconComponent = iconCache[name];

return !!IconComponent ? (
<Icon
{...IconComponent}
@@ -40,4 +40,4 @@ const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconNameType
);
};

export default MyIcon;
export default React.memo(MyIcon);
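The refactor above drops the per-component useState in favor of a module-level iconCache plus a refresh() call once the dynamic import resolves, and memoizes the component. The useRefresh hook itself is not shown in this commit; below is a minimal sketch of a hook with that shape, an assumption about the real packages/web/hooks/useRefresh.

```ts
import { useCallback, useState } from 'react';

// Minimal force-re-render hook: MyIcon only needs refresh() so it re-reads iconCache[name].
export const useRefresh = () => {
  const [, setTick] = useState(0);
  const refresh = useCallback(() => setTick((t) => t + 1), []);
  return { refresh };
};
```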
2 changes: 1 addition & 1 deletion projects/app/data/config.json
@@ -6,7 +6,7 @@
"systemEnv": {
"vectorMaxProcess": 15, // 向量处理线程数量
"qaMaxProcess": 15, // 问答拆分线程数量
"tokenWorkers": 50, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"tokenWorkers": 30, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"pgHNSWEfSearch": 100 // 向量搜索参数。越大,搜索越精确,但是速度越慢。设置为100,有99%+精度。
}
}
2 changes: 1 addition & 1 deletion projects/app/data/model.json
@@ -6,7 +6,7 @@
"systemEnv": {
"vectorMaxProcess": 15, // 向量处理线程数量
"qaMaxProcess": 15, // 问答拆分线程数量
"tokenWorkers": 50, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"tokenWorkers": 30, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"pgHNSWEfSearch": 100 // 向量搜索参数。越大,搜索越精确,但是速度越慢。设置为100,有99%+精度。
},
"llmModels": [