feat: auto call frontend tool

This commit is contained in:
xilesun 2025-05-02 12:44:51 +08:00
parent b75368d820
commit bd1233df81
13 changed files with 220 additions and 134 deletions

View File

@ -1,37 +0,0 @@
/**
* This file is part of the NocoBase (R) project.
* Copyright (c) 2020-2024 NocoBase Co., Ltd.
* Authors: NocoBase Team.
*
* This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
* For more information, please refer to: https://www.nocobase.com/agreement.
*/
import React, { useContext } from 'react';
import { withDynamicSchemaProps } from '@nocobase/client';
import { createContext } from 'react';
import { AttachmentProps } from './types';
/**
 * Shared state made available to components rendered inside an AI employee chat.
 * Provided via AIEmployeeChatProvider and read with useAIEmployeeChatContext.
 */
export type AIEmployeeChatContext = {
// Attachments exposed to the chat, keyed by an identifier string.
attachments?: Record<string, AttachmentProps>;
// Actions the chat can offer, keyed by name; `action` receives the AI message text.
actions?: Record<
string,
{
title: string;
description?: string;
icon?: React.ReactNode;
action: (aiMessage: string) => void;
}
>;
// NOTE(review): shape of variableScopes is not constrained here — confirm expected structure with consumers.
variableScopes?: any;
};
// Context object; the empty-object default is cast, so consumers should tolerate missing fields.
export const AIEmployeeChatContext = createContext<AIEmployeeChatContext>({} as AIEmployeeChatContext);
/**
 * Provider that publishes its own props as the AIEmployeeChatContext value.
 * Wrapped in withDynamicSchemaProps so props can be resolved from a dynamic schema.
 */
export const AIEmployeeChatProvider: React.FC<AIEmployeeChatContext> = withDynamicSchemaProps((props) => {
return <AIEmployeeChatContext.Provider value={props}>{props.children}</AIEmployeeChatContext.Provider>;
});
/**
 * Convenience hook for reading the nearest AIEmployeeChatProvider value.
 */
export const useAIEmployeeChatContext = () => {
  const chatContext = useContext(AIEmployeeChatContext);
  return chatContext;
};

View File

@ -12,9 +12,11 @@ import { AIEmployee, Message, ResendOptions, SendOptions } from '../types';
import React, { useState } from 'react';
import { uid } from '@formily/shared';
import { useT } from '../../locale';
import { useAPIClient, useRequest } from '@nocobase/client';
import { useAPIClient, usePlugin, useRequest } from '@nocobase/client';
import { useChatConversations } from './ChatConversationsProvider';
import { useLoadMoreObserver } from './useLoadMoreObserver';
import { useAISelectionContext } from '../selector/AISelectorProvider';
import PluginAIClient from '../..';
interface ChatMessagesContextValue {
messages: Message[];
@ -41,6 +43,8 @@ export const useChatMessages = () => useContext(ChatMessagesContext);
export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
const t = useT();
const api = useAPIClient();
const { ctx } = useAISelectionContext();
const plugin = usePlugin('ai') as PluginAIClient;
const [messages, setMessages] = useState<Message[]>([]);
const [responseLoading, setResponseLoading] = useState(false);
const { currentConversation } = useChatConversations();
@ -101,11 +105,15 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
});
};
const processStreamResponse = async (stream: any) => {
const processStreamResponse = async (stream: any, sessionId: string, aiEmployee: AIEmployee) => {
const reader = stream.getReader();
const decoder = new TextDecoder();
let result = '';
let error = false;
let tool: {
name: string;
args: any;
};
try {
// eslint-disable-next-line no-constant-condition
@ -123,14 +131,22 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
for (const line of lines) {
try {
const data = JSON.parse(line.replace(/^data: /, ''));
if (data.body) content += data.body;
if (data.type === 'error') error = true;
if (data.body && typeof data.body === 'string') {
content += data.body;
}
if (data.type === 'error') {
error = true;
}
if (data.type === 'tool') {
tool = data.body;
}
} catch (e) {
console.error('Error parsing stream data:', e);
}
}
result += content;
if (result) {
updateLastMessage((last) => ({
...last,
content: {
@ -140,6 +156,18 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
loading: false,
}));
}
if (tool) {
console.log(ctx, tool);
const t = plugin.aiManager.tools.get(tool.name);
if (t) {
await t.invoke(ctx, tool.args);
callTool({
sessionId,
aiEmployee,
});
}
}
}
} catch (err) {
console.error(err);
if (err.name !== 'AbortError') {
@ -225,7 +253,7 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
return;
}
await processStreamResponse(sendRes.data);
await processStreamResponse(sendRes.data, sessionId, aiEmployee);
messagesServiceRef.current.run(sessionId);
} catch (err) {
if (err.name === 'CanceledError') {
@ -272,7 +300,7 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
return;
}
await processStreamResponse(sendRes.data);
await processStreamResponse(sendRes.data, sessionId, aiEmployee);
messagesServiceRef.current.run(sessionId);
} catch (err) {
if (err.name === 'CanceledError') {
@ -302,7 +330,8 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
setResponseLoading(false);
}, [currentConversation]);
const callTool = useCallback(async ({ sessionId, messageId, aiEmployee }) => {
const callTool = useCallback(
async ({ sessionId, messageId, aiEmployee }: { sessionId: string; messageId?: string; aiEmployee: AIEmployee }) => {
addMessage({
key: uid(),
role: aiEmployee.username,
@ -320,12 +349,14 @@ export const ChatMessagesProvider: React.FC<{ children: React.ReactNode }> = ({
adapter: 'fetch',
});
await processStreamResponse(sendRes.data);
await processStreamResponse(sendRes.data, sessionId, aiEmployee);
messagesServiceRef.current.run(sessionId);
} catch (err) {
throw err;
}
}, []);
},
[],
);
const loadMoreMessages = useCallback(async () => {
const messagesService = messagesServiceRef.current;

View File

@ -20,13 +20,14 @@ import { Schema } from '@formily/react';
import PluginAIClient from '../..';
import { useChatMessages } from './ChatMessagesProvider';
import { useChatBoxContext } from './ChatBoxContext';
import { useAISelectionContext } from '../selector/AISelectorProvider';
const useDefaultAction = (messageId: string) => {
const currentEmployee = useChatBoxContext('currentEmployee');
const { currentConversation } = useChatConversations();
const { callTool } = useChatMessages();
return {
callAction: () => {
invoke: () => {
callTool({
sessionId: currentConversation,
messageId,
@ -42,16 +43,20 @@ const CallButton: React.FC<{
args: any;
}> = ({ name, messageId, args }) => {
const t = useT();
const { ctx } = useAISelectionContext();
const plugin = usePlugin('ai') as PluginAIClient;
const tool = plugin.aiManager.tools.get(name);
const useAction = tool?.useAction || useDefaultAction;
const { callAction } = useAction(messageId);
const { invoke: invokeDefault } = useDefaultAction(messageId);
const invoke = async () => {
await tool?.invoke?.(ctx, args);
invokeDefault();
};
return (
<Button
onClick={(e) => {
e.stopPropagation();
callAction(args);
invoke();
}}
variant="link"
color="primary"

View File

@ -23,10 +23,6 @@ import { useAISelectionContext } from './ai-employees/selector/AISelectorProvide
import { googleGenAIProviderOptions } from './llm-providers/google-genai';
import { AIEmployeeTrigger } from './workflow/triggers/ai-employee';
const { AIEmployeesProvider } = lazy(() => import('./ai-employees/AIEmployeesProvider'), 'AIEmployeesProvider');
const { AIEmployeeChatProvider } = lazy(
() => import('./ai-employees/AIEmployeeChatProvider'),
'AIEmployeeChatProvider',
);
const { Employees } = lazy(() => import('./ai-employees/manager/Employees'), 'Employees');
const { LLMServices } = lazy(() => import('./llm-services/LLMServices'), 'LLMServices');
const { MessagesSettings } = lazy(() => import('./chat-settings/Messages'), 'MessagesSettings');
@ -49,7 +45,6 @@ export class PluginAIClient extends Plugin {
this.app.use(AIEmployeesProvider);
this.app.addComponents({
AIEmployeeButton,
AIEmployeeChatProvider,
AIContextCollector,
CardItem: withAISelectable(CardItem, {
selectType: 'blocks',
@ -101,11 +96,9 @@ export class PluginAIClient extends Plugin {
Component: MessagesSettings,
});
this.aiManager.registerTool('formFiller', {
useAction() {
const { ctx } = useAISelectionContext();
return {
callAction: (params) => {
invoke: (ctx, params) => {
const { form: uid, data } = params;
console.log(params);
if (!uid || !data) {
return;
}
@ -114,8 +107,7 @@ export class PluginAIClient extends Plugin {
return;
}
form.values = data;
},
};
console.log('====', form.values);
},
});

View File

@ -42,9 +42,7 @@ export const MessageRenderer: React.FC<{
}}
>
{typeof content === 'string' && <Markdown markdown={content} />}
{msg.tool_calls?.length && !msg.metadata?.autoCallTool && (
<ToolCard tools={msg.tool_calls} messageId={msg.messageId} />
)}
{msg.tool_calls?.length && <ToolCard tools={msg.tool_calls} messageId={msg.messageId} />}
</div>
);
};

View File

@ -21,9 +21,7 @@ export type LLMProviderOptions = {
};
export type ToolOptions = {
useAction: () => {
callAction: (params: any) => void | Promise<void>;
};
invoke: (ctx: any, params: any) => void | Promise<void>;
};
export class AIManager {

View File

@ -10,6 +10,7 @@
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import { LLMProvider } from './provider';
import axios from 'axios';
import { Model } from '@nocobase/database';
export class GoogleGenAIProvider extends LLMProvider {
declare chatModel: ChatGoogleGenerativeAI;
@ -62,6 +63,40 @@ export class GoogleGenAIProvider extends LLMProvider {
return { code: 500, errMsg: e.message };
}
}
/**
 * Normalize a persisted chat message row into the shape the chat UI renders.
 *
 * - Copies messageId/metadata onto the content payload.
 * - Surfaces tool_calls only when the tool was NOT auto-called (manual confirmation flow).
 * - When the tool WAS auto-called and content is an array of parts, strips internal
 *   'functionCaller' parts and, if no text part remains but tool calls exist, prepends
 *   a placeholder text part so the message is not empty in the UI.
 */
parseResponseMessage(message: Model) {
  const { content: rawContent, messageId, metadata, role, toolCalls } = message;
  const autoCallTool = metadata?.autoCallTool;
  const content = {
    ...rawContent,
    messageId,
    metadata,
  };
  // Manual flow: expose tool calls so the client can render a confirmation card.
  if (!autoCallTool && toolCalls) {
    content.tool_calls = toolCalls;
  }
  if (Array.isArray(content.content) && autoCallTool) {
    // Hide internal function-caller parts from the rendered message.
    const messages = content.content.filter((msg) => msg.type !== 'functionCaller');
    const hasText = messages.some((msg) => msg.type === 'text');
    if (!hasText && toolCalls?.length) {
      messages.unshift({
        type: 'text',
        // Fixed typo: "Im" -> "I'm".
        text: "I'm trying to use my skills to complete the task.",
      });
    }
    content.content = messages;
  }
  return {
    key: messageId,
    content,
    role,
  };
}
}
export const googleGenAIProviderOptions = {

View File

@ -11,6 +11,8 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import axios from 'axios';
import { parseMessages } from './handlers/parse-messages';
import { Application } from '@nocobase/server';
import { Model } from '@nocobase/database';
import { parseResponseMessage } from '../utils';
export abstract class LLMProvider {
serviceOptions: Record<string, any>;
@ -27,7 +29,7 @@ export abstract class LLMProvider {
constructor(opts: {
app: Application;
serviceOptions: any;
serviceOptions?: any;
chatOptions?: {
messages?: any[];
tools?: any[];
@ -97,4 +99,8 @@ export abstract class LLMProvider {
return { code: 500, errMsg: e.message };
}
}
parseResponseMessage(message: Model) {
return parseResponseMessage(message);
}
}

View File

@ -13,6 +13,7 @@ import { Registry } from '@nocobase/utils';
import { ZodObject } from 'zod';
import zodToJsonSchema from 'zod-to-json-schema';
import PluginAIServer from '../plugin';
import { Context } from '@nocobase/actions';
export type LLMProviderOptions = {
title: string;
@ -27,10 +28,11 @@ export type LLMProviderOptions = {
interface BaseToolProps {
title: string;
description: string;
execution?: 'frontend' | 'backend';
name?: string;
schema?: any;
invoke?: (
plugin: PluginAIServer,
invoke: (
ctx: Context,
args: Record<string, any>,
) => Promise<{
status: 'success' | 'error';
@ -107,6 +109,7 @@ export class AIManager {
} else {
result.invoke = tool.invoke;
result.schema = processSchema(tool.schema);
result.execution = tool.execution;
}
return result;

View File

@ -12,6 +12,7 @@ import PluginAIServer from '../plugin';
import { Model } from '@nocobase/database';
import { concat } from '@langchain/core/utils/stream';
import { LLMProvider } from '../../../server';
import { parseResponseMessage } from '../utils';
async function parseUISchema(ctx: Context, content: string) {
const regex = /\{\{\$nUISchema\.([^}]+)\}\}/g;
@ -58,7 +59,7 @@ async function formatMessages(ctx: Context, messages: any[]) {
formattedMessages.push({
role: 'tool',
content,
tool_call_id: msg.toolCalls?.id,
tool_call_id: msg.metadata?.toolCall?.id,
});
continue;
}
@ -222,7 +223,7 @@ async function processChatStream(
}
if (gathered?.tool_calls?.length && aiEmployee.skillSettings?.autoCall) {
await callTool(ctx, gathered.tool_calls[0], sessionId, aiEmployee);
await callTool(ctx, gathered.tool_calls[0], sessionId, aiEmployee, true);
}
ctx.res.end();
@ -273,14 +274,21 @@ async function callTool(
},
sessionId: string,
aiEmployee: Model,
autoCall = false,
) {
const plugin = ctx.app.pm.get('ai') as PluginAIServer;
try {
const tool = await plugin.aiManager.getTool(toolCall.name);
if (!tool) {
sendErrorResponse(ctx, 'Tool not found');
return;
}
const result = await tool.invoke(plugin, toolCall.args);
if (tool.execution === 'frontend' && autoCall) {
ctx.res.write(`data: ${JSON.stringify({ type: 'tool', body: toolCall })}\n\n`);
ctx.res.end();
return;
}
const result = await tool.invoke(ctx, toolCall.args);
if (result.status === 'error') {
sendErrorResponse(ctx, result.content);
}
@ -303,11 +311,11 @@ async function callTool(
type: 'text',
content: result.content,
},
toolCalls: toolCall,
metadata: {
model,
provider: service.provider,
autoCallTool: aiEmployee.skillSettings?.autoCall,
toolCall,
},
},
});
@ -399,6 +407,7 @@ export default {
},
async getMessages(ctx: Context, next: Next) {
const plugin = ctx.app.pm.get('ai') as PluginAIServer;
const userId = ctx.auth?.user.id;
if (!userId) {
return ctx.throw(403);
@ -444,19 +453,15 @@ export default {
ctx.body = {
rows: data.map((row: Model) => {
const content = {
...row.content,
messageId: row.messageId,
metadata: row.metadata,
};
if (!row.metadata?.autoCallTool && row.toolCalls) {
content.tool_calls = row.toolCalls;
const providerOptions = plugin.aiManager.llmProviders.get(row.metadata?.provider);
if (!providerOptions) {
return parseResponseMessage(row);
}
return {
key: row.messageId,
content,
role: row.role,
};
const Provider = providerOptions.provider;
const provider = new Provider({
app: ctx.app,
});
return provider.parseResponseMessage(row);
}),
hasMore,
cursor: newCursor,
@ -536,7 +541,7 @@ export default {
const { provider, model, service } = await getLLMService(ctx, employee, formattedMessages);
const { stream, signal } = await prepareChatStream(ctx, sessionId, provider);
await processChatStream(ctx, stream, sessionId, {
aiEmployee,
aiEmployee: employee,
signal,
model,
provider: service.provider,
@ -651,8 +656,9 @@ export default {
setupSSEHeaders(ctx);
const { sessionId, messageId } = ctx.action.params.values || {};
if (!sessionId || !messageId) {
sendErrorResponse(ctx, 'sessionId and messageId are required');
if (!sessionId) {
sendErrorResponse(ctx, 'sessionId is required');
return next();
}
try {
const conversation = await ctx.db.getRepository('aiConversations').findOne({
@ -664,16 +670,31 @@ export default {
});
if (!conversation) {
sendErrorResponse(ctx, 'conversation not found');
return next();
}
const employee = conversation.aiEmployee;
const message = await ctx.db.getRepository('aiConversations.messages', sessionId).findOne({
let message: Model;
if (messageId) {
message = await ctx.db.getRepository('aiConversations.messages', sessionId).findOne({
filter: {
messageId,
},
});
} else {
message = await ctx.db.getRepository('aiConversations.messages', sessionId).findOne({
sort: ['-messageId'],
});
}
if (!message) {
sendErrorResponse(ctx, 'message not found');
return next();
}
const tools = message.toolCalls;
if (!tools?.length) {
sendErrorResponse(ctx, 'No tool calls found');
return next();
}
await callTool(ctx, tools[0], sessionId, employee);
} catch (err) {

View File

@ -13,8 +13,15 @@ import { ToolOptions } from '../manager/ai-manager';
/**
 * Server-side declaration of the "Form filler" skill.
 * Marked `execution: 'frontend'`: the chat action short-circuits and streams the
 * tool call to the browser, where the client plugin assigns `data` to form.values.
 */
// NOTE(review): 'formFillter' looks like a typo of 'formFiller' (the client registers
// the tool under 'formFiller'); renaming the export would break importers, so left as-is.
export const formFillter: ToolOptions = {
title: '{{t("Form filler")}}',
description: '{{t("Fill the form with the given content")}}',
// Client-executed tool; the backend does not perform the fill itself.
execution: 'frontend',
schema: z.object({
form: z.string().describe('The UI Schema ID of the target form to be filled.'),
data: z.record(z.any()).describe("Structured data matching the form's JSON Schema, to be assigned to form.values."),
}),
// Backend invoke is an acknowledgement only; the actual form filling happens client-side.
invoke: async () => {
return {
status: 'success',
content: 'I have filled the form with the provided data.',
};
},
};

View File

@ -11,6 +11,7 @@ import { ToolOptions } from '../manager/ai-manager';
import { z } from 'zod';
import PluginAIServer from '../plugin';
import PluginWorkflowServer, { Processor, EXECUTION_STATUS } from '@nocobase/plugin-workflow';
import { Context } from '@nocobase/actions';
interface ParameterConfig {
name: string;
@ -81,8 +82,8 @@ const buildSchema = (config: ToolConfig): z.ZodObject<any> => {
return schema.describe(config.description || '');
};
const invoke = async (plugin: PluginAIServer, workflow: Workflow, args: Record<string, any>) => {
const workflowPlugin = plugin.app.pm.get('workflow') as PluginWorkflowServer;
const invoke = async (ctx: Context, workflow: Workflow, args: Record<string, any>) => {
const workflowPlugin = ctx.app.pm.get('workflow') as PluginWorkflowServer;
const processor = (await workflowPlugin.trigger(workflow as any, {
...args,
})) as Processor;
@ -114,7 +115,7 @@ export const workflowCaller: ToolOptions = {
title: workflow.title,
description: workflow.description,
schema: buildSchema(config),
invoke: async (plugin: PluginAIServer, args: Record<string, any>) => invoke(plugin, workflow, args),
invoke: async (ctx: Context, args: Record<string, any>) => invoke(ctx, workflow, args),
};
});
},
@ -133,7 +134,7 @@ export const workflowCaller: ToolOptions = {
title: workflow.title,
description: workflow.description,
schema: buildSchema(config),
invoke: async (plugin: PluginAIServer, args: Record<string, any>) => invoke(plugin, workflow, args),
invoke: async (ctx: Context, args: Record<string, any>) => invoke(ctx, workflow, args),
};
},
};

View File

@ -0,0 +1,26 @@
/**
* This file is part of the NocoBase (R) project.
* Copyright (c) 2020-2024 NocoBase Co., Ltd.
* Authors: NocoBase Team.
*
* This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
* For more information, please refer to: https://www.nocobase.com/agreement.
*/
import { Model } from '@nocobase/database';
/**
 * Convert a stored conversation message row into the payload returned to the chat client.
 * Tool calls are attached to the content only when the message was not auto-called,
 * so the UI can offer manual tool confirmation.
 */
export function parseResponseMessage(row: Model) {
  const { content: rawContent, messageId, metadata, role, toolCalls } = row;
  const content: Record<string, any> = {
    ...rawContent,
    messageId,
    metadata,
  };
  if (toolCalls && !metadata?.autoCallTool) {
    content.tool_calls = toolCalls;
  }
  return { key: messageId, content, role };
}