Merge branch 'feature_20250930_work_flow' of https://github.com/MuSan-Li/ruoyi-ai into main

# Conflicts:
#	pom.xml
#	ruoyi-admin/pom.xml
#	ruoyi-modules/pom.xml
#	ruoyi-modules/ruoyi-chat/src/main/java/org/ruoyi/chat/service/chat/impl/DeepSeekChatImpl.java
This commit is contained in:
lihao05
2025-10-21 10:17:50 +08:00
116 changed files with 6839 additions and 25 deletions

View File

@@ -0,0 +1,432 @@
# Ruoyi-AI 工作流模块详细说明文档
## 概述
Ruoyi-AI 工作流模块是一个基于 LangGraph4j 的智能工作流引擎,支持可视化工作流设计、AI 模型集成、条件分支、人机交互等高级功能。该模块采用微服务架构,提供完整的 RESTful API 和流式响应支持。
## 模块架构
### 1. 模块结构
```
ruoyi-ai/
├── ruoyi-modules/
│ └── ruoyi-workflow/ # 工作流核心模块
│ ├── pom.xml
│ └── src/main/java/org/ruoyi/workflow/
│ └── controller/ # 控制器层
│ ├── WorkflowController.java
│ ├── WorkflowRuntimeController.java
│ └── admin/ # 管理端控制器
│ ├── AdminWorkflowController.java
│ └── AdminWorkflowComponentController.java
└── ruoyi-modules-api/
└── ruoyi-workflow-api/ # 工作流API模块
├── pom.xml
└── src/main/java/org/ruoyi/workflow/
├── entity/ # 实体类
├── dto/ # 数据传输对象
├── service/ # 服务接口
├── mapper/ # 数据访问层
├── workflow/ # 工作流核心逻辑
├── enums/ # 枚举类
├── util/ # 工具类
└── exception/ # 异常处理
```
### 2. 核心依赖
- **LangGraph4j**: 1.5.3 - 工作流图执行引擎
- **LangChain4j**: 1.2.0 - AI 模型集成框架
- **Spring Boot**: 3.x - 应用框架
- **MyBatis Plus**: 数据访问层
- **Redis**: 缓存和状态管理
- **Swagger/OpenAPI**: API 文档
## 核心功能
### 1. 工作流管理
#### 1.1 工作流定义
- **创建工作流**: 支持自定义标题、描述、公开性设置
- **编辑工作流**: 可视化节点编辑、连接线配置
- **版本控制**: 支持工作流的版本管理和回滚
- **权限管理**: 支持公开/私有工作流设置
#### 1.2 工作流执行
- **流式执行**: 基于 SSE 的实时流式响应
- **状态管理**: 完整的执行状态跟踪
- **错误处理**: 详细的错误信息和异常处理
- **中断恢复**: 支持工作流中断和恢复执行
### 2. 节点类型
#### 2.1 基础节点
- **Start**: 开始节点,定义工作流入口
- **End**: 结束节点,定义工作流出口
#### 2.2 AI 模型节点
- **Answer**: 大语言模型问答节点
- **Dalle3**: DALL-E 3 图像生成
- **Tongyiwanx**: 通义万相图像生成
- **Classifier**: 内容分类节点
#### 2.3 数据处理节点
- **DocumentExtractor**: 文档信息提取
- **KeywordExtractor**: 关键词提取
- **FaqExtractor**: 常见问题提取
- **KnowledgeRetrieval**: 知识库检索
#### 2.4 控制流节点
- **Switcher**: 条件分支节点
- **HumanFeedback**: 人机交互节点
#### 2.5 外部集成节点
- **Google**: Google 搜索集成
- **MailSend**: 邮件发送
- **HttpRequest**: HTTP 请求
- **Template**: 模板转换
### 3. 数据流管理
#### 3.1 输入输出定义
```java
// 节点输入输出数据结构
public class NodeIOData {
private String name; // 参数名称
private NodeIODataContent content; // 参数内容
}
// 支持的数据类型
public enum WfIODataTypeEnum {
TEXT, // 文本
NUMBER, // 数字
BOOLEAN, // 布尔值
FILES, // 文件
OPTIONS // 选项
}
```
#### 3.2 参数引用
- **节点间引用**: 支持上游节点输出作为下游节点输入
- **参数映射**: 自动处理参数名称映射
- **类型转换**: 自动进行数据类型转换
## 数据库设计
### 1. 核心表结构
#### 1.1 工作流定义表 (t_workflow)
```sql
CREATE TABLE t_workflow (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
uuid VARCHAR(32) NOT NULL DEFAULT '',
title VARCHAR(100) NOT NULL DEFAULT '',
remark TEXT NOT NULL DEFAULT '',
user_id BIGINT NOT NULL DEFAULT 0,
is_public TINYINT(1) NOT NULL DEFAULT 0,
is_enable TINYINT(1) NOT NULL DEFAULT 1,
create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
update_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
is_deleted TINYINT(1) NOT NULL DEFAULT 0
);
```
#### 1.2 工作流节点表 (t_workflow_node)
```sql
CREATE TABLE t_workflow_node (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
uuid VARCHAR(32) NOT NULL DEFAULT '',
workflow_id BIGINT NOT NULL DEFAULT 0,
workflow_component_id BIGINT NOT NULL DEFAULT 0,
user_id BIGINT NOT NULL DEFAULT 0,
title VARCHAR(100) NOT NULL DEFAULT '',
remark VARCHAR(500) NOT NULL DEFAULT '',
input_config JSON NOT NULL DEFAULT ('{}'),
node_config JSON NOT NULL DEFAULT ('{}'),
position_x DOUBLE NOT NULL DEFAULT 0,
position_y DOUBLE NOT NULL DEFAULT 0,
create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
update_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
is_deleted TINYINT(1) NOT NULL DEFAULT 0
);
```
#### 1.3 工作流边表 (t_workflow_edge)
```sql
CREATE TABLE t_workflow_edge (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
uuid VARCHAR(32) NOT NULL DEFAULT '',
workflow_id BIGINT NOT NULL DEFAULT 0,
source_node_uuid VARCHAR(32) NOT NULL DEFAULT '',
source_handle VARCHAR(32) NOT NULL DEFAULT '',
target_node_uuid VARCHAR(32) NOT NULL DEFAULT '',
create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
update_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
is_deleted TINYINT(1) NOT NULL DEFAULT 0
);
```
#### 1.4 工作流运行时表 (t_workflow_runtime)
```sql
CREATE TABLE t_workflow_runtime (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
uuid VARCHAR(32) NOT NULL DEFAULT '',
user_id BIGINT NOT NULL DEFAULT 0,
workflow_id BIGINT NOT NULL DEFAULT 0,
input JSON NOT NULL DEFAULT ('{}'),
output JSON NOT NULL DEFAULT ('{}'),
status SMALLINT NOT NULL DEFAULT 1,
status_remark VARCHAR(250) NOT NULL DEFAULT '',
create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
update_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
is_deleted TINYINT(1) NOT NULL DEFAULT 0
);
```
#### 1.5 工作流组件表 (t_workflow_component)
```sql
CREATE TABLE t_workflow_component (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
uuid VARCHAR(32) DEFAULT '' NOT NULL,
name VARCHAR(32) DEFAULT '' NOT NULL,
title VARCHAR(100) DEFAULT '' NOT NULL,
remark TEXT NOT NULL,
display_order INT DEFAULT 0 NOT NULL,
is_enable TINYINT(1) DEFAULT 0 NOT NULL,
create_time DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
update_time DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
is_deleted TINYINT(1) DEFAULT 0 NOT NULL
);
```
## API 接口
### 1. 工作流管理接口
#### 1.1 基础操作
```http
#
POST /workflow/add
Content-Type: application/json
{
"title": "",
"remark": "",
"isPublic": false
}
#
POST /workflow/update
Content-Type: application/json
{
"uuid": "UUID",
"title": "",
"remark": ""
}
# 删除工作流
POST /workflow/del/{uuid}
# 启用/禁用工作流
POST /workflow/enable/{uuid}?enable=true
```
#### 1.2 搜索和查询
```http
#
GET /workflow/mine/search?keyword=&isPublic=true&currentPage=1&pageSize=10
#
GET /workflow/public/search?keyword=&currentPage=1&pageSize=10
#
GET /workflow/public/component/list
```
### 2. 工作流执行接口
#### 2.1 流式执行
```http
#
POST /workflow/run
Content-Type: application/json
Accept: text/event-stream
{
"uuid": "UUID",
"inputs": [
{
"name": "input",
"content": {
"type": 1,
"textContent": ""
}
}
]
}
```
#### 2.2 运行时管理
```http
#
POST /workflow/runtime/resume/{runtimeUuid}
Content-Type: application/json
{
"feedbackContent": ""
}
#
GET /workflow/runtime/page?wfUuid=UUID&currentPage=1&pageSize=10
#
GET /workflow/runtime/nodes/{runtimeUuid}
#
POST /workflow/runtime/clear?wfUuid=UUID
```
### 3. 管理端接口
#### 3.1 工作流管理
```http
#
POST /admin/workflow/search
Content-Type: application/json
{
"title": "",
"isPublic": true,
"isEnable": true
}
# /
POST /admin/workflow/enable?uuid=UUID&isEnable=true
```
## 核心实现
### 1. 工作流引擎 (WorkflowEngine)
工作流引擎是整个模块的核心,负责:
- 工作流图的构建和编译
- 节点执行调度
- 状态管理和持久化
- 流式输出处理
```java
public class WorkflowEngine {
// 核心执行方法
public void run(User user, List<ObjectNode> userInputs, SseEmitter sseEmitter) {
// 1. 验证工作流状态
// 2. 创建运行时实例
// 3. 构建状态图
// 4. 执行工作流
// 5. 处理流式输出
}
// 恢复执行方法
public void resume(String userInput) {
// 1. 更新状态
// 2. 继续执行
}
}
```
### 2. 节点工厂 (WfNodeFactory)
节点工厂负责根据组件类型创建对应的节点实例:
```java
public class WfNodeFactory {
public static AbstractWfNode create(WorkflowComponent component,
WorkflowNode node,
WfState wfState,
WfNodeState nodeState) {
// 根据组件类型创建对应的节点实例
switch (component.getName()) {
case "Answer":
return new LLMAnswerNode(component, node, wfState, nodeState);
case "Switcher":
return new SwitcherNode(component, node, wfState, nodeState);
// ... 其他节点类型
}
}
}
```
### 3. 图构建器 (WorkflowGraphBuilder)
图构建器负责将工作流定义转换为可执行的状态图:
```java
public class WorkflowGraphBuilder {
public StateGraph<WfNodeState> build(WorkflowNode startNode) {
// 1. 构建编译节点树
// 2. 转换为状态图
// 3. 添加节点和边
// 4. 处理条件分支
// 5. 处理并行执行
}
}
```
## 流式响应机制
### 1. SSE 事件类型
工作流执行过程中会发送多种类型的 SSE 事件:
```javascript
// 节点开始执行
[NODE_RUN_节点UUID] - 节点执行开始事件
// 节点输入数据
[NODE_INPUT_节点UUID] - 节点输入数据事件
// 节点输出数据
[NODE_OUTPUT_节点UUID] - 节点输出数据事件
// 流式内容块
[NODE_CHUNK_节点UUID] - 流式内容块事件
// 等待用户输入
[NODE_WAIT_FEEDBACK_BY_节点UUID] - 等待用户输入事件
```
### 2. 流式处理流程
1. **初始化**: 创建工作流运行时实例
2. **节点执行**: 逐个执行工作流节点
3. **实时输出**: 通过 SSE 实时推送执行结果
4. **状态更新**: 实时更新节点和工作流状态
5. **错误处理**: 捕获并处理执行过程中的错误
## 扩展开发
### 1. 自定义节点开发
要开发自定义工作流节点,需要:
1. **创建节点类**:继承 `AbstractWfNode`
2. **实现处理逻辑**:重写 `onProcess()` 方法
3. **定义配置类**:创建节点配置类
4. **注册组件**:在组件表中注册新组件
```java
public class CustomNode extends AbstractWfNode {
@Override
protected NodeProcessResult onProcess() {
// 实现自定义处理逻辑
List<NodeIOData> outputs = new ArrayList<>();
// ... 处理逻辑
return NodeProcessResult.success(outputs);
}
}
```
### 2. 自定义组件注册
```sql
-- 在 t_workflow_component 表中添加新组件
INSERT INTO t_workflow_component (uuid, name, title, remark, is_enable)
VALUES (REPLACE(UUID(), '-', ''), 'CustomNode', '自定义节点', '自定义节点描述', true);
```

22
pom.xml
View File

@@ -20,7 +20,7 @@
<java.version>17</java.version>
<mysql.version>8.0.33</mysql.version>
<mybatis.version>3.5.16</mybatis.version>
<springdoc.version>2.8.5</springdoc.version>
<springdoc.version>2.8.13</springdoc.version>
<therapi-javadoc.version>0.15.0</therapi-javadoc.version>
<poi.version>5.2.3</poi.version>
<easyexcel.version>3.2.1</easyexcel.version>
@@ -265,13 +265,6 @@
<version>${lock4j.version}</version>
</dependency>
<!-- xxl-job-core -->
<dependency>
<groupId>com.xuxueli</groupId>
<artifactId>xxl-job-core</artifactId>
<version>${xxl-job.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>transmittable-thread-local</artifactId>
@@ -348,6 +341,19 @@
<version>${revision}</version>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-workflow</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-workflow-api</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</dependencyManagement>

View File

@@ -57,6 +57,11 @@
<artifactId>ruoyi-generator</artifactId>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-workflow</artifactId>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-aihuman</artifactId>

View File

@@ -16,9 +16,9 @@ spring:
master:
type: ${spring.datasource.type}
driverClassName: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3306/ruoyi-ai?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8&autoReconnect=true&rewriteBatchedStatements=true
username: root
password: root
# NOTE(review): a real production host, username and password were committed here.
# Rotate the leaked credentials immediately and externalize them instead of hardcoding:
url: jdbc:mysql://${DB_HOST:127.0.0.1}:3306/ruoyi-ai?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8&autoReconnect=true&rewriteBatchedStatements=true
username: ${DB_USERNAME:root}
password: ${DB_PASSWORD:root}
hikari:
# 最大连接池数量
@@ -37,6 +37,8 @@ spring:
connectionTestQuery: SELECT 1
# 多久检查一次连接的活性
keepaliveTime: 30000
mail:
username: xx
--- # redis 单机配置(单机与集群只能开启一个另一个需要注释掉)
spring.data:
@@ -102,5 +104,13 @@ pdf:
#百炼模型配置
dashscope:
key: sk-xxxx
model: qvq-max
local:
images: xx
files: xx

View File

@@ -156,6 +156,8 @@ security:
# actuator 监控配置
- /actuator
- /actuator/**
- /workflow/**
- /admin/workflow/**
# 多租户配置
tenant:
# 是否开启

View File

@@ -17,6 +17,7 @@
<module>ruoyi-chat-api</module>
<module>ruoyi-knowledge-api</module>
<module>ruoyi-system-api</module>
<module>ruoyi-workflow-api</module>
</modules>
<properties>

View File

@@ -18,7 +18,7 @@ import java.io.Serializable;
*/
@Data
@ExcelIgnoreUnannotated
@AutoMapper(target = ChatConfig.class)
@AutoMapper(target = ChatConfig.class)
public class ChatConfigVo implements Serializable {
@Serial

View File

@@ -0,0 +1,133 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-modules-api</artifactId>
<version>${revision}</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>ruoyi-workflow-api</artifactId>
<description>
工作流API模块
</description>
<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-system-api</artifactId>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-common-satoken</artifactId>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-common-mail</artifactId>
</dependency>
<dependency>
<groupId>org.ruoyi</groupId>
<artifactId>ruoyi-chat</artifactId>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-core</artifactId>
<version>1.2.0</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>5.8.12</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.bsc.langgraph4j</groupId>
<artifactId>langgraph4j-core</artifactId>
<version>1.5.3</version>
</dependency>
<dependency>
<groupId>org.bsc.langgraph4j</groupId>
<artifactId>langgraph4j-langchain4j</artifactId>
<version>1.5.3</version>
</dependency>
<dependency>
<groupId>io.swagger.core.v3</groupId>
<artifactId>swagger-annotations</artifactId>
<version>2.2.8</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
<version>1.2.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-community-dashscope</artifactId>
<version>1.2.0-beta8</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-generator</artifactId>
<version>3.5.3.1</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-http-client-jdk</artifactId>
<version>1.2.0</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-document-parser-apache-poi</artifactId>
<version>1.2.0-beta8</version>
</dependency>
<dependency>
<groupId>com.google.api-client</groupId>
<artifactId>google-api-client</artifactId>
<version>2.6.0</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,43 @@
package org.ruoyi.workflow;
import com.baomidou.mybatisplus.generator.FastAutoGenerator;
import com.baomidou.mybatisplus.generator.config.OutputFile;
import com.baomidou.mybatisplus.generator.config.rules.DbColumnType;
import java.sql.Types;
import java.util.Collections;

/**
 * One-off MyBatis-Plus code generator for the workflow tables.
 *
 * <p>Run {@link #main(String[])} manually; it connects to the configured database,
 * reads table metadata and writes generated entity/mapper/service sources to disk.
 * Connection settings can be overridden without recompiling via the system
 * properties {@code codegen.url}, {@code codegen.user} and {@code codegen.password}.</p>
 */
public class CodeGenerator {

    /**
     * Default JDBC URL.
     *
     * <p>Bug fix: the scheme must be {@code jdbc:postgresql://}, not
     * {@code jdbc:postgres://} — the original value could never resolve a driver.</p>
     *
     * <p>NOTE(review): {@code tinyInt1isBit} and {@code allowMultiQueries} are
     * MySQL-specific parameters and are ignored by the PostgreSQL driver —
     * confirm which database this generator is actually meant to target.</p>
     */
    private static final String DEFAULT_JDBC_URL =
        "jdbc:postgresql://172.17.30.40:5432/aideepin?useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&tinyInt1isBit=false&allowMultiQueries=true";

    public static void main(String[] args) {
        // Allow overriding the connection without editing source:
        // -Dcodegen.url=... -Dcodegen.user=... -Dcodegen.password=...
        String url = System.getProperty("codegen.url", DEFAULT_JDBC_URL);
        String user = System.getProperty("codegen.user", "postgres");
        String password = System.getProperty("codegen.password", "postgres");
        FastAutoGenerator.create(url, user, password)
            .globalConfig(builder -> {
                builder.author("moyz")      // author written into generated file headers
                    .enableSwagger()        // emit swagger annotations on entities
                    .fileOverride()         // overwrite previously generated files
                    .outputDir("D://");     // root output directory
            })
            .dataSourceConfig(builder -> builder.typeConvertHandler((globalConfig, typeRegistry, metaInfo) -> {
                int typeCode = metaInfo.getJdbcType().TYPE_CODE;
                if (typeCode == Types.SMALLINT) {
                    // Map SMALLINT columns to Integer instead of the default Short.
                    return DbColumnType.INTEGER;
                }
                return typeRegistry.getColumnType(metaInfo);
            }))
            .packageConfig(builder -> {
                builder.mapper("com.adi.common.mapper")
                    .parent("")
                    .moduleName("")
                    .entity("po")
                    .serviceImpl("service.impl")
                    // Mapper XML files go to a separate directory.
                    .pathInfo(Collections.singletonMap(OutputFile.xml, "D://mybatisplus-generatorcode"));
            })
            .strategyConfig(builder -> {
                builder.addInclude("adi_knowledge_base_qa_record") // tables to generate
                    .addTablePrefix("adi_");                       // strip prefix from class names
                builder.mapperBuilder().enableBaseResultMap().enableMapperAnnotation().build();
            })
            .execute();
    }
}

View File

@@ -0,0 +1,51 @@
package org.ruoyi.workflow.base;
import lombok.Data;
import org.ruoyi.workflow.enums.ErrorEnum;
import java.io.Serializable;

/**
 * Generic API response envelope for the workflow module.
 *
 * @param <T> payload type carried in {@link #data}
 */
@Data
public class BaseResponse<T> implements Serializable {

    private static final long serialVersionUID = 1L;

    // Whether the call succeeded.
    private boolean success;

    // Business status code (see ErrorEnum).
    private String code;

    // Human-readable message for the caller.
    private String message;

    // Response payload; may be null.
    private T data;

    public BaseResponse() {
    }

    public BaseResponse(boolean success) {
        this.success = success;
    }

    public BaseResponse(boolean success, T data) {
        this.data = data;
        this.success = success;
    }

    /**
     * Full constructor. NOTE: historically used for failure responses, so it
     * marks the response unsuccessful; success responses must set the flag
     * explicitly (as {@link #success(String)} does).
     */
    public BaseResponse(String code, String message, T data) {
        this.code = code;
        this.success = false;
        this.message = message;
        this.data = data;
    }

    /**
     * Builds a successful response carrying the given message.
     *
     * <p>Bug fix: the previous implementation returned the three-arg
     * constructor's result directly, and that constructor hardcodes
     * {@code success = false} — so every "success" response actually
     * reported failure. The flag is now set explicitly.</p>
     *
     * @param message message to return to the caller
     * @return a response with {@code success == true} and the SUCCESS code
     */
    public static BaseResponse<String> success(String message) {
        BaseResponse<String> response = new BaseResponse<>(ErrorEnum.SUCCESS.getCode(), message, "");
        response.setSuccess(true);
        return response;
    }
}

View File

@@ -0,0 +1,118 @@
package org.ruoyi.workflow.base;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedJdbcTypes;
import org.apache.ibatis.type.MappedTypes;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.workflow.WfNodeInputConfig;
import org.ruoyi.workflow.workflow.def.WfNodeIO;
import org.ruoyi.workflow.workflow.def.WfNodeParamRef;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import static org.ruoyi.workflow.workflow.WfNodeIODataUtil.INPUT_TYPE_TO_NODE_IO_DEF;

/**
 * MyBatis type handler mapping a JSON column to {@link WfNodeInputConfig}.
 *
 * <p>Read path: parses the column's JSON text, expecting two arrays —
 * {@code user_inputs} (polymorphic, deserialized per element via the
 * {@code type} field and {@code INPUT_TYPE_TO_NODE_IO_DEF}) and
 * {@code ref_inputs} (homogeneous {@link WfNodeParamRef} list).</p>
 *
 * <p>WARNING(review): the write path ({@link #setNonNullParameter}) is a
 * no-op — the PostgreSQL-specific implementation is commented out below, so
 * the parameter is never bound and inserts/updates through this handler will
 * NOT persist the config. Confirm whether this is intentional for the current
 * (MySQL?) target database.</p>
 */
@Slf4j
@MappedJdbcTypes({JdbcType.JAVA_OBJECT})
@MappedTypes({WfNodeInputConfig.class})
public class NodeInputConfigTypeHandler extends BaseTypeHandler<WfNodeInputConfig> {

    /**
     * Parses a raw JSON string into a {@link WfNodeInputConfig}.
     * The cast assumes the column always holds a JSON object (not array/scalar).
     */
    public static WfNodeInputConfig fillNodeInputConfig(String jsonSource) {
        ObjectNode jsonNode = (ObjectNode) JsonUtil.toJsonNode(jsonSource);
        return createNodeInputConfig(jsonNode);
    }

    /**
     * Builds a {@link WfNodeInputConfig} from an already-parsed JSON object.
     * Returns a config with empty lists when {@code jsonNode} is null; elements
     * that fail to deserialize are skipped with a warning rather than aborting.
     */
    public static WfNodeInputConfig createNodeInputConfig(ObjectNode jsonNode) {
        List<WfNodeIO> userInputs = new ArrayList<>();
        WfNodeInputConfig result = new WfNodeInputConfig();
        result.setUserInputs(userInputs);
        result.setRefInputs(new ArrayList<>());
        if (null == jsonNode) {
            return result;
        }
        // withArray() returns an empty array node when the field is absent.
        ArrayNode userInputsJson = jsonNode.withArray("user_inputs");
        ArrayNode refInputs = jsonNode.withArray("ref_inputs");
        if (!userInputsJson.isEmpty()) {
            for (JsonNode userInput : userInputsJson) {
                if (userInput instanceof ObjectNode objectNode) {
                    // The "type" field selects the concrete WfNodeIO subclass.
                    int type = objectNode.get("type").asInt();
                    Class<? extends WfNodeIO> nodeIOClass = INPUT_TYPE_TO_NODE_IO_DEF.get(WfIODataTypeEnum.getByValue(type));
                    WfNodeIO wfNodeIO = JsonUtil.fromJson(objectNode, nodeIOClass);
                    if (null != wfNodeIO) {
                        userInputs.add(wfNodeIO);
                    } else {
                        log.warn("用户输入格式不正确:{}", userInput);
                    }
                }
            }
        }
        if (!refInputs.isEmpty()) {
            List<WfNodeParamRef> list = JsonUtil.fromArrayNode(refInputs, WfNodeParamRef.class);
            if (CollectionUtils.isNotEmpty(list)) {
                result.setRefInputs(list);
            } else {
                log.warn("引用输入格式不正确:{}", refInputs);
            }
        }
        return result;
    }

    /**
     * Write path — currently a deliberate(?) no-op; see the class-level WARNING.
     * The commented code below is the original PostgreSQL jsonb implementation.
     */
    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, WfNodeInputConfig parameter, JdbcType jdbcType) {
        // PGobject jsonObject = new PGobject();
        // jsonObject.setType("jsonb");
        // try {
        // jsonObject.setValue(JsonUtil.toJson(parameter));
        // ps.setObject(i, jsonObject);
        // } catch (Exception e) {
        // throw new RuntimeException(e);
        // }
    }

    @Override
    public WfNodeInputConfig getNullableResult(ResultSet rs, String columnName) throws SQLException {
        String jsonSource = rs.getString(columnName);
        if (jsonSource != null) {
            try {
                return fillNodeInputConfig(jsonSource);
            } catch (Exception e) {
                // Wrap parse failures so MyBatis surfaces them as runtime errors.
                throw new RuntimeException(e);
            }
        }
        return null;
    }

    // NOTE(review): unlike the two sibling overloads, this one does not wrap
    // parse failures in RuntimeException — confirm whether the asymmetry is intended.
    @Override
    public WfNodeInputConfig getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        String jsonSource = rs.getString(columnIndex);
        if (jsonSource != null) {
            return fillNodeInputConfig(jsonSource);
        }
        return null;
    }

    @Override
    public WfNodeInputConfig getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        String jsonSource = cs.getString(columnIndex);
        if (jsonSource != null) {
            try {
                return fillNodeInputConfig(jsonSource);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return null;
    }
}

View File

@@ -0,0 +1,122 @@
package org.ruoyi.workflow.base;
import cn.dev33.satoken.stp.StpUtil;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.common.core.domain.model.LoginUser;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.common.satoken.utils.LoginHelper;
import org.ruoyi.workflow.entity.User;
import org.ruoyi.workflow.enums.UserStatusEnum;
import static org.ruoyi.workflow.enums.ErrorEnum.A_USER_NOT_FOUND;

/**
 * Thread-scoped adapter bridging the Sa-Token login state to the workflow
 * module's {@link User} model. The mapped user and access token are cached
 * per thread; call {@link #unload()} when the request finishes to avoid
 * stale state on pooled threads.
 */
public class ThreadContext {

    // Per-thread cache of the mapped workflow user.
    private static final ThreadLocal<User> USER_HOLDER = new ThreadLocal<>();

    // Per-thread cache of the Sa-Token access token.
    private static final ThreadLocal<String> TOKEN_HOLDER = new ThreadLocal<>();

    private ThreadContext() {
    }

    /**
     * Returns the workflow user for the current login, mapping and caching it
     * on first access.
     *
     * @throws BaseException when no login user is available
     */
    public static User getCurrentUser() {
        User user = USER_HOLDER.get();
        if (user != null) {
            return user;
        }
        LoginUser loginUser = LoginHelper.getLoginUser();
        if (loginUser == null) {
            throw new BaseException(A_USER_NOT_FOUND.getInfo());
        }
        user = toWorkflowUser(loginUser);
        USER_HOLDER.set(user);
        return user;
    }

    /**
     * Explicitly sets (or, with {@code null}, clears) the current user —
     * intended for tests and special execution contexts.
     */
    public static void setCurrentUser(User user) {
        if (user != null) {
            USER_HOLDER.set(user);
        } else {
            USER_HOLDER.remove();
        }
    }

    /**
     * Returns the current login user's id, preferring the Sa-Token session and
     * falling back to the mapped workflow user.
     */
    public static Long getCurrentUserId() {
        Long sessionUserId = LoginHelper.getUserId();
        return sessionUserId != null ? sessionUserId : getCurrentUser().getId();
    }

    /**
     * Returns the current access token, caching it per thread. May return
     * {@code null} when no token is available.
     */
    public static String getToken() {
        String cached = TOKEN_HOLDER.get();
        if (StringUtils.isNotBlank(cached)) {
            return cached;
        }
        String token;
        try {
            token = StpUtil.getTokenValue();
        } catch (Exception ignore) {
            // Best effort: outside a web context Sa-Token may throw; treat as "no token".
            token = null;
        }
        if (StringUtils.isNotBlank(token)) {
            TOKEN_HOLDER.set(token);
        }
        return token;
    }

    /** Explicitly sets (or, with a blank value, clears) the cached token. */
    public static void setToken(String token) {
        if (StringUtils.isBlank(token)) {
            TOKEN_HOLDER.remove();
        } else {
            TOKEN_HOLDER.set(token);
        }
    }

    /** Delegates the login check to Sa-Token. */
    public static boolean isLogin() {
        return LoginHelper.isLogin();
    }

    /** Alias kept for API compatibility; identical to {@link #getCurrentUser()}. */
    public static User getExistCurrentUser() {
        return getCurrentUser();
    }

    /** Clears all per-thread state; call at end of request handling. */
    public static void unload() {
        USER_HOLDER.remove();
        TOKEN_HOLDER.remove();
    }

    /**
     * Maps a Sa-Token {@link LoginUser} onto the workflow {@link User},
     * zeroing all quota fields.
     */
    private static User toWorkflowUser(LoginUser loginUser) {
        User user = new User();
        user.setId(loginUser.getUserId());
        user.setName(StringUtils.defaultIfBlank(loginUser.getNickName(), loginUser.getUsername()));
        // NOTE(review): username is stored as the email — confirm this is intended.
        user.setEmail(loginUser.getUsername());
        user.setUuid(String.valueOf(loginUser.getUserId()));
        user.setUserStatus(UserStatusEnum.NORMAL);
        user.setIsAdmin(LoginHelper.isSuperAdmin(loginUser.getUserId()));
        user.setUnderstandContextMsgPairNum(0);
        user.setQuotaByTokenDaily(0);
        user.setQuotaByTokenMonthly(0);
        user.setQuotaByRequestDaily(0);
        user.setQuotaByRequestMonthly(0);
        user.setQuotaByImageDaily(0);
        user.setQuotaByImageMonthly(0);
        user.setIsDeleted(false);
        return user;
    }
}

View File

@@ -0,0 +1,76 @@
package org.ruoyi.workflow.config;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import org.ruoyi.workflow.util.LocalDateTimeUtil;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.task.AsyncTaskExecutor;
import org.springframework.http.client.BufferingClientHttpRequestFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import org.springframework.web.client.RestTemplate;

/**
 * Shared infrastructure beans for the workflow module: HTTP client, JSON
 * mapper, async executors and the bean validator.
 */
@Slf4j
@Configuration
public class BeanConfig {

    // Connect/read timeout for outbound HTTP calls, in milliseconds.
    private static final int HTTP_TIMEOUT_MILLIS = 60000;

    /**
     * RestTemplate with 60s connect/read timeouts. The buffering factory lets
     * response bodies be read more than once (e.g. by logging interceptors).
     */
    @Bean
    public RestTemplate restTemplate() {
        log.info("Configuration:create restTemplate");
        SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
        factory.setConnectTimeout(HTTP_TIMEOUT_MILLIS);
        factory.setReadTimeout(HTTP_TIMEOUT_MILLIS);
        RestTemplate template = new RestTemplate();
        template.setRequestFactory(new BufferingClientHttpRequestFactory(factory));
        return template;
    }

    /**
     * Primary Jackson mapper: registers the project's date module plus
     * java.time and JDK8 modules, and omits null-valued fields from output.
     */
    @Bean
    @Primary
    public ObjectMapper objectMapper() {
        log.info("Configuration:create objectMapper");
        ObjectMapper mapper = new Jackson2ObjectMapperBuilder().createXmlMapper(false).build();
        mapper.registerModules(LocalDateTimeUtil.getSimpleModule(), new JavaTimeModule(), new Jdk8Module());
        // Null values are excluded from serialized JSON.
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        return mapper;
    }

    /**
     * Default async executor, sized relative to available processors.
     *
     * <p>NOTE(review): no queue capacity is set, so ThreadPoolTaskExecutor uses
     * an unbounded queue by default — the pool will never grow past its core
     * size and maxPoolSize is effectively ignored. Confirm whether a bounded
     * queue was intended.</p>
     */
    @Bean(name = "mainExecutor")
    @Primary
    public AsyncTaskExecutor mainExecutor() {
        int processorsNum = Runtime.getRuntime().availableProcessors();
        log.info("mainExecutor,processorsNum:{}", processorsNum);
        ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();
        pool.setCorePoolSize(processorsNum * 2);
        pool.setMaxPoolSize(100);
        return pool;
    }

    /**
     * Executor dedicated to image-generation work.
     * Same unbounded-queue caveat as {@code mainExecutor} applies.
     */
    @Bean(name = "imagesExecutor")
    public AsyncTaskExecutor imagesExecutor() {
        int processorsNum = Runtime.getRuntime().availableProcessors();
        ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();
        log.info("imagesExecutor corePoolSize:{},maxPoolSize:{}", processorsNum, processorsNum * 2);
        pool.setCorePoolSize(processorsNum);
        pool.setMaxPoolSize(processorsNum * 2);
        return pool;
    }

    /** Standard JSR-380 bean validator. */
    @Bean(name = "beanValidator")
    public LocalValidatorFactoryBean validator() {
        return new LocalValidatorFactoryBean();
    }
}

View File

@@ -0,0 +1,407 @@
package org.ruoyi.workflow.cosntant;
import dev.langchain4j.model.input.PromptTemplate;
import lombok.extern.slf4j.Slf4j;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class AdiConstant {
public static final int DEFAULT_PAGE_SIZE = 10;
/**
* 验证码id过期时间1小时
*/
public static final int AUTH_CAPTCHA_ID_EXPIRE = 1;
/**
* 验证码过期时间5分钟
*/
public static final int AUTH_CAPTCHA_EXPIRE = 5;
/**
* 注册激活码有效时长8小时
*/
public static final int AUTH_ACTIVE_CODE_EXPIRE = 8;
/**
* token存活时间8小时
*/
public static final int USER_TOKEN_EXPIRE = 8;
public static final String DEFAULT_PASSWORD = "123456";
public static final int LOGIN_MAX_FAIL_TIMES = 3;
public static final String[] WEB_RESOURCES = {
"/swagger-ui/index.html",
"/swagger-ui",
"/swagger-resources",
"/v3/api-docs",
"/favicon.ico",
".css",
".js",
"/doc.html"
};
public static final int SECRET_KEY_TYPE_SYSTEM = 1;
public static final int SECRET_KEY_TYPE_CUSTOM = 2;
public static final String OPENAI_MESSAGE_DONE_FLAG = "[DONE]";
public static final String DEFAULT_MODEL = "gpt-3.5-turbo";
public static final String CREATE_IMAGE_RESP_FORMATS_B64JSON = "b64_json";
public static final String OPENAI_CREATE_IMAGE_RESP_FORMATS_URL = "url";
public static final List<String> DALLE2_CREATE_IMAGE_SIZES = List.of("256x256", "512x512", "1024x1024");
public static final List<String> DALLE3_CREATE_IMAGE_SIZES = List.of("1024x1024", "1024x1792", "1792x1024");
public static final PromptTemplate PROMPT_EXTRA_TEMPLATE = PromptTemplate.from("""
## 要求
尽可能准确地回答用户的问题
## 用户的问题
{{question}}
## 注意
{{extraInfo}}
""");
public static final PromptTemplate PROMPT_INFO_TEMPLATE = PromptTemplate.from("""
## 要求
根据已知信息,尽可能准确地回答用户的问题
## 用户的问题
{{question}}
## 已知信息
{{information}}
## 注意
回答的内容不能让用户感知到已知信息的存在
""");
/**
* 可能的 extraInfo 如适用转音频的要求: 2. 回答的内容要尽量口语化,以方便将内容转成语音
*/
public static final PromptTemplate PROMPT_INFO_EXTRA_TEMPLATE = PromptTemplate.from("""
## 要求
根据已知信息,尽可能准确地回答用户的问题
## 用户的问题
{{question}}
## 已知信息
{{information}}
## 注意
1. 回答的内容不能让用户感知到已知信息的存在
{{extraInfo}}
""");
public static final String PROMPT_EXTRA_AUDIO = "2. 回答的内容要尽量口语化,以方便将内容转成语音";
public static final Double LLM_TEMPERATURE_DEFAULT = 0.7D;
public static final Double RAG_RETRIEVE_MIN_SCORE_DEFAULT = 0.6D;
public static final int tts_ = 1;
public static final String[] POI_DOC_TYPES = {"doc", "docx", "ppt", "pptx", "xls", "xlsx"};
public static final long SSE_TIMEOUT = (2 * 60 + 30) * 1000L; // 2.5分钟
public static final int RAG_TYPE_KB = 1;
public static final int RAG_TYPE_SEARCH = 2;
/**
* 每块文档长度按token算
*/
public static final int RAG_MAX_SEGMENT_SIZE_IN_TOKENS = 1000;
/**
* 文档召回默认数量
*/
public static final int RAG_RETRIEVE_NUMBER_DEFAULT = 3;
/**
* 文档召回最大数量
*/
public static final int RAG_RETRIEVE_NUMBER_MAX = 5;
/**
* 向量搜索时命中所需的最低分数
*/
public static final double RAG_MIN_SCORE = 0.6;
/**
* 默认的最大输入token数
*/
public static final int LLM_MAX_INPUT_TOKENS_DEFAULT = 4096;
public static final String LLM_INPUT_TYPE_TEXT = "text";
public static final String LLM_INPUT_TYPE_IMAGE = "image";
public static final String LLM_INPUT_TYPE_AUDIO = "audio";
public static final String LLM_INPUT_TYPE_VIDEO = "video";
public static final String[] GRAPH_ENTITY_EXTRACTION_ENTITY_TYPES = {"organization", "person", "geo", "event"};
public static final String GRAPH_TUPLE_DELIMITER = "<|>";
public static final String GRAPH_RECORD_DELIMITER = "##";
public static final String GRAPH_COMPLETION_DELIMITER = "<|COMPLETE|>";
public static final List<String> GRAPH_STORE_MAIN_FIELDS = List.of("name", "label", "text_segment_id", "description");
/**
* 唯一标识字段如果该字段有指定则根据该配置判断Vertex或Edge是否唯一如知识库中根据 name、metadata->>kb_uuid 来做判断
*/
public static final String GRAPH_METADATA_IDENTIFY_COLUMNS = "graph_metadata_identify_columns";
/**
* 内容追加字段
* 更新数据时,如遇到该标识中的字段,追加内容而不是替换
*/
public static final String GRAPH_METADATA_APPEND_COLUMNS = "graph_metadata_append_columns_if_exist";
public static final int AI_IMAGE_TYPE_REGULAR = 1;
public static final int AI_IMAGE_TYPE_THUMBNAIL = 2;
public static final int AI_IMAGE_TYPE_REGULAR_MARK = 3;
public static final int AI_IMAGE_TYPE_THUMBNAIL_MARK = 4;
public static final String DOC_INDEX_TYPE_EMBEDDING = "embedding";
public static final String DOC_INDEX_TYPE_GRAPHICAL = "graphical";
public static final String DRAW_TYPE_PUBLIC = "public";
public static final String DRAW_TYPE_STARRED = "starred";
public static final String DRAW_TYPE_MINE = "mine";
public static final String MP_LIMIT_1 = "limit 1";
/**
* 文件存储在本地
*/
public static final int STORAGE_LOCATION_LOCAL = 1;
/**
* 文件存储到阿里云OSS
*/
public static final int STORAGE_LOCATION_ALI_OSS = 2;
public static final String URL_PREFIX_FILE = "/file/";
public static final String URL_PREFIX_IMAGE = "/image/";
public static final String URL_PREFIX_MY_IMAGE = "/my-image/";
public static final String URL_PREFIX_MY_THUMBNAIL = "/my-thumbnail/";
public static final List<String> IMAGE_EXTENSIONS = List.of("jpg", "jpeg", "png", "gif", "bmp", "webp");
public static final String W_FAILED = "FAILED";
public static final String COLUMN_NAME_IS_DELETE = "is_deleted";
public static final String COLUMN_NAME_USER_ID = "user_id";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_UUID = "uuid";
public static final String FORM_DATA_BOUNDARY_PRE = "----WebKitFormBoundary";
private AdiConstant() {
}
public static class ConversationConstant {
public static final String DEFAULT_NAME = "通用智能助手";
public static final int ANSWER_CONTENT_TYPE_AUTO = 1;
public static final int ANSWER_CONTENT_TYPE_TEXT = 2;
public static final int ANSWER_CONTENT_TYPE_AUDIO = 3;
public static final String AUDIO_CONFIG_FIELD_ANSWER_VOICE = "answer_voice";
public static final String AUDIO_CONFIG_FIELD_VOICE_PLATFORM = "platform";
private ConversationConstant() {
}
}
/**
 * Image-generation constants: interaction methods and job statuses.
 */
public static class GenerateImage {
    // How the user interacts with the image model.
    public static final int INTERACTING_METHOD_GENERATE_IMAGE = 1;
    public static final int INTERACTING_METHOD_EDIT_IMAGE = 2;
    public static final int INTERACTING_METHOD_VARIATION = 3;
    public static final int INTERACTING_METHOD_BACKGROUND_GENERATION = 4;
    // Lifecycle status of an image-generation job.
    public static final int STATUS_DOING = 1;
    public static final int STATUS_FAIL = 2;
    public static final int STATUS_SUCCESS = 3;
    private GenerateImage() {
    }
}
/**
 * Keys stored in embedding/document metadata maps.
 */
public static class MetadataKey {
    // Knowledge base uuid and knowledge-base item uuid the segment belongs to.
    public static final String KB_UUID = "kb_uuid";
    public static final String KB_ITEM_UUID = "kb_item_uuid";
    // Search engine name and search record uuid for web-search results.
    public static final String ENGINE_NAME = "engine_name";
    public static final String SEARCH_UUID = "search_uuid";
    private MetadataKey() {
    }
}
/**
 * Keys of system configuration entries (stored in the sys-config table/cache).
 */
public static class SysConfigKey {
    // Per-platform model settings (JSON blobs).
    public static final String DEEPSEEK_SETTING = "deepseek_setting";
    public static final String OPENAI_SETTING = "openai_setting";
    public static final String DASHSCOPE_SETTING = "dashscope_setting";
    public static final String QIANFAN_SETTING = "qianfan_setting";
    public static final String OLLAMA_SETTING = "ollama_setting";
    public static final String SILICONFLOW_SETTING = "siliconflow_setting";
    // Web-search engine settings.
    public static final String GOOGLE_SETTING = "google_setting";
    public static final String BING_SETTING = "bing_setting";
    public static final String BAIDU_SETTING = "baidu_setting";
    // Rate limits for text and image requests.
    public static final String REQUEST_TEXT_RATE_LIMIT = "request_text_rate_limit";
    public static final String REQUEST_IMAGE_RATE_LIMIT = "request_image_rate_limit";
    // Maximum number of conversations per user.
    public static final String CONVERSATION_MAX_NUM = "conversation_max_num";
    // Usage quotas by tokens / requests / images / QA asks.
    public static final String QUOTA_BY_TOKEN_DAILY = "quota_by_token_daily";
    public static final String QUOTA_BY_TOKEN_MONTHLY = "quota_by_token_monthly";
    public static final String QUOTA_BY_REQUEST_DAILY = "quota_by_request_daily";
    public static final String QUOTA_BY_REQUEST_MONTHLY = "quota_by_request_monthly";
    public static final String QUOTA_BY_IMAGE_DAILY = "quota_by_image_daily";
    public static final String QUOTA_BY_IMAGE_MONTHLY = "quota_by_image_monthly";
    public static final String QUOTA_BY_QA_ASK_DAILY = "quota_by_qa_ask_daily";
    // File storage location selector and Aliyun OSS settings.
    public static final String STORAGE_LOCATION = "storage_location";
    public static final String STORAGE_LOCATION_ALI_OSS = "storage_location_ali_oss";
    // Speech recognition / synthesis settings.
    public static final String ASR_SETTING = "asr_setting";
    public static final String TTS_SETTING = "tts_setting";
    private SysConfigKey() {
    }
}
public static class ModelPlatform {
public static final String DEEPSEEK = "deepseek";
public static final String OPENAI = "openai";
public static final String DASHSCOPE = "dashscope";
public static final String QIANFAN = "qianfan";
public static final String OLLAMA = "ollama";
public static final String SILICONFLOW = "siliconflow";
private ModelPlatform() {
}
// 获取所有公共静态常量String类型的值的列表
public static List<String> getModelConstants() {
List<String> list = new ArrayList<>();
Class<ModelPlatform> clazz = ModelPlatform.class;
for (Field field : clazz.getDeclaredFields()) {
try {
String value = (String) field.get(null);
list.add(value);
} catch (ReflectiveOperationException e) {
log.error("error", e);
}
}
return list;
}
}
public static class ModelType {
public static final String TEXT = "text";
public static final String IMAGE = "image";
public static final String EMBEDDING = "embedding";
public static final String RERANK = "rerank";
public static final String ASR = "asr";
public static final String TTS = "tts";
private ModelType() {
}
public static List<String> getModelType() {
List<String> list = new ArrayList<>();
Class<ModelType> clazz = ModelType.class;
for (Field field : clazz.getDeclaredFields()) {
try {
String value = (String) field.get(null);
list.add(value);
} catch (ReflectiveOperationException e) {
log.error("error", e);
}
}
return list;
}
}
/**
 * Supported web search engines plus the country and language codes accepted
 * by the Google search API ("gl" and "hl" request parameters).
 */
public static class SearchEngineName {
    public static final String GOOGLE = "google";
    public static final String BING = "bing";
    public static final String BAIDU = "baidu";
    // Google "gl" (geolocation) country codes.
    public static final String[] GOOGLE_COUNTRIES = {"cn", "af", "al", "dz", "as", "ad", "ao", "ai", "aq", "ag", "ar", "am", "aw", "au", "at", "az", "bs", "bh", "bd", "bb", "by", "be", "bz", "bj", "bm", "bt", "bo", "ba", "bw", "bv", "br", "io", "bn", "bg", "bf", "bi", "kh", "cm", "ca", "cv", "ky", "cf", "td", "cl", "cx", "cc", "co", "km", "cg", "cd", "ck", "cr", "ci", "hr", "cu", "cy", "cz", "dk", "dj", "dm", "do", "ec", "eg", "sv", "gq", "er", "ee", "et", "fk", "fo", "fj", "fi", "fr", "gf", "pf", "tf", "ga", "gm", "ge", "de", "gh", "gi", "gr", "gl", "gd", "gp", "gu", "gt", "gn", "gw", "gy", "ht", "hm", "va", "hn", "hk", "hu", "is", "in", "id", "ir", "iq", "ie", "il", "it", "jm", "jp", "jo", "kz", "ke", "ki", "kp", "kr", "kw", "kg", "la", "lv", "lb", "ls", "lr", "ly", "li", "lt", "lu", "mo", "mk", "mg", "mw", "my", "mv", "ml", "mt", "mh", "mq", "mr", "mu", "yt", "mx", "fm", "md", "mc", "mn", "ms", "ma", "mz", "mm", "na", "nr", "np", "nl", "an", "nc", "nz", "ni", "ne", "ng", "nu", "nf", "mp", "no", "om", "pk", "pw", "ps", "pa", "pg", "py", "pe", "ph", "pn", "pl", "pt", "pr", "qa", "re", "ro", "ru", "rw", "sh", "kn", "lc", "pm", "vc", "ws", "sm", "st", "sa", "sn", "rs", "sc", "sl", "sg", "sk", "si", "sb", "so", "za", "gs", "es", "lk", "sd", "sr", "sj", "sz", "se", "ch", "sy", "tw", "tj", "tz", "th", "tl", "tg", "tk", "to", "tt", "tn", "tr", "tm", "tc", "tv", "ug", "ua", "ae", "uk", "gb", "us", "um", "uy", "uz", "vu", "ve", "vn", "vg", "vi", "wf", "eh", "ye", "zm", "zw"};
    // Google "hl" (host language) codes, including Google's novelty locales (xx-*).
    public static final String[] GOOGLE_LANGUAGES = {"zh-cn", "zh-tw", "af", "ak", "sq", "ws", "am", "ar", "hy", "az", "eu", "be", "bem", "bn", "bh", "xx-bork", "bs", "br", "bg", "bt", "km", "ca", "chr", "ny", "co", "hr", "cs", "da", "nl", "xx-elmer", "en", "eo", "et", "ee", "fo", "tl", "fi", "fr", "fy", "gaa", "gl", "ka", "de", "el", "kl", "gn", "gu", "xx-hacker", "ht", "ha", "haw", "iw", "hi", "hu", "is", "ig", "id", "ia", "ga", "it", "ja", "jw", "kn", "kk", "rw", "rn", "xx-klingon", "kg", "ko", "kri", "ku", "ckb", "ky", "lo", "la", "lv", "ln", "lt", "loz", "lg", "ach", "mk", "mg", "ms", "ml", "mt", "mv", "mi", "mr", "mfe", "mo", "mn", "sr-me", "my", "ne", "pcm", "nso", "no", "nn", "oc", "or", "om", "ps", "fa", "xx-pirate", "pl", "pt", "pt-br", "pt-pt", "pa", "qu", "ro", "rm", "nyn", "ru", "gd", "sr", "sh", "st", "tn", "crs", "sn", "sd", "si", "sk", "sl", "so", "es", "es-419", "su", "sw", "sv", "tg", "ta", "tt", "te", "th", "ti", "to", "lua", "tum", "tr", "tk", "tw", "ug", "uk", "ur", "uz", "vu", "vi", "cy", "wo", "xh", "yi", "yo", "zu"};
    private SearchEngineName() {
    }
}
/**
 * Event names sent over the Server-Sent-Events (SSE) channel to the frontend.
 */
public static class SSEEventName {
    public static final String START = "[START]";
    public static final String DONE = "[DONE]";
    public static final String ERROR = "[ERROR]";
    public static final String META = "[META]";
    public static final String AUDIO = "[AUDIO]";
    public static final String THINKING = "[THINKING]";
    // Source links collected by AI-assisted web search.
    public static final String AI_SEARCH_SOURCE_LINKS = "[SOURCE_LINKS]";
    // Workflow events: streamed chunk of a node, and a node's final output.
    public static final String WF_NODE_CHUNK = "[WF_NODE_CHUNK]";
    public static final String WF_NODE_OUTPUT = "[WF_NODE_OUTPUT]";
    public static final String STATE_CHANGED = "[STATE_CHANGED]";
    private SSEEventName() {
    }
}
/**
 * Predefined JSON payloads for {@code STATE_CHANGED} SSE events.
 * Note: each text block ends with a trailing newline; consumers should trim if needed.
 */
public static class SSEEventData {
    /**
     * State: analysing the question (e.g. sensitive-word checks).
     */
    public static final String STATE_QUESTION_ANALYSING = """
            {"state":"question_analysing","remark":"问题分析中"}
            """;
    // State: searching the knowledge base.
    public static final String STATE_KNOWLEDGE_SEARCHING = """
            {"state":"knowledge_searching","remark":"知识库搜索中"}
            """;
    // Deprecated in favour of the dedicated THINKING event.
    public static final String STATE_THINKING = """
            {"state":"thinking","remark":"推理中"}
            """;
    // State: the model is streaming its answer.
    public static final String STATE_RESPONDING = """
            {"state":"responding","remark":"回答中"}
            """;

    // Constants holder — not instantiable (added for consistency with sibling classes).
    private SSEEventData() {
    }
}
/**
 * Workflow engine constants: default parameter names, node/workflow statuses,
 * node process types, and mail sender types.
 */
public static class WorkflowConstant {
    // Default I/O parameter names of a workflow node.
    public static final String DEFAULT_INPUT_PARAM_NAME = "input";
    public static final String DEFAULT_OUTPUT_PARAM_NAME = "output";
    public static final String DEFAULT_ERROR_OUTPUT_PARAM_NAME = "error_msg";
    // State key carrying human feedback for human-in-the-loop nodes.
    public static final String HUMAN_FEEDBACK_KEY = "human_feedback";
    // Per-node execution status.
    public static final int NODE_PROCESS_STATUS_READY = 1;
    public static final int NODE_PROCESS_STATUS_DOING = 2;
    public static final int NODE_PROCESS_STATUS_SUCCESS = 3;
    public static final int NODE_PROCESS_STATUS_FAIL = 4;
    // Whole-workflow execution status; 5 = paused waiting for user input.
    public static final int WORKFLOW_PROCESS_STATUS_READY = 1;
    public static final int WORKFLOW_PROCESS_STATUS_DOING = 2;
    public static final int WORKFLOW_PROCESS_STATUS_SUCCESS = 3;
    public static final int WORKFLOW_PROCESS_STATUS_FAIL = 4;
    public static final int WORKFLOW_PROCESS_STATUS_WAITING_INPUT = 5;
    // How a node is scheduled: sequential, conditional branch, or parallel.
    public static final int WORKFLOW_NODE_PROCESS_TYPE_NORMAL = 1;
    public static final int WORKFLOW_NODE_PROCESS_TYPE_CONDITIONAL = 2;
    public static final int WORKFLOW_NODE_PROCESS_TYPE_PARALLEL = 3;
    // Mail node: use the system-wide sender vs. a user-provided sender.
    public static final int MAIL_SENDER_TYPE_SYS = 1;
    public static final int MAIL_SENDER_TYPE_CUSTOM = 2;

    // Constants holder — not instantiable (added for consistency with sibling classes).
    private WorkflowConstant() {
    }
}
/**
 * Supported token-estimator implementations.
 */
public static class TokenEstimator {
    // Fields were previously mutable (public static, non-final); constants must be final.
    public static final String OPENAI = "openai";
    public static final String HUGGING_FACE = "huggingface";
    public static final String QWEN = "qwen";
    /** All estimator names; immutable. */
    public static final List<String> ALL = List.of(OPENAI, HUGGING_FACE, QWEN);

    // Constants holder — not instantiable.
    private TokenEstimator() {
    }
}
/**
 * Embedding model identifiers.
 */
public static class EmbeddingModel {
    // Was previously mutable (public static, non-final); constants must be final.
    public static final String ALL_MINILM_L6 = "local:all-minilm-l6-v2";

    // Constants holder — not instantiable.
    private EmbeddingModel() {
    }
}
/**
 * MCP (Model Context Protocol) server constants.
 */
public static class McpConstant {
    // Transport protocols for talking to an MCP server.
    public static final String TRANSPORT_TYPE_SSE = "sse";
    public static final String TRANSPORT_TYPE_STDIO = "stdio";
    // Installation flavours of an MCP server.
    public static final String INSTALL_TYPE_REMOTE = "remote";
    public static final String INSTALL_TYPE_WASM = "wasm";
    public static final String INSTALL_TYPE_LOCAL = "local";
    public static final String INSTALL_TYPE_DOCKER = "docker";

    // Constants holder — not instantiable (added for consistency with sibling classes).
    private McpConstant() {
    }
}
/**
 * Text-to-speech constants.
 */
public static class TtsConstant {
    /**
     * Speech synthesis performed on the client side.
     */
    public static final String SYNTHESIZER_CLIENT = "client";
    /**
     * Speech synthesis performed on the server side.
     */
    public static final String SYNTHESIZER_SERVER = "server";
    /**
     * Default DashScope (Tongyi) voice — "longyingyan" (stern female voice).
     */
    public static final String DASHSCOPE_DEFAULT_VOICE = "longyingyan";

    // Constants holder — not instantiable (added for consistency with sibling classes).
    private TtsConstant() {
    }
}
/**
 * Keys of custom, non-standard parameters attached to chat requests.
 */
public static class CustomChatRequestParameterKeys {
    /**
     * Whether "thinking" (reasoning) mode is enabled; disabled by default.
     */
    public static final String ENABLE_THINKING = "enable_thinking";
    private CustomChatRequestParameterKeys() {
    }
}
}

View File

@@ -0,0 +1,116 @@
package org.ruoyi.workflow.cosntant;
/**
 * Redis key patterns used across the workflow module. Placeholders ({0}, {1})
 * are filled via {@link java.text.MessageFormat}.
 *
 * NOTE(review): the package name "cosntant" is a typo for "constant"; renaming
 * touches every importer, so it is only flagged here.
 */
public class RedisKeyConstant {
    /**
     * Account activation code.
     * {0}: activation code.
     */
    public static final String AUTH_ACTIVE_CODE = "auth:activeCode:{0}";
    /**
     * Captcha used during registration.
     * {0}: captcha id; value: captcha text.
     */
    public static final String AUTH_REGISTER_CAPTCHA_ID = "auth:register:captcha:{0}";
    /**
     * Captcha used during login.
     * {0}: captcha id; value: captcha text.
     */
    public static final String AUTH_LOGIN_CAPTCHA_ID = "auth:login:captcha:{0}";
    /**
     * Registration captcha marker.
     * {0}: captcha text; value: 1.
     * NOTE(review): identical pattern to AUTH_REGISTER_CAPTCHA_ID — looks like a
     * copy-paste; confirm the intended key namespace before relying on it.
     */
    public static final String AUTH_CAPTCHA = "auth:register:captcha:{0}";
    /**
     * Login token.
     * {0}: user token; value: JSON-serialized user.
     */
    public static final String USER_TOKEN = "user:token:{0}";
    /**
     * {0}: guest uuid; value: JSON-serialized guest.
     */
    public static final String GUEST_UUID = "guest:uuid:{0}";
    /**
     * Login failure counter.
     * {0}: user email; value: failure count.
     */
    public static final String LOGIN_FAIL_COUNT = "user:login:fail:{0}";
    /**
     * Whether the user currently has an AI request in flight.
     * {0}: user id; value: 1 or 0.
     */
    public static final String USER_ASKING = "user:asking:{0}";
    /**
     * Whether the user currently has an image-generation job in flight.
     * {0}: user id; value: 1 or 0.
     */
    public static final String USER_DRAWING = "user:drawing:{0}";
    /**
     * Rate-limit counters for user requests.
     * {0}: user id; value: request count in the current time window.
     */
    public static final String USER_REQUEST_TEXT_TIMES = "user:request-text:times:{0}";
    public static final String USER_REQUEST_IMAGE_TIMES = "user:request-image:times:{0}";
    /**
     * User info cache; callers append the user id themselves.
     * NOTE(review): unlike sibling keys this has no {0} placeholder — confirm callers.
     */
    public static final String USER_INFO = "user:info:";
    /**
     * Password-reset request.
     * {0}: random code; value: user id, validated later in the reset flow.
     */
    public static final String FIND_MY_PASSWORD = "user:find:password:{0}";
    /**
     * Daily QA ask counter.
     * {0}: user id, {1}: date (yyyyMMdd); value: ask count.
     */
    public static final String AQ_ASK_TIMES = "qa:ask:limit:{0}:{1}";
    /**
     * Knowledge-point generation quota counter.
     * {0}: user id.
     * NOTE(review): prefix "aq:" vs. AQ_ASK_TIMES' "qa:" — the constant names and
     * prefixes look transposed; verify intent before changing either.
     */
    public static final String QA_ITEM_CREATE_LIMIT = "aq:item:create:{0}";
    /**
     * Signal to recalculate knowledge-base statistics; value: knowledge base uuid.
     */
    public static final String KB_STATISTIC_RECALCULATE_SIGNAL = "kb:statistic:recalculate:signal";
    // Statistic namespaces.
    public static final String STATISTIC = "statistic";
    public static final String STATISTIC_USER = "user";
    public static final String STATISTIC_KNOWLEDGE_BASE = "kb";
    public static final String STATISTIC_TOKEN_COST = "token-cost";
    public static final String STATISTIC_CONVERSATION = "conversation";
    public static final String STATISTIC_IMAGE_COST = "image-cost";
    public static final String TOKEN_USAGE_KEY = "token:usage:{0}";
    /**
     * Whether the user is currently indexing documents.
     * {0}: user id.
     */
    public static final String USER_INDEXING = "user:indexing:{0}";
    /**
     * Concurrency guard for drawing-comment submission.
     * {0}: user id.
     */
    public static final String DRAW_COMMENT_LIMIT_KEY = "user:draw:comment-submitting:{0}";
    // Workflow caches: definitions, component lists, and copy-in-progress guard.
    public static final String WORKFLOW_KEY = "workflow";
    public static final String WORKFLOW_COMPONENTS = "workflow:components";
    public static final String WORKFLOW_COMPONENT_START_KEY = "workflow:component:start";
    public static final String WORKFLOW_COMPONENT_KEY = "workflow:component";
    public static final String WORKFLOW_COPY_DOING = "workflow:copy:doing:{0}";
    private RedisKeyConstant() {
    }
}

View File

@@ -0,0 +1,17 @@
package org.ruoyi.workflow.dto.workflow;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
/**
 * Request payload for creating a workflow.
 */
@Data
@Validated
public class WfAddReq {
    /** Workflow title; must not be blank. */
    @NotBlank
    private String title;
    /** Optional description. */
    private String remark;
    /** Whether the workflow is visible to other users. */
    private Boolean isPublic;
}

View File

@@ -0,0 +1,15 @@
package org.ruoyi.workflow.dto.workflow;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
/**
 * Request payload for updating a workflow's basic info (title/remark/visibility).
 */
@Validated
@Data
public class WfBaseInfoUpdateReq {
    /** Uuid of the workflow to update; must not be blank. */
    @NotBlank
    private String uuid;
    private String title;
    private String remark;
    private Boolean isPublic;
}

View File

@@ -0,0 +1,18 @@
package org.ruoyi.workflow.dto.workflow;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
/**
 * Request payload for creating or updating a workflow component.
 */
@Data
@Validated
public class WfComponentReq {
    // Component uuid — presumably empty on create, set on update; confirm in service layer.
    private String uuid;
    /** Machine-readable component name; must not be blank. */
    @NotBlank(message = "名称不能为空") // fixed copy-pasted message that wrongly said "标题不能为空"
    private String name;
    /** Display title; must not be blank. */
    @NotBlank(message = "标题不能为空")
    private String title;
    private String remark;
    private Boolean isEnable;
    private Integer displayOrder;
}

View File

@@ -0,0 +1,9 @@
package org.ruoyi.workflow.dto.workflow;
import lombok.Data;
/**
 * Search filter for workflow components.
 */
@Data
public class WfComponentSearchReq {
    /** Fuzzy-match on title (semantics defined by the query layer). */
    private String title;
    private Boolean isEnable;
}

View File

@@ -0,0 +1,25 @@
package org.ruoyi.workflow.dto.workflow;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
/**
 * Workflow edge payload: connects a source node (optionally a specific handle,
 * e.g. a conditional branch) to a target node.
 */
@Validated
@Data
public class WfEdgeReq {
    private Long id;
    @NotBlank
    private String uuid;
    @Min(1)
    private Long workflowId;
    /** Uuid of the node the edge starts from. */
    @NotBlank
    private String sourceNodeUuid;
    /** Optional handle on the source node (e.g. a branch output). */
    private String sourceHandle;
    /** Uuid of the node the edge points to. */
    @NotBlank
    private String targetNodeUuid;
    /**
     * Whether this edge is newly added (vs. an existing one being updated).
     */
    private Boolean isNew;
}

View File

@@ -0,0 +1,32 @@
package org.ruoyi.workflow.dto.workflow;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
/**
 * Workflow node payload: component reference, configuration and canvas position.
 */
@Validated
@Data
public class WfNodeDto {
    private Long id;
    /** Node uuid; fixed 32-character format. */
    @NotBlank
    @Size(min = 32, max = 32)
    private String uuid;
    private Long workflowId;
    /** Id of the component (node type) this node instantiates. */
    @Min(1)
    private Long workflowComponentId;
    @NotBlank
    private String title;
    private String remark;
    /** Input parameter configuration (JSON). */
    @NotNull
    private ObjectNode inputConfig;
    /** Component-specific node configuration (JSON). */
    @NotNull
    private ObjectNode nodeConfig;
    // Canvas coordinates of the node in the visual editor.
    @NotNull
    private Double positionX;
    @NotNull
    private Double positionY;
}

View File

@@ -0,0 +1,17 @@
package org.ruoyi.workflow.dto.workflow;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
/**
 * Runtime state of a single node within a workflow execution.
 */
@Validated
@Data
public class WfRuntimeNodeDto {
    private Long id;
    private String uuid;
    /** Id of the owning workflow runtime (execution instance). */
    private Long workflowRuntimeId;
    /** Id of the workflow node definition being executed. */
    private Long nodeId;
    private ObjectNode input;
    private ObjectNode output;
    /** Node process status; see WorkflowConstant.NODE_PROCESS_STATUS_*. */
    private Integer status;
}

View File

@@ -0,0 +1,22 @@
package org.ruoyi.workflow.dto.workflow;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.Data;
import java.time.LocalDateTime;
/**
 * Response describing one workflow execution instance.
 */
@Data
public class WfRuntimeResp {
    private Long id;
    private String uuid;
    private Long workflowId;
    private ObjectNode input;
    private ObjectNode output;
    /** Workflow process status; see WorkflowConstant.WORKFLOW_PROCESS_STATUS_*. */
    private Integer status;
    /** Human-readable remark accompanying the status (e.g. failure reason). */
    private String statusRemark;
    private String workflowUuid;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private LocalDateTime createTime;
}

View File

@@ -0,0 +1,10 @@
package org.ruoyi.workflow.dto.workflow;
import lombok.Data;
/**
 * Search filter for workflows.
 */
@Data
public class WfSearchReq {
    private String title;
    private Boolean isEnable;
    private Boolean isPublic;
}

View File

@@ -0,0 +1,22 @@
package org.ruoyi.workflow.dto.workflow;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.List;
/**
 * Full workflow response: base info, owner info, and the node/edge graph.
 */
@Data
public class WorkflowResp {
    private Long id;
    private String uuid;
    private String title;
    private String remark;
    private Boolean isPublic;
    // Owner of the workflow.
    private Long userId;
    private String userUuid;
    private String userName;
    /** Graph definition: nodes and connecting edges. */
    private List<WfNodeDto> nodes;
    private List<WfEdgeReq> edges;
    private LocalDateTime createTime;
    private LocalDateTime updateTime;
}

View File

@@ -0,0 +1,8 @@
package org.ruoyi.workflow.dto.workflow;
import lombok.Data;
/**
 * Request to resume an interrupted workflow (human-in-the-loop):
 * carries the user's feedback that the workflow was waiting for.
 */
@Data
public class WorkflowResumeReq {
    private String feedbackContent;
}

View File

@@ -0,0 +1,14 @@
package org.ruoyi.workflow.dto.workflow;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.Data;
import java.util.List;
/**
 * Request to start a workflow run.
 */
@Data
public class WorkflowRunReq {
    /** Input values for the workflow's start node (one JSON object per input). */
    private List<ObjectNode> inputs;
    /** Uuid of the workflow to run. */
    private String uuid;
}

View File

@@ -0,0 +1,24 @@
package org.ruoyi.workflow.dto.workflow;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import lombok.Data;
import org.springframework.validation.annotation.Validated;
import java.util.List;
/**
 * Request to update a workflow's graph: upserted nodes/edges plus
 * uuids of nodes/edges to delete.
 */
@Validated
@Data
public class WorkflowUpdateReq {
    @NotBlank
    private String uuid;
    /** Nodes to create or update; at least one required. */
    @Size(min = 1)
    private List<WfNodeDto> nodes;
    /** Edges to create or update. */
    @NotNull
    private List<WfEdgeReq> edges;
    // Uuids of nodes/edges removed in the editor.
    private List<String> deleteNodes;
    private List<String> deleteEdges;
}

View File

@@ -0,0 +1,29 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import java.io.Serializable;
import java.time.LocalDateTime;
/**
 * Common base for all MyBatis-Plus entities: auto-increment id, audit
 * timestamps, and a soft-delete flag.
 * NOTE(review): unlike subclasses, serialVersionUID here lacks @Serial — harmless,
 * but inconsistent.
 */
@Data
public class BaseEntity implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Auto-increment primary key. */
    @TableId(type = IdType.AUTO)
    private Long id;
    @TableField(value = "create_time")
    private LocalDateTime createTime;
    @TableField(value = "update_time")
    private LocalDateTime updateTime;
    /** Soft-delete flag: 0 = active, 1 = deleted. */
    @Schema(title = "是否删除0未删除1已删除")
    @TableField(value = "is_deleted")
    private Boolean isDeleted;
}

View File

@@ -0,0 +1,66 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.ruoyi.workflow.enums.UserStatusEnum;
import java.time.LocalDateTime;
/**
 * User account entity (table adi_user): credentials, quotas and status.
 * NOTE(review): several @Schema annotations use "name" where siblings use
 * "title" — probably unintended; confirm against the generated OpenAPI docs.
 */
@EqualsAndHashCode(callSuper = true)
@Data
@TableName("adi_user")
@Schema(title = "User对象")
public class User extends BaseEntity {
    @Schema(name = "用户名称")
    @TableField("name")
    private String name;
    @TableField("email")
    private String email;
    // Stored password hash — hashing scheme not visible here; confirm in auth service.
    @TableField("password")
    private String password;
    @TableField("uuid")
    private String uuid;
    /** Number of prompt/answer message pairs carried as conversation context. */
    @Schema(name = "上下文理解中需要携带的消息对数量(提示词及回复)")
    @TableField("understand_context_msg_pair_num")
    private Integer understandContextMsgPairNum;
    // Usage quotas: tokens, requests and images, per day and per month.
    @Schema(name = "token quota in one day")
    @TableField("quota_by_token_daily")
    private Integer quotaByTokenDaily;
    @Schema(name = "token quota in one month")
    @TableField("quota_by_token_monthly")
    private Integer quotaByTokenMonthly;
    @Schema(name = "request quota in one day")
    @TableField("quota_by_request_daily")
    private Integer quotaByRequestDaily;
    @Schema(name = "request quota in one month")
    @TableField("quota_by_request_monthly")
    private Integer quotaByRequestMonthly;
    @TableField("quota_by_image_daily")
    private Integer quotaByImageDaily;
    @TableField("quota_by_image_monthly")
    private Integer quotaByImageMonthly;
    /** Account status; persisted via IEnum value mapping. */
    @TableField("user_status")
    private UserStatusEnum userStatus;
    /** Time the account was activated. */
    @TableField("active_time")
    private LocalDateTime activeTime;
    /** Admin flag: 0 = regular user, 1 = administrator. */
    @Schema(title = "是否管理员01")
    @TableField(value = "is_admin")
    private Boolean isAdmin;
}

View File

@@ -0,0 +1,37 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serial;
/**
 * Workflow definition entity (table t_workflow).
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName("t_workflow")
@Schema(title = "工作流定义 | workflow definition")
public class Workflow extends BaseEntity {
    @Serial
    private static final long serialVersionUID = 1L;
    @TableField("uuid")
    private String uuid;
    @TableField("title")
    private String title;
    @TableField("remark")
    private String remark;
    /** Owner user id. */
    @TableField("user_id")
    private Long userId;
    /** Whether the workflow is visible to other users. */
    @TableField("is_public")
    private Boolean isPublic;
    @TableField("is_enable")
    private Boolean isEnable;
}

View File

@@ -0,0 +1,37 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serial;
/**
 * Workflow component entity (table t_workflow_component): a node type
 * (Start, End, Answer, Switcher, ...) available in the editor palette.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName(value = "t_workflow_component", autoResultMap = true)
@Schema(title = "工作流组件")
public class WorkflowComponent extends BaseEntity {
    @Serial
    private static final long serialVersionUID = 1L;
    @TableField("uuid")
    private String uuid;
    /** Machine-readable component name. */
    @TableField("name")
    private String name;
    /** Display title shown in the editor. */
    @TableField("title")
    private String title;
    @TableField("remark")
    private String remark;
    /** Sort order within the component palette. */
    @TableField("display_order")
    private Integer displayOrder;
    @TableField("is_enable")
    private Boolean isEnable;
}

View File

@@ -0,0 +1,34 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serial;
/**
 * Workflow edge entity (table t_workflow_edge): directed connection between
 * two nodes, optionally from a specific output handle (e.g. a branch).
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName("t_workflow_edge")
@Schema(title = "工作流定义-边 | workflow definition edge")
public class WorkflowEdge extends BaseEntity {
    @Serial
    private static final long serialVersionUID = 1L;
    @TableField("uuid")
    private String uuid;
    @TableField("workflow_id")
    private Long workflowId;
    @TableField("source_node_uuid")
    private String sourceNodeUuid;
    /** Optional output handle on the source node (e.g. conditional branch). */
    @TableField("source_handle")
    private String sourceHandle;
    @TableField("target_node_uuid")
    private String targetNodeUuid;
}

View File

@@ -0,0 +1,46 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serial;
/**
 * Workflow node entity (table t_workflow_node): one node instance of a
 * component within a workflow, with its configuration and canvas position.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName(value = "t_workflow_node", autoResultMap = true)
@Schema(title = "工作流定义-节点 | workflow definition node")
public class WorkflowNode extends BaseEntity {
    @Serial
    private static final long serialVersionUID = 1L;
    @TableField("uuid")
    private String uuid;
    @TableField("workflow_id")
    private Long workflowId;
    /** Component (node type) this node instantiates. */
    @TableField("workflow_component_id")
    private Long workflowComponentId;
    @TableField("title")
    private String title;
    @TableField("remark")
    private String remark;
    /** Input parameter configuration, serialized JSON. */
    @TableField(value = "input_config")
    private String inputConfig;
    /** Component-specific configuration, serialized JSON. */
    @TableField(value = "node_config")
    private String nodeConfig;
    // Canvas coordinates in the visual editor.
    @TableField("position_x")
    private Double positionX;
    @TableField("position_y")
    private Double positionY;
}

View File

@@ -0,0 +1,40 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serial;
/**
 * Workflow runtime entity (table t_workflow_runtime): one execution instance
 * of a workflow, with its overall input/output and status.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName(value = "t_workflow_runtime", autoResultMap = true)
@Schema(title = "工作流运行时 | Workflow runtime")
public class WorkflowRuntime extends BaseEntity {
    @Serial
    private static final long serialVersionUID = 1L;
    @TableField("uuid")
    private String uuid;
    /** User who triggered the run. */
    @TableField("user_id")
    private Long userId;
    @TableField("workflow_id")
    private Long workflowId;
    /** Run input, serialized JSON. */
    @TableField(value = "input")
    private String input;
    /** Run output, serialized JSON. */
    @TableField(value = "output")
    private String output;
    /** Run status; see WorkflowConstant.WORKFLOW_PROCESS_STATUS_*. */
    @TableField("status")
    private Integer status;
    @TableField("status_remark")
    private String statusRemark;
}

View File

@@ -0,0 +1,42 @@
package org.ruoyi.workflow.entity;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serial;
/**
 * Workflow runtime node entity (table t_workflow_runtime_node): execution
 * record of a single node within a workflow run.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@TableName(value = "t_workflow_runtime_node", autoResultMap = true)
@Schema(title = "工作流实例-节点 | Workflow runtime - node")
public class WorkflowRuntimeNode extends BaseEntity {
    @Serial
    private static final long serialVersionUID = 1L;
    @TableField("uuid")
    private String uuid;
    @TableField("user_id")
    private Long userId;
    /** Owning execution instance. */
    @TableField("workflow_runtime_id")
    private Long workflowRuntimeId;
    /** Node definition being executed. */
    @TableField("node_id")
    private Long nodeId;
    /** Node input, serialized JSON. */
    @TableField(value = "input")
    private String input;
    /** Node output, serialized JSON. */
    @TableField(value = "output")
    private String output;
    /** Node status; see WorkflowConstant.NODE_PROCESS_STATUS_*. */
    @TableField("status")
    private Integer status;
    @TableField("status_remark")
    private String statusRemark;
}

View File

@@ -0,0 +1,14 @@
package org.ruoyi.workflow.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
@Getter
@AllArgsConstructor
public enum AiModelStatus implements BaseEnum {
ACTIVE(1, "启用"),
INACTIVE(2, "停用");
private final Integer value;
private final String desc;
}

View File

@@ -0,0 +1,12 @@
package org.ruoyi.workflow.enums;
import com.baomidou.mybatisplus.annotation.IEnum;
/**
 * Base contract for persistable enums: the integer value is stored in the
 * database via MyBatis-Plus {@code IEnum}, the description is for display.
 */
public interface BaseEnum extends IEnum<Integer> {
    /**
     * Returns the human-readable description of this enum constant.
     *
     * @return String
     */
    String getDesc();
}

View File

@@ -0,0 +1,127 @@
package org.ruoyi.workflow.enums;
import lombok.Getter;
/**
 * Business error codes. Prefixes: A = client-side errors, B = server-side
 * errors, C = third-party errors.
 *
 * NOTE(review): several codes are duplicated (A0024, A0032, B0008, B0009) and
 * code-length is inconsistent (A0035 vs. A00034). Values are kept as-is because
 * clients may already depend on them; lookup by a duplicated code returns the
 * first declared constant.
 */
public enum ErrorEnum {
    SUCCESS("00000", "成功"),
    A_URL_NOT_FOUND("A0001", "地址不存在"),
    A_PARAMS_ERROR("A0002", "参数校验不通过"),
    A_REQUEST_TOO_MUCH("A0003", "访问次数太多"),
    A_LOGIN_ERROR("A0004", "登陆失败,账号或密码错误"),
    A_LOGIN_ERROR_MAX("A0005", "失败次数太多,请输入验证码重试"),
    A_LOGIN_CAPTCHA_ERROR("A0006", "验证码不正确"),
    A_USER_NOT_EXIST("A0007", "用户不存在"),
    A_CONVERSATION_NOT_EXIST("A0008", "对话不存在"),
    A_IMAGE_NUMBER_ERROR("A0009", "图片数量不对"),
    A_IMAGE_SIZE_ERROR("A0010", "图片尺寸不对"),
    A_FILE_NOT_EXIST("A0011", "文件不存在"),
    A_DRAWING("A0012", "作图还未完成"),
    A_USER_EXIST("A0013", "账号已经存在,请使用账号密码登录"),
    A_FIND_PASSWORD_CODE_ERROR("A0014", "重置码已过期或不存在"),
    A_USER_WAIT_CONFIRM("A0015", "用户未激活"),
    A_USER_NOT_AUTH("A0016", "用户无权限"),
    A_DATA_NOT_FOUND("A0017", "数据不存在"),
    A_UPLOAD_FAIL("A0018", "上传失败"),
    A_QA_ASK_LIMIT("A0019", "请求次数太多"),
    A_QA_ITEM_LIMIT("A0020", "知识点生成已超额度"),
    A_CONVERSATION_EXIST("A0021", "会话(角色)已存在"),
    A_MODEL_NOT_FOUND("A0022", "模型不存在"),
    A_MODEL_ALREADY_EXIST("A0023", "模型已存在"),
    A_CONVERSATION_NOT_FOUND("A0024", "会话(角色)找不到"),
    // NOTE(review): duplicate code A0024 — lookup resolves to A_CONVERSATION_NOT_FOUND.
    A_AI_IMAGE_NOT_FOUND("A0024", "图片找不到"),
    A_ENABLE_MODEL_NOT_FOUND("A0025", "没有可用的模型"),
    A_DOC_INDEX_DOING("A0026", "文档索引正在进行中,请稍后重试"),
    A_PRESET_CONVERSATION_NOT_EXIST("A0027", "预设会话或角色不存在"),
    A_CONVERSATION_TITLE_EXIST("A0028", "会话(角色)标题已存在"),
    A_AI_IMAGE_NO_AUTH("A0029", "无权限查看该图片"),
    A_USER_NOT_FOUND("A0030", "用户不存在"),
    A_ACTIVE_CODE_INVALID("A0031", "激活码已失效"),
    A_OLD_PASSWORD_INVALID("A0032", "原密码不正确"),
    // NOTE(review): duplicate code A0032 — lookup resolves to A_OLD_PASSWORD_INVALID.
    A_OPT_TOO_FREQUENTLY("A0032", "操作太频繁"),
    A_DRAW_NOT_FOUND("A00033", "绘图记录找不到"),
    A_WF_NOT_FOUND("A00034", "工作流找不到"),
    A_WF_DISABLED("A0035", "工作流已停用"),
    A_WF_NODE_NOT_FOUND("A0036", "工作流节点找不到"),
    A_WF_NODE_CONFIG_NOT_FOUND("A0037", "工作流节点配置找不到"),
    A_WF_NODE_CONFIG_ERROR("A0038", "工作流节点配置异常"),
    A_WF_INPUT_INVALID("A0039", "工作流输入参数错误"),
    A_WF_INPUT_MISSING("A0040", "工作流输入缺少参数"),
    A_WF_MULTIPLE_START_NODE("A0041", "多个开始节点"),
    A_WF_START_NODE_NOT_FOUND("A0042", "没有开始节点"),
    A_WF_END_NODE_NOT_FOUND("A0043", "没有结束节点"),
    A_WF_EDGE_NOT_FOUND("A0044", "工作流的边找不到"),
    A_WF_RUNTIME_NOT_FOUND("A00045", "工作流运行时数据找不到"),
    A_SEARCH_QUERY_IS_EMPTY("A00046", "搜索内容不能为空"),
    A_WF_COMPONENT_NOT_FOUND("A00047", "工作流基础组件找不到"),
    A_WF_RESUME_FAIL("A00048", "工作流恢复执行时失败"),
    A_MAIL_SENDER_EMPTY("A00049", "邮件发送人不能为空"),
    A_MAIL_SENDER_CONFIG_ERROR("A00050", "邮件发送人配置错误"),
    A_MAIL_RECEIVER_EMPTY("A00051", "邮件接收人不能为空"),
    A_MCP_SERVER_NOT_FOUND("A00052", "MCP服务找不到"),
    A_USER_MCP_SERVER_NOT_FOUND("A00053", "用户的MCP服务找不到"),
    A_PARAMS_INVALID_BY_("A00054", "参数校验异常:{0}"),
    A_AI_MESSAGE_NOT_FOUND("A00055", "找不到AI的消息"),
    A_USER_QUESTION_NOT_FOUND("A00056", "用户问题不存在"),
    A_PLATFORM_NOT_MATCH("A0057", "平台不匹配"),
    B_UNCAUGHT_ERROR("B0001", "未捕捉异常"),
    B_COMMON_ERROR("B0002", "业务出错"),
    B_GLOBAL_ERROR("B0003", "全局异常"),
    B_SAVE_IMAGE_ERROR("B0004", "保存图片异常"),
    B_FIND_IMAGE_404("B0005", "无法找到图片"),
    B_DAILY_QUOTA_USED("B0006", "今天额度已经用完"),
    B_MONTHLY_QUOTA_USED("B0007", "当月额度已经用完"),
    B_LLM_NOT_SUPPORT("B0008", "LLM不支持该功能"),
    B_LLM_SECRET_KEY_NOT_SET("B0009", "LLM的secret key没设置"),
    // NOTE(review): duplicate codes B0008/B0009 — lookup resolves to the constants above.
    B_MESSAGE_NOT_FOUND("B0008", "消息不存在"),
    B_LLM_SERVICE_DISABLED("B0009", "LLM服务不可用"),
    B_KNOWLEDGE_BASE_IS_EMPTY("B0010", "知识库内容为空"),
    B_NO_ANSWER("B0011", "[无答案]"),
    B_SAVE_FILE_ERROR("B0012", "保存文件异常"),
    B_BREAK_SEARCH("B0013", "中断搜索"),
    B_GRAPH_FILTER_NOT_FOUND("B0014", "图过滤器未定义"),
    B_DB_ERROR("B0015", "数据库查询异常"),
    B_ACTIVE_USER_ERROR("B0016", "激活用户失败"),
    B_RESET_PASSWORD_ERROR("B0017", "重置密码失败"),
    B_IMAGE_LOAD_ERROR("B0018", "加载图片失败"),
    B_IO_EXCEPTION("B0019", "IO异常"),
    B_SERVER_EXCEPTION("B0020", "服务端异常"),
    B_DELETE_FILE_ERROR("B0021", "删除文件异常"),
    B_WF_RUN_ERROR("B0022", "工作流运行异常"),
    B_WF_NODE_DEFINITION_NOT_FOUND("B0023", "工作流节点定义找不到"),
    B_DIR_CREATE_FAIL("B0024", "创建目录失败"),
    B_LLM_TEMPERATURE_ERROR("B0025", "采样温度应该在 0.1-1之间"),
    B_ASR_SETTING_NOT_FOUND("B0026", "语音识别设置未找到"),
    B_URL_INVALID("B0027", "不是有效的网络地址"),
    B_ASR_MODEL_NOT_FOUND("B0028", "语音识别模型未找到"),
    B_TTS_SETTING_NOT_FOUND("B0029", "语音合成设置未找到"),
    B_TTS_MODEL_NOT_FOUND("B0030", "语音合成模型未找到"),
    B_VOICE_NOT_FOUND("B0031", "声音不存在"),
    C_DRAW_FAIL("C0001", "大模型生成图片失败,原因:{0}"),
    C_ALI_OSS_CONFIG_ERROR("C0002", "阿里云OSS初始化失败,原因:{0}"),
    C_LLM_RESPONSE_INVALID("C0003", "大模型生成结果内容无效"),
    C_WF_COMPONENT_DELETED_FAIL_BY_USED("C0004", "工作流组件已经被使用,无法被删除,可先停用");

    private final String code;
    private final String info;

    ErrorEnum(String code, String info) {
        this.code = code;
        this.info = info;
    }

    /** @return the business error code, e.g. {@code "A0001"} */
    public String getCode() {
        return code;
    }

    /** @return the human-readable message (may contain {0}-style placeholders) */
    public String getInfo() {
        return info;
    }

    /**
     * Resolves an error code to its constant; unknown (or null-mismatching)
     * codes fall back to {@link #B_COMMON_ERROR}. For duplicated codes the
     * first declared constant wins, matching the previous behavior.
     */
    public static ErrorEnum getErrorEnum(String code) {
        for (ErrorEnum candidate : values()) {
            if (candidate.code.equals(code)) {
                return candidate;
            }
        }
        return B_COMMON_ERROR;
    }
}

View File

@@ -0,0 +1,23 @@
package org.ruoyi.workflow.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Arrays;
@Getter
@AllArgsConstructor
public enum UserStatusEnum implements BaseEnum {
WAIT_CONFIRM(1, "待验证"),
NORMAL(2, "正常"),
FREEZE(3, "冻结");
private final Integer value;
private final String desc;
public static UserStatusEnum getByValue(Integer val) {
return Arrays.stream(UserStatusEnum.values()).filter(item -> item.value.equals(val)).findFirst().orElse(null);
}
}

View File

@@ -0,0 +1,25 @@
package org.ruoyi.workflow.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Arrays;
@Getter
@AllArgsConstructor
public enum WfIODataTypeEnum implements BaseEnum {
TEXT(1, "文本"),
NUMBER(2, "数字"),
OPTIONS(3, "下拉选项"),
FILES(4, "文件列表"),
BOOL(5, "布尔值"),
REF_INPUT(6, "引用节点的输入参数"),
REF_OUTPUT(7, "引用节点的输出参数");
private final Integer value;
private final String desc;
public static WfIODataTypeEnum getByValue(Integer val) {
return Arrays.stream(WfIODataTypeEnum.values()).filter(item -> item.value.equals(val)).findFirst().orElse(null);
}
}

View File

@@ -0,0 +1,138 @@
package org.ruoyi.workflow.helper;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.workflow.cosntant.AdiConstant;
import org.ruoyi.workflow.cosntant.RedisKeyConstant;
import org.ruoyi.workflow.entity.User;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
/**
 * Lifecycle helper for server-sent-events streams built on Spring's {@link SseEmitter}.
 * <p>
 * Responsibilities visible in this class:
 * <ul>
 *   <li>sending the START / partial / DONE / ERROR events of one stream;</li>
 *   <li>tracking already-completed emitters in a Guava cache so late writes are
 *       logged and dropped rather than attempted;</li>
 *   <li>maintaining a per-user "asking" flag in Redis (15s TTL) so a user cannot
 *       start a second request while one is still being answered.</li>
 * </ul>
 */
@Slf4j
@Service
public class SSEEmitterHelper {
    // Emitters that have been completed (normally or with error). Entries expire
    // after 10 minutes so the cache does not retain emitters forever.
    private static final Cache<SseEmitter, Boolean> COMPLETED_SSE = CacheBuilder.newBuilder()
            .expireAfterWrite(10, TimeUnit.MINUTES).build();
    @Resource
    private StringRedisTemplate stringRedisTemplate;
    /**
     * Split {@code content} on CR/LF and send each line as a partial message,
     * inserting the literal token "-_wrap_-" between lines. Each line is prefixed
     * with a single space — presumably so the client can reconstruct line breaks;
     * TODO confirm against the front-end protocol.
     *
     * @param sseEmitter target stream
     * @param name       SSE event name; blank means an unnamed data-only event
     * @param content    text chunk, possibly spanning multiple lines
     */
    public static void parseAndSendPartialMsg(SseEmitter sseEmitter, String name, String content) {
        if (Boolean.TRUE.equals(COMPLETED_SSE.getIfPresent(sseEmitter))) {
            log.warn("sseEmitter already completed,name:{}", name);
            return;
        }
        String[] lines = content.split("[\\r\\n]", -1);
        if (lines.length > 1) {
            sendPartial(sseEmitter, name, " " + lines[0]);
            for (int i = 1; i < lines.length; i++) {
                sendPartial(sseEmitter, name, "-_wrap_-");
                sendPartial(sseEmitter, name, " " + lines[i]);
            }
        } else {
            sendPartial(sseEmitter, name, " " + content);
        }
    }
    /**
     * Send one partial chunk. Dropped (with a warning) when the emitter has
     * already completed; IO failures are logged and swallowed so a broken client
     * connection does not abort the producing side.
     */
    public static void sendPartial(SseEmitter sseEmitter, String name, String msg) {
        if (Boolean.TRUE.equals(COMPLETED_SSE.getIfPresent(sseEmitter))) {
            log.warn("sseEmitter already completed,name:{}", name);
            return;
        }
        try {
            if (StringUtils.isNotBlank(name)) {
                sseEmitter.send(SseEmitter.event().name(name).data(msg));
            } else {
                sseEmitter.send(msg);
            }
        } catch (IOException ioException) {
            log.error("stream onNext error", ioException);
        }
    }
    /**
     * Guard against concurrent requests from the same user: when the Redis
     * "asking" flag is present, send an error event, complete the stream and
     * return {@code false}.
     *
     * @return {@code true} when the caller may proceed with a new request
     */
    public boolean checkOrComplete(User user, SseEmitter sseEmitter) {
        // Reject when a previous request of this user is still being answered.
        String askingKey = MessageFormat.format(RedisKeyConstant.USER_ASKING, user.getId());
        String askingVal = stringRedisTemplate.opsForValue().get(askingKey);
        if (StringUtils.isNotBlank(askingVal)) {
            sendErrorAndComplete(user.getId(), sseEmitter, "正在回复中...");
            return false;
        }
        return true;
    }
    /**
     * Mark the user as having an in-flight request (15s TTL acts as a safety
     * timeout) and emit the START event. On send failure the stream is completed
     * with the error and the Redis flag is cleared immediately.
     *
     * @param data optional payload attached to the START event; blank means none
     */
    public void startSse(User user, SseEmitter sseEmitter, String data) {
        String askingKey = MessageFormat.format(RedisKeyConstant.USER_ASKING, user.getId());
        stringRedisTemplate.opsForValue().set(askingKey, "1", 15, TimeUnit.SECONDS);
        try {
            SseEmitter.SseEventBuilder builder = SseEmitter.event().name(AdiConstant.SSEEventName.START);
            if (StringUtils.isNotBlank(data)) {
                builder.data(data);
            }
            sseEmitter.send(builder);
        } catch (IOException e) {
            log.error("startSse error", e);
            sseEmitter.completeWithError(e);
            COMPLETED_SSE.put(sseEmitter, Boolean.TRUE);
            stringRedisTemplate.delete(askingKey);
        }
    }
    /**
     * Emit the DONE event and complete the stream. The finally block guarantees
     * the emitter is marked completed and the Redis flag is cleared even when
     * the final send fails (in which case the IOException is rethrown wrapped).
     */
    public void sendComplete(long userId, SseEmitter sseEmitter, String msg) {
        if (Boolean.TRUE.equals(COMPLETED_SSE.getIfPresent(sseEmitter))) {
            log.warn("sseEmitter already completed,userId:{}", userId);
            delSseRequesting(userId);
            return;
        }
        try {
            sseEmitter.send(SseEmitter.event().name(AdiConstant.SSEEventName.DONE).data(msg));
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            COMPLETED_SSE.put(sseEmitter, Boolean.TRUE);
            delSseRequesting(userId);
            sseEmitter.complete();
        }
    }
    /**
     * Emit the ERROR event with {@code errorMsg} and complete the stream; same
     * cleanup guarantees as {@link #sendComplete(long, SseEmitter, String)}.
     */
    public void sendErrorAndComplete(long userId, SseEmitter sseEmitter, String errorMsg) {
        if (Boolean.TRUE.equals(COMPLETED_SSE.getIfPresent(sseEmitter))) {
            log.warn("sseEmitter already completed,ignore error:{}", errorMsg);
            delSseRequesting(userId);
            return;
        }
        try {
            SseEmitter.SseEventBuilder event = SseEmitter.event();
            event.name(AdiConstant.SSEEventName.ERROR);
            event.data(Objects.toString(errorMsg, ""));
            sseEmitter.send(event);
        } catch (IOException e) {
            log.warn("sendErrorAndComplete userId:{},errorMsg:{}", userId, errorMsg);
            throw new RuntimeException(e);
        } finally {
            COMPLETED_SSE.put(sseEmitter, Boolean.TRUE);
            delSseRequesting(userId);
            sseEmitter.complete();
        }
    }
    // Clear the per-user "asking" flag so the user may start a new request.
    private void delSseRequesting(long userId) {
        String askingKey = MessageFormat.format(RedisKeyConstant.USER_ASKING, userId);
        stringRedisTemplate.delete(askingKey);
    }
}

View File

@@ -0,0 +1,11 @@
package org.ruoyi.workflow.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.ruoyi.workflow.entity.WorkflowComponent;
@Mapper
public interface WorkflowComponentMapper extends BaseMapper<WorkflowComponent> {
    /**
     * Count workflow nodes that reference the component with the given uuid
     * (used to block deletion of components still in use).
     * SQL is defined in the corresponding mapper XML — confirm it exists.
     */
    Integer countRefNodes(@Param("uuid") String uuid);
}

View File

@@ -0,0 +1,9 @@
package org.ruoyi.workflow.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.ruoyi.workflow.entity.WorkflowEdge;
/** MyBatis-Plus mapper for workflow edges; inherits standard CRUD from {@link BaseMapper}. */
@Mapper
public interface WorkflowEdgeMapper extends BaseMapper<WorkflowEdge> {
}

View File

@@ -0,0 +1,9 @@
package org.ruoyi.workflow.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.ruoyi.workflow.entity.Workflow;
/** MyBatis-Plus mapper for workflow definitions; inherits standard CRUD from {@link BaseMapper}. */
@Mapper
public interface WorkflowMapper extends BaseMapper<Workflow> {
}

View File

@@ -0,0 +1,10 @@
package org.ruoyi.workflow.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.ruoyi.workflow.entity.WorkflowNode;
@Mapper
public interface WorkflowNodeMapper extends BaseMapper<WorkflowNode> {
    /**
     * Fetch the Start node of a workflow.
     * SQL is defined in the corresponding mapper XML — confirm it exists.
     * Single parameter, so no @Param annotation is required by MyBatis.
     */
    WorkflowNode getStartNode(long workflowId);
}

View File

@@ -0,0 +1,13 @@
package org.ruoyi.workflow.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.ruoyi.workflow.entity.WorkflowRuntime;
@Mapper
public interface WorkflowRunMapper extends BaseMapper<WorkflowRuntime> {
    /**
     * Page workflow runtime records by workflow uuid, optionally filtered by
     * owning user. SQL is defined in the corresponding mapper XML — confirm it exists.
     */
    Page<WorkflowRuntime> pageByWfUuid(Page<WorkflowRuntime> page, @Param("wfUuid") String wfUuid, @Param("userId") Long userId);
}

View File

@@ -0,0 +1,9 @@
package org.ruoyi.workflow.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.ruoyi.workflow.entity.WorkflowRuntimeNode;
/** MyBatis-Plus mapper for workflow runtime node instances; inherits standard CRUD from {@link BaseMapper}. */
@Mapper
public interface WorkflowRuntimeNodeMapper extends BaseMapper<WorkflowRuntimeNode> {
}

View File

@@ -0,0 +1,126 @@
package org.ruoyi.workflow.service;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.extension.toolkit.ChainWrappers;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.dto.workflow.WfComponentReq;
import org.ruoyi.workflow.dto.workflow.WfComponentSearchReq;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.mapper.WorkflowComponentMapper;
import org.ruoyi.workflow.util.PrivilegeUtil;
import org.ruoyi.workflow.util.UuidUtil;
import org.ruoyi.workflow.workflow.WfComponentNameEnum;
import org.springframework.beans.BeanUtils;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import java.util.List;
import static org.ruoyi.workflow.cosntant.RedisKeyConstant.WORKFLOW_COMPONENTS;
import static org.ruoyi.workflow.cosntant.RedisKeyConstant.WORKFLOW_COMPONENT_START_KEY;
import static org.ruoyi.workflow.enums.ErrorEnum.C_WF_COMPONENT_DELETED_FAIL_BY_USED;
/**
 * CRUD and cache management for workflow components (the node type definitions).
 * <p>
 * The enabled-component list is cached under {@code WORKFLOW_COMPONENTS} and the
 * Start component under {@code WORKFLOW_COMPONENT_START_KEY}; every write
 * operation must evict both caches.
 */
@Slf4j
@Service
public class WorkflowComponentService extends ServiceImpl<WorkflowComponentMapper, WorkflowComponent> {

    // Self-injected proxy so internal calls to @Cacheable methods still pass
    // through the Spring cache advice instead of bypassing it.
    @Lazy
    @Resource
    private WorkflowComponentService self;

    /**
     * Create a component, or update an existing one when {@code req.uuid} is set.
     * <p>
     * Bug fix: the cached entries are stored under the no-argument key of
     * {@link #getAllEnable()} / {@link #getStartComponent()}, which never matches
     * this method's argument-derived eviction key, so without
     * {@code allEntries = true} the stale cache was never cleared.
     *
     * @param req component payload; {@code uuid} decides create vs update
     * @return the inserted or updated entity
     */
    @CacheEvict(cacheNames = {WORKFLOW_COMPONENTS, WORKFLOW_COMPONENT_START_KEY}, allEntries = true)
    public WorkflowComponent addOrUpdate(WfComponentReq req) {
        if (StringUtils.isNotBlank(req.getUuid())) {
            // Update path: verify the record exists and the caller may access it.
            WorkflowComponent existing = PrivilegeUtil.checkAndGetByUuid(req.getUuid(), this.query(), ErrorEnum.A_WF_COMPONENT_NOT_FOUND);
            WorkflowComponent update = new WorkflowComponent();
            // Copies all matching request fields (name/title/remark/isEnable/
            // displayOrder/...); identity fields are excluded — the explicit
            // setters that used to follow were redundant.
            BeanUtils.copyProperties(req, update, "id", "uuid");
            update.setId(existing.getId());
            this.baseMapper.updateById(update);
            return update;
        }
        // Create path: a fresh short uuid identifies the new component.
        WorkflowComponent created = new WorkflowComponent();
        BeanUtils.copyProperties(req, created, "id", "uuid");
        created.setUuid(UuidUtil.createShort());
        this.baseMapper.insert(created);
        return created;
    }

    /**
     * Enable or disable one component.
     *
     * @param uuid     component uuid
     * @param isEnable new enable state
     */
    @CacheEvict(cacheNames = {WORKFLOW_COMPONENTS, WORKFLOW_COMPONENT_START_KEY}, allEntries = true)
    public void enable(String uuid, Boolean isEnable) {
        WorkflowComponent wfComponent = PrivilegeUtil.checkAndGetByUuid(uuid, this.query(), ErrorEnum.A_WF_COMPONENT_NOT_FOUND);
        WorkflowComponent update = new WorkflowComponent();
        update.setIsEnable(isEnable);
        update.setId(wfComponent.getId());
        this.baseMapper.updateById(update);
    }

    /**
     * Soft-delete a component; also disables it. Rejected while any workflow
     * node still references the component.
     *
     * @throws BaseException when the component is in use or cannot be found
     */
    @CacheEvict(cacheNames = {WORKFLOW_COMPONENTS, WORKFLOW_COMPONENT_START_KEY}, allEntries = true)
    public void deleteByUuid(String uuid) {
        WorkflowComponent component = PrivilegeUtil.checkAndGetByUuid(uuid, this.query(), ErrorEnum.A_WF_COMPONENT_NOT_FOUND);
        Integer refNodeCount = baseMapper.countRefNodes(uuid);
        if (refNodeCount != null && refNodeCount > 0) {
            throw new BaseException(C_WF_COMPONENT_DELETED_FAIL_BY_USED.getInfo());
        }
        boolean updated = ChainWrappers.lambdaUpdateChain(baseMapper)
                .eq(WorkflowComponent::getId, component.getId())
                .set(WorkflowComponent::getIsDeleted, true)
                .set(WorkflowComponent::getIsEnable, false)
                .update();
        if (!updated) {
            throw new BaseException(ErrorEnum.A_WF_COMPONENT_NOT_FOUND.getInfo());
        }
    }

    /**
     * Paged search over non-deleted components, optionally filtered by enable
     * state and title substring, ordered by display order then id.
     */
    public Page<WorkflowComponent> search(WfComponentSearchReq searchReq, Integer currentPage, Integer pageSize) {
        LambdaQueryWrapper<WorkflowComponent> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(WorkflowComponent::getIsDeleted, false);
        wrapper.eq(null != searchReq.getIsEnable(), WorkflowComponent::getIsEnable, searchReq.getIsEnable());
        wrapper.like(StringUtils.isNotBlank(searchReq.getTitle()), WorkflowComponent::getTitle, searchReq.getTitle());
        wrapper.orderByAsc(List.of(WorkflowComponent::getDisplayOrder, WorkflowComponent::getId));
        return baseMapper.selectPage(new Page<>(currentPage, pageSize), wrapper);
    }

    /** All enabled, non-deleted components ordered by display order then id; cached. */
    @Cacheable(cacheNames = WORKFLOW_COMPONENTS)
    public List<WorkflowComponent> getAllEnable() {
        return ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowComponent::getIsEnable, true)
                .eq(WorkflowComponent::getIsDeleted, false)
                .orderByAsc(List.of(WorkflowComponent::getDisplayOrder, WorkflowComponent::getId))
                .list();
    }

    /**
     * The Start component (matched by name); cached.
     *
     * @throws BaseException when no enabled Start component exists
     */
    @Cacheable(cacheNames = WORKFLOW_COMPONENT_START_KEY)
    public WorkflowComponent getStartComponent() {
        return self.getAllEnable().stream()
                .filter(component -> WfComponentNameEnum.START.getName().equals(component.getName()))
                .findFirst()
                .orElseThrow(() -> new BaseException(ErrorEnum.B_WF_NODE_DEFINITION_NOT_FOUND.getInfo()));
    }

    /**
     * Look up an enabled component by id (served from the cached list).
     *
     * @throws BaseException when the id matches no enabled component
     */
    public WorkflowComponent getComponent(Long id) {
        return self.getAllEnable().stream()
                .filter(component -> component.getId().equals(id))
                .findFirst()
                .orElseThrow(() -> new BaseException(ErrorEnum.B_WF_NODE_DEFINITION_NOT_FOUND.getInfo()));
    }
}

View File

@@ -0,0 +1,118 @@
package org.ruoyi.workflow.service;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.extension.toolkit.ChainWrappers;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.dto.workflow.WfEdgeReq;
import org.ruoyi.workflow.entity.WorkflowEdge;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.mapper.WorkflowEdgeMapper;
import org.ruoyi.workflow.util.MPPageUtil;
import org.ruoyi.workflow.util.UuidUtil;
import org.springframework.beans.BeanUtils;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
/**
 * Persistence service for workflow edges (connections between nodes).
 * Edges are never hard-deleted: removal sets the {@code isDeleted} flag.
 */
@Slf4j
@Service
public class WorkflowEdgeService extends ServiceImpl<WorkflowEdgeMapper, WorkflowEdge> {

    // Self-injected proxy so internal calls keep Spring AOP semantics.
    @Lazy
    @Resource
    private WorkflowEdgeService self;

    /** All live edges of a workflow converted to DTOs. */
    public List<WfEdgeReq> listDtoByWfId(long workflowId) {
        List<WorkflowEdge> edges = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowEdge::getWorkflowId, workflowId)
                .eq(WorkflowEdge::getIsDeleted, false)
                .list();
        return MPPageUtil.convertToList(edges, WfEdgeReq.class);
    }

    /**
     * Upsert the submitted edges for a workflow, then soft-delete every stored
     * edge whose uuid is absent from the submitted list.
     */
    @Transactional
    public void createOrUpdateEdges(Long workflowId, List<WfEdgeReq> edges) {
        List<String> uuidList = new ArrayList<>();
        for (WfEdgeReq edge : edges) {
            WorkflowEdge newOne = new WorkflowEdge();
            BeanUtils.copyProperties(edge, newOne);
            newOne.setWorkflowId(workflowId);
            WorkflowEdge old = self.getByUuid(edge.getUuid());
            if (null != old) {
                log.info("更新边,id:{},uuid:{},source:{},sourceHandle:{},target:{}",
                        edge.getId(), edge.getUuid(), edge.getSourceNodeUuid(), edge.getSourceHandle(), edge.getTargetNodeUuid());
                newOne.setId(old.getId());
            } else {
                // Force an insert even if the request carried a stray id.
                newOne.setId(null);
                log.info("新增边,uuid:{},source:{},sourceHandle:{},target:{}",
                        edge.getUuid(), edge.getSourceNodeUuid(), edge.getSourceHandle(), edge.getTargetNodeUuid());
            }
            uuidList.add(edge.getUuid());
            self.saveOrUpdate(newOne);
        }
        // Soft-delete edges that were removed in the editor.
        ChainWrappers.lambdaUpdateChain(baseMapper)
                .eq(WorkflowEdge::getWorkflowId, workflowId)
                .notIn(CollUtil.isNotEmpty(uuidList), WorkflowEdge::getUuid, uuidList)
                .set(WorkflowEdge::getIsDeleted, true)
                .update();
    }

    /** All live edge entities of a workflow. */
    public List<WorkflowEdge> listByWorkflowId(Long workflowId) {
        return ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowEdge::getWorkflowId, workflowId)
                .eq(WorkflowEdge::getIsDeleted, false)
                .list();
    }

    /**
     * Copy every live edge of {@code workflowId} into {@code targetWorkflow}.
     *
     * @return the freshly inserted copies
     */
    public List<WorkflowEdge> copyByWorkflowId(long workflowId, long targetWorkflow) {
        List<WorkflowEdge> result = new ArrayList<>();
        self.listByWorkflowId(workflowId).forEach(edge -> {
            result.add(self.copyEdge(targetWorkflow, edge));
        });
        return result;
    }

    /** Duplicate one edge into another workflow with a brand-new uuid. */
    public WorkflowEdge copyEdge(long targetWorkflow, WorkflowEdge sourceEdge) {
        WorkflowEdge newEdge = new WorkflowEdge();
        BeanUtils.copyProperties(sourceEdge, newEdge, "id", "uuid", "createTime", "updateTime");
        newEdge.setUuid(UuidUtil.createShort());
        newEdge.setWorkflowId(targetWorkflow);
        baseMapper.insert(newEdge);
        // Re-read so DB-populated columns are present — presumably timestamps.
        return getById(newEdge.getId());
    }

    /**
     * Soft-delete the given edges, verifying each one belongs to the workflow.
     *
     * @throws BaseException when an edge belongs to a different workflow
     */
    @Transactional
    public void deleteEdges(Long workflowId, List<String> uuids) {
        if (CollectionUtils.isEmpty(uuids)) {
            return;
        }
        for (String uuid : uuids) {
            WorkflowEdge old = self.getByUuid(uuid);
            if (null != old && !old.getWorkflowId().equals(workflowId)) {
                // Bug fix: log the edge's actual owner (old.getWorkflowId());
                // previously workflowId was passed for both placeholders.
                log.error("该边不属于指定的工作流,删除失败,workflowId:{},node workflowId:{}", workflowId, old.getWorkflowId());
                throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
            }
            ChainWrappers.lambdaUpdateChain(baseMapper)
                    .eq(WorkflowEdge::getWorkflowId, workflowId)
                    .eq(WorkflowEdge::getUuid, uuid)
                    .set(WorkflowEdge::getIsDeleted, true)
                    .update();
        }
    }

    /** Fetch one live edge by uuid, or {@code null} when absent. */
    public WorkflowEdge getByUuid(String uuid) {
        return ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowEdge::getUuid, uuid)
                .eq(WorkflowEdge::getIsDeleted, false)
                .last("limit 1")
                .one();
    }
}

View File

@@ -0,0 +1,245 @@
package org.ruoyi.workflow.service;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.extension.toolkit.ChainWrappers;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.dto.workflow.WfNodeDto;
import org.ruoyi.workflow.entity.Workflow;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.mapper.WorkflowNodeMapper;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.util.MPPageUtil;
import org.ruoyi.workflow.util.UuidUtil;
import org.ruoyi.workflow.workflow.WfComponentNameEnum;
import org.ruoyi.workflow.workflow.WfNodeInputConfig;
import org.ruoyi.workflow.workflow.def.WfNodeIOText;
import org.springframework.beans.BeanUtils;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
/**
 * Persistence service for workflow nodes. Nodes are soft-deleted via the
 * {@code isDeleted} flag; node/input configuration is stored as JSON strings.
 */
@Slf4j
@Service
public class WorkflowNodeService extends ServiceImpl<WorkflowNodeMapper, WorkflowNode> {

    // Self-injected proxy so internal calls keep Spring AOP semantics.
    @Lazy
    @Resource
    private WorkflowNodeService self;

    @Resource
    private WorkflowComponentService workflowComponentService;

    /** The Start node of a workflow (resolved in the mapper XML). */
    public WorkflowNode getStartNode(long workflowId) {
        return baseMapper.getStartNode(workflowId);
    }

    /**
     * All live nodes of a workflow as DTOs, with the JSON config columns parsed
     * into object trees for the front-end.
     */
    public List<WfNodeDto> listDtoByWfId(long workflowId) {
        List<WorkflowNode> workflowNodeList = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowNode::getWorkflowId, workflowId)
                .eq(WorkflowNode::getIsDeleted, false)
                .list();
        workflowNodeList.forEach(this::checkAndDecrypt);
        return MPPageUtil.convertToList(workflowNodeList, WfNodeDto.class, (source, target) -> {
            target.setInputConfig(JsonUtil.toBean(source.getInputConfig(), ObjectNode.class));
            target.setNodeConfig(JsonUtil.toBean(source.getNodeConfig(), ObjectNode.class));
            return target;
        });
    }

    /** Fetch one live node of a workflow by uuid, or {@code null} when absent. */
    public WorkflowNode getByUuid(long workflowId, String uuid) {
        WorkflowNode node = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowNode::getWorkflowId, workflowId)
                .eq(WorkflowNode::getUuid, uuid)
                .eq(WorkflowNode::getIsDeleted, false)
                .last("limit 1")
                .one();
        checkAndDecrypt(node);
        return node;
    }

    /** All live node entities of a workflow (sensitive config decrypted). */
    public List<WorkflowNode> listByWorkflowId(Long workflowId) {
        List<WorkflowNode> list = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowNode::getWorkflowId, workflowId)
                .eq(WorkflowNode::getIsDeleted, false)
                .list();
        list.forEach(this::checkAndDecrypt);
        return list;
    }

    /**
     * Copy every live node of {@code workflowId} into {@code targetWorkflowId}.
     *
     * @return the freshly inserted copies
     */
    public List<WorkflowNode> copyByWorkflowId(long workflowId, long targetWorkflowId) {
        List<WorkflowNode> result = new ArrayList<>();
        self.listByWorkflowId(workflowId).forEach(node -> {
            result.add(self.copyNode(targetWorkflowId, node));
        });
        return result;
    }

    /**
     * Duplicate one node into another workflow. The node uuid is deliberately
     * kept (only id/timestamps are excluded) so copied edges, which reference
     * nodes by uuid, still match — TODO confirm this is the intended contract.
     */
    public WorkflowNode copyNode(Long targetWorkflowId, WorkflowNode sourceNode) {
        WorkflowNode newNode = new WorkflowNode();
        BeanUtils.copyProperties(sourceNode, newNode, "id", "createTime", "updateTime");
        newNode.setWorkflowId(targetWorkflowId);
        baseMapper.insert(newNode);
        return ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(WorkflowNode::getWorkflowId, targetWorkflowId)
                .eq(WorkflowNode::getUuid, newNode.getUuid())
                .eq(WorkflowNode::getIsDeleted, false)
                .last("limit 1")
                .one();
    }

    /**
     * Upsert the submitted nodes for a workflow, then soft-delete every stored
     * node whose uuid is absent from the submitted list.
     */
    @Transactional
    public void createOrUpdateNodes(Long workflowId, List<WfNodeDto> nodes) {
        // Fix: uuids are Strings — was declared List<Object>.
        List<String> uuidList = new ArrayList<>();
        for (WfNodeDto node : nodes) {
            WorkflowNode newOrUpdate = new WorkflowNode();
            BeanUtils.copyProperties(node, newOrUpdate);
            newOrUpdate.setInputConfig(JsonUtil.toJson(node.getInputConfig()));
            newOrUpdate.setNodeConfig(JsonUtil.toJson(node.getNodeConfig()));
            newOrUpdate.setWorkflowId(workflowId);
            checkAndEncrypt(newOrUpdate);
            WorkflowNode old = self.getByUuid(workflowId, node.getUuid());
            if (null != old) {
                log.info("更新节点,uuid:{},title:{}", node.getUuid(), node.getTitle());
                newOrUpdate.setId(old.getId());
            } else {
                log.info("新增节点,uuid:{},title:{}", node.getUuid(), node.getTitle());
                newOrUpdate.setId(null);
            }
            uuidList.add(node.getUuid());
            self.saveOrUpdate(newOrUpdate);
        }
        // Soft-delete nodes removed in the editor.
        ChainWrappers.lambdaUpdateChain(baseMapper)
                .eq(WorkflowNode::getWorkflowId, workflowId)
                .notIn(CollUtil.isNotEmpty(uuidList), WorkflowNode::getUuid, uuidList)
                .set(WorkflowNode::getIsDeleted, true)
                .update();
    }

    // Resolve the component definition of a node from the cached enabled list;
    // throws a parameter error when the component cannot be found.
    private WorkflowComponent getComponentOrThrow(WorkflowNode workflowNode) {
        WorkflowComponent component = workflowComponentService.getAllEnable()
                .stream()
                .filter(item -> item.getId().equals(workflowNode.getWorkflowComponentId()))
                .findFirst()
                .orElse(null);
        if (null == component) {
            log.error("节点不存在,uuid:{},title:{}", workflowNode.getUuid(), workflowNode.getTitle());
            throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
        }
        return component;
    }

    // Validate the node's component and, for mail-send nodes, encrypt sensitive
    // config before persisting. Encryption itself is not implemented yet:
    // TODO encrypt the mail sender password (database-level only for now;
    // front/back-end transport encryption still undecided).
    private void checkAndEncrypt(WorkflowNode workflowNode) {
        WorkflowComponent component = getComponentOrThrow(workflowNode);
        if (component.getName().equals(WfComponentNameEnum.MAIL_SEND.getName())) {
            // TODO: AES-encrypt MailSendNodeConfig.sender.password here.
        }
    }

    // Counterpart of checkAndEncrypt: decrypt sensitive config after loading.
    // Decryption is likewise not implemented yet.
    private void checkAndDecrypt(WorkflowNode workflowNode) {
        if (null == workflowNode) {
            log.warn("节点不存在");
            return;
        }
        WorkflowComponent component = getComponentOrThrow(workflowNode);
        if (component.getName().equals(WfComponentNameEnum.MAIL_SEND.getName())) {
            // TODO: AES-decrypt MailSendNodeConfig.sender.password here.
        }
    }

    /**
     * Soft-delete the given nodes. Nodes of other workflows cause a parameter
     * error; the Start node is silently skipped (it must never be removed).
     */
    @Transactional
    public void deleteNodes(Long workflowId, List<String> uuids) {
        if (CollectionUtils.isEmpty(uuids)) {
            return;
        }
        for (String uuid : uuids) {
            WorkflowNode old = self.getByUuid(workflowId, uuid);
            if (null == old) {
                continue;
            }
            if (!old.getWorkflowId().equals(workflowId)) {
                // Bug fix: log the node's actual owner (old.getWorkflowId());
                // previously workflowId was passed for both placeholders.
                log.error("节点不属于指定的工作流,删除失败,workflowId:{},node workflowId:{}", workflowId, old.getWorkflowId());
                throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
            }
            if (workflowComponentService.getStartComponent().getId().equals(old.getWorkflowComponentId())) {
                log.warn("开始节点不能删除,uuid:{}", old.getUuid());
                continue;
            }
            ChainWrappers.lambdaUpdateChain(baseMapper)
                    .eq(WorkflowNode::getWorkflowId, workflowId)
                    .eq(WorkflowNode::getUuid, uuid)
                    .set(WorkflowNode::getIsDeleted, true)
                    .update();
        }
    }

    /**
     * Create the mandatory Start node for a new workflow with one default text
     * input variable.
     * <p>
     * user_inputs example:
     * <pre>
     * [
     *   {
     *     "uuid": "12bc919774aa4e779d97e3dd9c836e11",
     *     "name": "var_user_input",
     *     "title": "用户输入",
     *     "type": 1,
     *     "required": true,
     *     "max_length": 1000
     *   }
     * ]
     * </pre>
     *
     * @param workflow the workflow definition the node belongs to
     * @return the inserted Start node
     */
    public WorkflowNode createStartNode(Workflow workflow) {
        WfNodeIOText wfNodeIOText = new WfNodeIOText();
        wfNodeIOText.setUuid(UuidUtil.createShort());
        wfNodeIOText.setType(WfIODataTypeEnum.TEXT.getValue());
        wfNodeIOText.setName("var_user_input");
        wfNodeIOText.setTitle("用户输入");
        wfNodeIOText.setRequired(false);
        wfNodeIOText.setMaxLength(1000);
        WfNodeInputConfig nodeInputConfig = new WfNodeInputConfig();
        nodeInputConfig.setUserInputs(List.of(wfNodeIOText));
        nodeInputConfig.setRefInputs(new ArrayList<>());
        WorkflowComponent startComponent = workflowComponentService.getStartComponent();
        WorkflowNode node = new WorkflowNode();
        node.setWorkflowComponentId(startComponent.getId());
        node.setWorkflowId(workflow.getId());
        node.setRemark("用户输入");
        node.setUuid(UuidUtil.createShort());
        node.setTitle("开始");
        node.setInputConfig(JsonUtil.toJson(nodeInputConfig));
        node.setNodeConfig("{}");
        baseMapper.insert(node);
        return node;
    }
}

View File

@@ -0,0 +1,103 @@
package org.ruoyi.workflow.service;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.extension.toolkit.ChainWrappers;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.extern.slf4j.Slf4j;
import org.ruoyi.workflow.base.ThreadContext;
import org.ruoyi.workflow.dto.workflow.WfRuntimeNodeDto;
import org.ruoyi.workflow.entity.User;
import org.ruoyi.workflow.entity.WorkflowRuntimeNode;
import org.ruoyi.workflow.mapper.WorkflowRuntimeNodeMapper;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.util.MPPageUtil;
import org.ruoyi.workflow.workflow.WfNodeState;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.List;
@Slf4j
@Service
public class WorkflowRuntimeNodeService extends ServiceImpl<WorkflowRuntimeNodeMapper, WorkflowRuntimeNode> {
public List<WfRuntimeNodeDto> listByWfRuntimeId(long runtimeId) {
List<WorkflowRuntimeNode> workflowNodeList = ChainWrappers.lambdaQueryChain(baseMapper)
.eq(!ThreadContext.getCurrentUser().getIsAdmin(), WorkflowRuntimeNode::getUserId, ThreadContext.getCurrentUser().getId())
.eq(WorkflowRuntimeNode::getWorkflowRuntimeId, runtimeId)
.eq(WorkflowRuntimeNode::getIsDeleted, false)
.orderByAsc(WorkflowRuntimeNode::getId)
.list();
List<WfRuntimeNodeDto> result = MPPageUtil.convertToList(workflowNodeList, WfRuntimeNodeDto.class);
for (WfRuntimeNodeDto dto : result) {
fillInputOutput(dto);
}
return result;
}
public WfRuntimeNodeDto createByState(User user, long wfNodeId, long wfRuntimeId, WfNodeState state) {
WorkflowRuntimeNode runtimeNode = new WorkflowRuntimeNode();
runtimeNode.setUuid(state.getUuid());
runtimeNode.setWorkflowRuntimeId(wfRuntimeId);
runtimeNode.setStatus(state.getProcessStatus());
runtimeNode.setUserId(user.getId());
runtimeNode.setNodeId(wfNodeId);
baseMapper.insert(runtimeNode);
runtimeNode = baseMapper.selectById(runtimeNode.getId());
WfRuntimeNodeDto result = new WfRuntimeNodeDto();
BeanUtils.copyProperties(runtimeNode, result);
fillInputOutput(result);
return result;
}
public void updateInput(Long id, WfNodeState state) {
if (CollectionUtils.isEmpty(state.getInputs())) {
log.warn("没有输入数据,id:{}", id);
return;
}
WorkflowRuntimeNode node = baseMapper.selectById(id);
if (null == node) {
log.error("节点实例不存在,id:{}", id);
return;
}
WorkflowRuntimeNode updateOne = new WorkflowRuntimeNode();
updateOne.setId(id);
ObjectNode ob = JsonUtil.createObjectNode();
state.getInputs().forEach(data -> ob.set(data.getName(), JsonUtil.classToJsonNode(data.getContent())));
updateOne.setInput(JsonUtil.toJson(ob));
updateOne.setStatus(state.getProcessStatus());
updateOne.setStatusRemark(state.getProcessStatusRemark());
baseMapper.updateById(updateOne);
}
public void updateOutput(Long id, WfNodeState state) {
WorkflowRuntimeNode node = baseMapper.selectById(id);
if (null == node) {
log.error("节点实例不存在,id:{}", id);
return;
}
WorkflowRuntimeNode updateOne = new WorkflowRuntimeNode();
updateOne.setId(id);
if (!CollectionUtils.isEmpty(state.getOutputs())) {
ObjectNode ob = JsonUtil.createObjectNode();
state.getOutputs().forEach(data -> ob.set(data.getName(), JsonUtil.classToJsonNode(data.getContent())));
updateOne.setOutput(JsonUtil.toJson(ob));
}
updateOne.setStatus(state.getProcessStatus());
updateOne.setStatusRemark(state.getProcessStatusRemark());
baseMapper.updateById(updateOne);
}
private void fillInputOutput(WfRuntimeNodeDto dto) {
if (null == dto.getInput()) {
dto.setInput(JsonUtil.createObjectNode());
}
if (null == dto.getOutput()) {
dto.setOutput(JsonUtil.createObjectNode());
}
}
}

View File

@@ -0,0 +1,174 @@
package org.ruoyi.workflow.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.extension.toolkit.ChainWrappers;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.workflow.base.ThreadContext;
import org.ruoyi.workflow.dto.workflow.WfRuntimeNodeDto;
import org.ruoyi.workflow.dto.workflow.WfRuntimeResp;
import org.ruoyi.workflow.entity.User;
import org.ruoyi.workflow.entity.Workflow;
import org.ruoyi.workflow.entity.WorkflowRuntime;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.mapper.WorkflowRunMapper;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.util.MPPageUtil;
import org.ruoyi.workflow.util.PrivilegeUtil;
import org.ruoyi.workflow.util.UuidUtil;
import org.ruoyi.workflow.workflow.WfState;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.util.List;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.WORKFLOW_PROCESS_STATUS_DOING;
@Slf4j
@Service
public class WorkflowRuntimeService extends ServiceImpl<WorkflowRunMapper, WorkflowRuntime> {
@Resource
private WorkflowService workflowService;
@Resource
private WorkflowRuntimeNodeService workflowRuntimeNodeService;
/**
 * Create a new runtime record for one workflow execution owned by {@code user}.
 *
 * @return the persisted record as a response DTO
 */
public WfRuntimeResp create(User user, Long workflowId) {
    WorkflowRuntime runtime = new WorkflowRuntime();
    runtime.setUuid(UuidUtil.createShort());
    runtime.setUserId(user.getId());
    runtime.setWorkflowId(workflowId);
    baseMapper.insert(runtime);
    // Re-read so DB-populated columns are present — presumably timestamps.
    return changeToDTO(baseMapper.selectById(runtime.getId()));
}
/**
 * Persist the workflow run's input snapshot and mark it as running.
 * No-op when the state carries no input or the runtime record is missing.
 */
public void updateInput(long id, WfState wfState) {
    List<NodeIOData> inputs = wfState.getInput();
    if (CollectionUtils.isEmpty(inputs)) {
        log.warn("没有输入数据,id:{}", id);
        return;
    }
    if (null == baseMapper.selectById(id)) {
        log.error("工作流实例不存在,id:{}", id);
        return;
    }
    ObjectNode json = JsonUtil.createObjectNode();
    inputs.forEach(item -> json.set(item.getName(), JsonUtil.classToJsonNode(item.getContent())));
    WorkflowRuntime toUpdate = new WorkflowRuntime();
    toUpdate.setId(id);
    toUpdate.setInput(JsonUtil.toJson(json));
    toUpdate.setStatus(WORKFLOW_PROCESS_STATUS_DOING);
    baseMapper.updateById(toUpdate);
}
/**
 * Persist the workflow run's output snapshot and final status.
 * <p>
 * Fix: tolerate a null/empty output list instead of risking an NPE when
 * iterating it — mirrors the guard in WorkflowRuntimeNodeService#updateOutput;
 * the status is still updated in that case.
 *
 * @param id      runtime record id
 * @param wfState finished workflow state
 * @return the update entity applied, or {@code null} when the record is missing
 */
public WorkflowRuntime updateOutput(long id, WfState wfState) {
    WorkflowRuntime existing = baseMapper.selectById(id);
    if (null == existing) {
        log.error("工作流实例不存在,id:{}", id);
        return null;
    }
    WorkflowRuntime updateOne = new WorkflowRuntime();
    updateOne.setId(id);
    if (!CollectionUtils.isEmpty(wfState.getOutput())) {
        ObjectNode ob = JsonUtil.createObjectNode();
        for (NodeIOData data : wfState.getOutput()) {
            ob.set(data.getName(), JsonUtil.classToJsonNode(data.getContent()));
        }
        updateOne.setOutput(JsonUtil.toJson(ob));
    }
    updateOne.setStatus(wfState.getProcessStatus());
    baseMapper.updateById(updateOne);
    return updateOne;
}
/**
 * Update the status (and truncated status remark) of a workflow run.
 * No-op when the runtime record does not exist.
 */
public void updateStatus(long id, int processStatus, String statusRemark) {
    if (null == baseMapper.selectById(id)) {
        log.error("工作流实例不存在,id:{}", id);
        return;
    }
    WorkflowRuntime toUpdate = new WorkflowRuntime();
    toUpdate.setId(id);
    toUpdate.setStatus(processStatus);
    // Keep at most 250 characters — presumably the column width; null-safe.
    toUpdate.setStatusRemark(StringUtils.substring(statusRemark, 0, 250));
    baseMapper.updateById(toUpdate);
}
/**
 * Fetch one live workflow run by uuid, or {@code null} when absent.
 * Non-admin callers are restricted to their own records.
 */
public WorkflowRuntime getByUuid(String uuid) {
    boolean restrictToOwner = !ThreadContext.getCurrentUser().getIsAdmin();
    return ChainWrappers.lambdaQueryChain(baseMapper)
            .eq(restrictToOwner, WorkflowRuntime::getUserId, ThreadContext.getCurrentUserId())
            .eq(WorkflowRuntime::getUuid, uuid)
            .eq(WorkflowRuntime::getIsDeleted, false)
            .last("limit 1")
            .one();
}
/**
 * Pages the runtimes of a workflow, newest first; non-admin users only see their own runs.
 *
 * @param wfUuid      workflow uuid (throws if the workflow does not exist)
 * @param currentPage 1-based page number
 * @param pageSize    page size
 * @return a page of runtime DTOs with null input/output normalized to empty objects
 */
public Page<WfRuntimeResp> page(String wfUuid, Integer currentPage, Integer pageSize) {
    Workflow workflow = workflowService.getOrThrow(wfUuid);
    User currentUser = ThreadContext.getCurrentUser();
    Page<WorkflowRuntime> entityPage = ChainWrappers.lambdaQueryChain(baseMapper)
            .eq(WorkflowRuntime::getWorkflowId, workflow.getId())
            .eq(WorkflowRuntime::getIsDeleted, false)
            .eq(!currentUser.getIsAdmin(), WorkflowRuntime::getUserId, currentUser.getId())
            .orderByDesc(WorkflowRuntime::getUpdateTime)
            .page(new Page<>(currentPage, pageSize));
    Page<WfRuntimeResp> dtoPage = new Page<>();
    MPPageUtil.convertToPage(entityPage, dtoPage, WfRuntimeResp.class, (entity, dto) -> {
        fillInputOutput(dto);
        return dto;
    });
    return dtoPage;
}
/**
 * Lists the node-level execution records of one runtime, enforcing ownership
 * (non-admins can only read their own runtimes).
 *
 * @param runtimeUuid runtime uuid
 * @return node execution records of that runtime
 */
public List<WfRuntimeNodeDto> listByRuntimeUuid(String runtimeUuid) {
    WorkflowRuntime runtime = PrivilegeUtil.checkAndGetByUuid(
            runtimeUuid, this.query(), ErrorEnum.A_WF_RUNTIME_NOT_FOUND);
    return workflowRuntimeNodeService.listByWfRuntimeId(runtime.getId());
}
/**
 * Soft-deletes every runtime of the given workflow; non-admins only affect their own runs.
 *
 * @param wfUuid workflow uuid (throws if the workflow does not exist)
 * @return true when the update statement succeeded
 */
public boolean deleteAll(String wfUuid) {
    Workflow workflow = workflowService.getOrThrow(wfUuid);
    User currentUser = ThreadContext.getCurrentUser();
    return ChainWrappers.lambdaUpdateChain(baseMapper)
            .eq(WorkflowRuntime::getWorkflowId, workflow.getId())
            .eq(!currentUser.getIsAdmin(), WorkflowRuntime::getUserId, currentUser.getId())
            .set(WorkflowRuntime::getIsDeleted, true)
            .update();
}
/**
 * Maps a runtime entity to its response DTO, normalizing null input/output to {}.
 */
private WfRuntimeResp changeToDTO(WorkflowRuntime runtime) {
    WfRuntimeResp dto = new WfRuntimeResp();
    BeanUtils.copyProperties(runtime, dto);
    fillInputOutput(dto);
    return dto;
}
// private void fillNodes(WfRuntimeResp runtimeResp) {
// List<WfRuntimeNodeDto> nodes = workflowRuntimeNodeService.listByWfRuntimeId(runtimeResp.getId());
// runtimeResp.setNodes(nodes);
// }
/**
 * Normalizes null JSON input/output columns to empty objects so callers
 * (and the frontend) never have to deal with null.
 */
private void fillInputOutput(WfRuntimeResp target) {
    if (target.getInput() == null) {
        target.setInput(JsonUtil.createObjectNode());
    }
    if (target.getOutput() == null) {
        target.setOutput(JsonUtil.createObjectNode());
    }
}
/**
 * Soft-deletes one runtime by uuid, enforcing ownership.
 *
 * @param uuid runtime uuid
 * @return true when the update statement succeeded
 */
public boolean softDelete(String uuid) {
    // Consistency fix: this looks up a *runtime*, so use A_WF_RUNTIME_NOT_FOUND
    // (same as listByRuntimeUuid) instead of the workflow-definition error
    // A_WF_NOT_FOUND that was used before.
    WorkflowRuntime workflowRuntime = PrivilegeUtil.checkAndGetByUuid(uuid, this.query(), ErrorEnum.A_WF_RUNTIME_NOT_FOUND);
    return ChainWrappers.lambdaUpdateChain(baseMapper)
            .eq(WorkflowRuntime::getId, workflowRuntime.getId())
            .set(WorkflowRuntime::getIsDeleted, true)
            .update();
}
}

View File

@@ -0,0 +1,193 @@
package org.ruoyi.workflow.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.extension.toolkit.ChainWrappers;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.base.ThreadContext;
import org.ruoyi.workflow.dto.workflow.WfEdgeReq;
import org.ruoyi.workflow.dto.workflow.WfNodeDto;
import org.ruoyi.workflow.dto.workflow.WorkflowResp;
import org.ruoyi.workflow.dto.workflow.WorkflowUpdateReq;
import org.ruoyi.workflow.entity.User;
import org.ruoyi.workflow.entity.Workflow;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.mapper.WorkflowMapper;
import org.ruoyi.workflow.util.MPPageUtil;
import org.ruoyi.workflow.util.PrivilegeUtil;
import org.ruoyi.workflow.util.UuidUtil;
import org.springframework.beans.BeanUtils;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
@Slf4j
@Service
public class WorkflowService extends ServiceImpl<WorkflowMapper, Workflow> {
    // Lazy self-reference, normally used to call @Transactional methods through the
    // Spring proxy. NOTE(review): not referenced anywhere in this class — confirm
    // whether it can be removed.
    @Lazy
    @Resource
    private WorkflowService self;
    @Resource
    private WorkflowNodeService workflowNodeService;
    @Resource
    private WorkflowEdgeService workflowEdgeService;
    // NOTE(review): appears unused in this class — possibly reserved for later use.
    @Resource
    private WorkflowComponentService workflowComponentService;
    /**
     * Creates a new workflow owned by the current user, seeds it with a start node,
     * and returns it as a DTO with nodes/edges filled in.
     *
     * @param title    workflow title
     * @param remark   optional description
     * @param isPublic whether the workflow is visible to other users
     * @return the created workflow DTO
     */
    @Transactional
    public WorkflowResp add(String title, String remark, Boolean isPublic) {
        String uuid = UuidUtil.createShort();
        Workflow one = new Workflow();
        one.setUuid(uuid);
        one.setTitle(title);
        one.setUserId(ThreadContext.getCurrentUserId());
        one.setRemark(remark);
        one.setIsEnable(true);
        one.setIsPublic(isPublic);
        baseMapper.insert(one);
        workflowNodeService.createStartNode(one);
        return changeWorkflowToDTO(one);
    }
    /**
     * Toggles the public flag of a workflow; ownership is enforced by PrivilegeUtil.
     *
     * @param wfUuid   workflow uuid
     * @param isPublic new visibility flag
     */
    public void setPublic(String wfUuid, Boolean isPublic) {
        Workflow workflow = PrivilegeUtil.checkAndGetByUuid(wfUuid, this.query(), ErrorEnum.A_WF_NOT_FOUND);
        ChainWrappers.lambdaUpdateChain(baseMapper)
                .eq(Workflow::getId, workflow.getId())
                .set(Workflow::getIsPublic, isPublic)
                .update();
    }
    /**
     * Updates title/remark (and optionally the public flag) of a workflow.
     * Non-admins can only update their own workflows (enforced by the user-id condition).
     *
     * @param wfUuid   workflow uuid (required)
     * @param title    new title (required)
     * @param remark   new remark
     * @param isPublic new public flag; ignored when null
     * @return the refreshed workflow DTO
     */
    public WorkflowResp updateBaseInfo(String wfUuid, String title, String remark, Boolean isPublic) {
        if (StringUtils.isAnyBlank(wfUuid, title)) {
            throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
        }
        ChainWrappers.lambdaUpdateChain(baseMapper)
                .eq(Workflow::getUuid, wfUuid)
                .eq(!ThreadContext.getCurrentUser().getIsAdmin(), Workflow::getUserId, ThreadContext.getCurrentUserId())
                .set(Workflow::getTitle, title)
                .set(Workflow::getRemark, remark)
                .set(null != isPublic, Workflow::getIsPublic, isPublic)
                .update();
        Workflow workflow = getOrThrow(wfUuid);
        return changeWorkflowToDTO(workflow);
    }
    /**
     * Replaces the node and edge definitions of a workflow (the graph itself);
     * base fields of the workflow row are not modified here.
     *
     * @param req uuid plus the full node/edge lists to persist
     * @return the refreshed workflow DTO
     */
    @Transactional
    public WorkflowResp update(WorkflowUpdateReq req) {
        Workflow workflow = PrivilegeUtil.checkAndGetByUuid(req.getUuid(), this.query(), ErrorEnum.A_WF_NOT_FOUND);
        long workflowId = workflow.getId();
        workflowNodeService.createOrUpdateNodes(workflowId, req.getNodes());
        workflowEdgeService.createOrUpdateEdges(workflowId, req.getEdges());
        Workflow workflow2 = getOrThrow(req.getUuid());
        return changeWorkflowToDTO(workflow2);
    }
    /**
     * Loads one non-deleted workflow by uuid, or null when none exists.
     * No ownership check — callers must enforce privileges themselves.
     */
    public Workflow getByUuid(String uuid) {
        return ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(Workflow::getUuid, uuid)
                .eq(Workflow::getIsDeleted, false)
                .last("limit 1")
                .one();
    }
    /**
     * Loads one non-deleted workflow by uuid, throwing A_WF_NOT_FOUND when absent.
     * No ownership check — callers must enforce privileges themselves.
     */
    public Workflow getOrThrow(String uuid) {
        Workflow workflow = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(Workflow::getUuid, uuid)
                .eq(Workflow::getIsDeleted, false)
                .last("limit 1")
                .one();
        if (null == workflow) {
            throw new BaseException(ErrorEnum.A_WF_NOT_FOUND.getInfo());
        }
        return workflow;
    }
    /**
     * Pages workflows matching the optional filters; non-admin users only see their own.
     *
     * @param keyword     optional title keyword (LIKE match)
     * @param isPublic    optional public-flag filter; ignored when null
     * @param isEnable    optional enable-flag filter; ignored when null
     * @param currentPage 1-based page number
     * @param pageSize    page size
     * @return page of workflow DTOs, newest first, with nodes/edges filled in
     */
    public Page<WorkflowResp> search(String keyword, Boolean isPublic, Boolean isEnable, Integer currentPage, Integer pageSize) {
        User user = ThreadContext.getCurrentUser();
        Page<Workflow> page = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(Workflow::getIsDeleted, false)
                .eq(null != isPublic, Workflow::getIsPublic, isPublic)
                .eq(null != isEnable, Workflow::getIsEnable, isEnable)
                .like(StringUtils.isNotBlank(keyword), Workflow::getTitle, keyword)
                .eq(!user.getIsAdmin(), Workflow::getUserId, user.getId())
                .orderByDesc(Workflow::getUpdateTime)
                .page(new Page<>(currentPage, pageSize));
        Page<WorkflowResp> result = new Page<>();
        // userIds collected for a (currently disabled) user-info fill step below.
        List<Long> userIds = new ArrayList<>();
        MPPageUtil.convertToPage(page, result, WorkflowResp.class, (source, target) -> {
            fillNodesAndEdges(target);
            userIds.add(source.getUserId());
            return target;
        });
        // fillUserInfos(userIds, result.getRecords());
        return result;
    }
    /**
     * Pages public, enabled workflows for the marketplace view (no ownership filter).
     *
     * @param keyword     optional title keyword (LIKE match)
     * @param currentPage 1-based page number
     * @param pageSize    page size
     * @return page of workflow DTOs, newest first, with nodes/edges filled in
     */
    public Page<WorkflowResp> searchPublic(String keyword, Integer currentPage, Integer pageSize) {
        Page<Workflow> page = ChainWrappers.lambdaQueryChain(baseMapper)
                .eq(Workflow::getIsDeleted, false)
                .eq(Workflow::getIsPublic, true)
                .eq(Workflow::getIsEnable, true)
                .like(StringUtils.isNotBlank(keyword), Workflow::getTitle, keyword)
                .orderByDesc(Workflow::getUpdateTime)
                .page(new Page<>(currentPage, pageSize));
        Page<WorkflowResp> result = new Page<>();
        List<Long> userIds = new ArrayList<>();
        MPPageUtil.convertToPage(page, result, WorkflowResp.class, (source, target) -> {
            fillNodesAndEdges(target);
            userIds.add(source.getUserId());
            return target;
        });
        // fillUserInfos(userIds, result.getRecords());
        return result;
    }
    /**
     * Soft-deletes a workflow by uuid.
     * NOTE(review): unlike setPublic/enable, this performs no ownership check —
     * any caller can delete any workflow by uuid. Confirm whether a
     * PrivilegeUtil.checkAndGetByUuid guard is intended here.
     */
    public void softDelete(String uuid) {
        ChainWrappers.lambdaUpdateChain(baseMapper).eq(Workflow::getUuid, uuid)
                .set(Workflow::getIsDeleted, true).update();
    }
    /**
     * Enables or disables a workflow; ownership is checked twice (PrivilegeUtil
     * lookup plus the user-id condition on the update for non-admins).
     *
     * @param uuid   workflow uuid
     * @param enable new enable flag (required)
     */
    public void enable(String uuid, Boolean enable) {
        if (null == enable) {
            throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
        }
        Workflow workflow = PrivilegeUtil.checkAndGetByUuid(uuid, this.query(), ErrorEnum.A_WF_NOT_FOUND);
        ChainWrappers.lambdaUpdateChain(baseMapper)
                .eq(Workflow::getId, workflow.getId())
                .eq(!ThreadContext.getCurrentUser().getIsAdmin(), Workflow::getUserId, ThreadContext.getCurrentUserId())
                .set(Workflow::getIsEnable, enable)
                .update();
    }
    // Maps a workflow entity to its response DTO with nodes and edges attached.
    private WorkflowResp changeWorkflowToDTO(Workflow workflow) {
        WorkflowResp workflowResp = new WorkflowResp();
        BeanUtils.copyProperties(workflow, workflowResp);
        fillNodesAndEdges(workflowResp);
        // User user = userService.getById(workflow.getUserId());
        // if (null != user) {
        //     workflowResp.setUserUuid(user.getUuid());
        //     workflowResp.setUserName(user.getName());
        // }
        return workflowResp;
    }
    // Loads and attaches the node and edge DTO lists of the workflow.
    private void fillNodesAndEdges(WorkflowResp workflowResp) {
        List<WfNodeDto> nodes = workflowNodeService.listDtoByWfId(workflowResp.getId());
        workflowResp.setNodes(nodes);
        List<WfEdgeReq> edges = workflowEdgeService.listDtoByWfId(workflowResp.getId());
        workflowResp.setEdges(edges);
    }
}

View File

@@ -0,0 +1,152 @@
package org.ruoyi.workflow.util;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Slf4j
public class JsonUtil {
    // Shared, pre-configured mapper; ObjectMapper is thread-safe after configuration.
    private static final ObjectMapper objectMapper = new ObjectMapper();
    static {
        objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        objectMapper.configure(SerializationFeature.INDENT_OUTPUT, Boolean.FALSE);
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        objectMapper.registerModules(LocalDateTimeUtil.getSimpleModule(), new JavaTimeModule(), new Jdk8Module());
    }
    /** Utility class; no instances. */
    private JsonUtil() {
    }
    /**
     * Serializes an object to JSON.
     *
     * @param obj object to serialize
     * @return JSON string, or null when serialization fails (error is logged)
     */
    public static String toJson(Object obj) {
        String resp = null;
        try {
            resp = objectMapper.writeValueAsString(obj);
        } catch (IOException e) {
            log.error("JsonUtil error", e);
        }
        return resp;
    }
    /**
     * 创建JSON处理器的静态方法
     *
     * @param content JSON字符串
     * @return parser over the given content, or null when the content is blank
     *         or the parser cannot be created (error is logged)
     */
    private static JsonParser getParser(String content) {
        if (StringUtils.isNotBlank(content)) {
            try {
                return objectMapper.getFactory().createParser(content);
            } catch (IOException ioe) {
                log.error("JsonUtil getParser error", ioe);
            }
        }
        return null;
    }
    /**
     * JSON对象反序列化
     * Deserializes a JSON string into an instance of the given class.
     *
     * @return the instance, or null on blank input or failure (error is logged)
     */
    public static <T> T fromJson(String json, Class<T> clazz) {
        if (StringUtils.isBlank(json)) {
            return null;
        }
        // try-with-resources: the previous implementation never closed the parser.
        // A null resource is permitted and simply skipped on close.
        try (JsonParser jp = getParser(json)) {
            if (null == jp) {
                log.error("json parser is null");
                return null;
            }
            return jp.readValueAs(clazz);
        } catch (IOException ioe) {
            log.error("反序列化失败", ioe);
        }
        return null;
    }
    /**
     * Converts a JsonNode tree into an instance of the given class.
     *
     * @return the instance, or null on failure (error is logged)
     */
    public static <T> T fromJson(JsonNode jsonNode, Class<T> clazz) {
        try {
            return objectMapper.treeToValue(jsonNode, clazz);
        } catch (JsonProcessingException e) {
            log.error("反序列化失败", e);
        }
        return null;
    }
    /**
     * Converts each element of an ArrayNode into an instance of the given class.
     * On failure, already-converted elements are returned (error is logged).
     */
    public static <T> List<T> fromArrayNode(ArrayNode arrayNode, Class<T> clazz) {
        List<T> result = new ArrayList<>();
        try {
            for (JsonNode jsonNode : arrayNode) {
                result.add(objectMapper.treeToValue(jsonNode, clazz));
            }
        } catch (JsonProcessingException e) {
            log.error("反序列化失败", e);
        }
        return result;
    }
    /**
     * Parses a JSON string into a JsonNode tree, or null on failure (error is logged).
     */
    public static JsonNode toJsonNode(String json) {
        try {
            return objectMapper.readTree(json);
        } catch (JsonProcessingException e) {
            log.error("反序列化失败", e);
        }
        return null;
    }
    /**
     * Parses a JSON object string into a Map.
     * Uses a TypeReference instead of the former raw {@code Map.class} read,
     * avoiding the unchecked conversion.
     *
     * @throws RuntimeException wrapping the parse failure
     */
    public static Map<String, Object> toMap(String json) {
        Map<String, Object> result;
        try {
            result = objectMapper.readValue(json, new TypeReference<Map<String, Object>>() {
            });
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        return result;
    }
    /**
     * Parses a JSON string into an instance of the given class.
     *
     * @throws RuntimeException wrapping the parse failure
     */
    public static <T> T toBean(String json, Class<T> tClass) {
        T result;
        try {
            result = objectMapper.readValue(json, tClass);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        return result;
    }
    /**
     * Converts a POJO into a Map via the shared mapper's conversion rules.
     *
     * @throws RuntimeException wrapping any conversion failure
     */
    public static Map<String, Object> toMap(Object obj) {
        try {
            return objectMapper.convertValue(obj, new TypeReference<HashMap<String, Object>>() {
            });
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    /** Converts a POJO into a JsonNode tree. */
    public static JsonNode classToJsonNode(Object obj) {
        return objectMapper.valueToTree(obj);
    }
    /** Creates an empty JSON object node using the shared mapper. */
    public static ObjectNode createObjectNode() {
        return objectMapper.createObjectNode();
    }
}

View File

@@ -0,0 +1,18 @@
package org.ruoyi.workflow.util;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
public class LocalDateTimeDeserializer extends JsonDeserializer<LocalDateTime> {
    // DateTimeFormatter is immutable and thread-safe; build it once instead of
    // re-creating it on every deserialize call (the previous implementation did).
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses "yyyy-MM-dd HH:mm:ss" text into a LocalDateTime.
     *
     * @throws IOException if the token text cannot be read
     */
    @Override
    public LocalDateTime deserialize(JsonParser p, DeserializationContext deserializationContext)
            throws IOException {
        return LocalDateTime.parse(p.getValueAsString(), FORMATTER);
    }
}

View File

@@ -0,0 +1,18 @@
package org.ruoyi.workflow.util;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
public class LocalDateTimeSerializer extends JsonSerializer<LocalDateTime> {
    // DateTimeFormatter is immutable and thread-safe; build it once instead of
    // re-creating it on every serialize call (the previous implementation did).
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Writes a LocalDateTime as "yyyy-MM-dd HH:mm:ss" text.
     *
     * @throws IOException if the generator cannot write the string
     */
    @Override
    public void serialize(LocalDateTime value, JsonGenerator gen, SerializerProvider serializers)
            throws IOException {
        gen.writeString(value.format(FORMATTER));
    }
}

View File

@@ -0,0 +1,68 @@
package org.ruoyi.workflow.util;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import org.apache.commons.lang3.StringUtils;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
public class LocalDateTimeUtil {
    /** Default timestamp format used across the module. */
    public static final DateTimeFormatter PATTERN_DEFAULT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    // NOTE(review): "yyyyMMddmmHHss" places minutes (mm) before hours (HH); if a
    // compact timestamp was intended this should probably be "yyyyMMddHHmmss".
    // Left unchanged because the constant's name mirrors the current ordering —
    // confirm with usages before fixing.
    public static final DateTimeFormatter PATTERN_YYYYMMDDMMHHSS = DateTimeFormatter.ofPattern("yyyyMMddmmHHss");
    /** Date-only format. */
    public static final DateTimeFormatter PATTERN_YYYY_MM_DD = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    private LocalDateTimeUtil() {
    }

    /**
     * Jackson module used by JsonUtil: serializes Long as String (avoids JavaScript
     * precision loss on large ids) and LocalDateTime as "yyyy-MM-dd HH:mm:ss".
     */
    public static SimpleModule getSimpleModule() {
        // jackson中自定义处理序列化和反序列化
        SimpleModule customModule = new SimpleModule();
        customModule.addSerializer(Long.class, ToStringSerializer.instance);
        // 时间序列化
        customModule.addSerializer(LocalDateTime.class, new LocalDateTimeSerializer());
        customModule.addDeserializer(LocalDateTime.class, new LocalDateTimeDeserializer());
        return customModule;
    }

    /** Parses "yyyy-MM-dd HH:mm:ss" text into a LocalDateTime. */
    public static LocalDateTime parse(String localDateTime) {
        return LocalDateTime.parse(localDateTime, PATTERN_DEFAULT);
    }

    /** Converts an epoch-millisecond timestamp to a LocalDateTime in the system zone. */
    public static LocalDateTime parse(Long epochMilli) {
        return LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMilli), ZoneId.systemDefault());
    }

    /** Formats with the default pattern; returns "" for null input. */
    public static String format(LocalDateTime localDateTime) {
        if (null == localDateTime) {
            return StringUtils.EMPTY;
        }
        return localDateTime.format(PATTERN_DEFAULT);
    }

    /** Formats with an ad-hoc pattern string; returns "" for null input. */
    public static String format(LocalDateTime localDateTime, String pattern) {
        if (null == localDateTime) {
            return StringUtils.EMPTY;
        }
        return localDateTime.format(DateTimeFormatter.ofPattern(pattern));
    }

    /** Formats with a pre-built formatter; returns "" for null input. */
    public static String format(LocalDateTime localDateTime, DateTimeFormatter pattern) {
        if (null == localDateTime) {
            return StringUtils.EMPTY;
        }
        return localDateTime.format(pattern);
    }

    /** Encodes a date as an int of the form yyyyMMdd (e.g. 2025-01-31 -> 20250131). */
    public static int getIntDay(LocalDateTime localDateTime) {
        return localDateTime.getYear() * 10000 + localDateTime.getMonthValue() * 100 + localDateTime.getDayOfMonth();
    }

    /** Today's date as a yyyyMMdd int. */
    public static int getToday() {
        // Delegate so the yyyyMMdd encoding lives in exactly one place
        // (the previous implementation duplicated the arithmetic).
        return getIntDay(LocalDateTime.now());
    }
}

View File

@@ -0,0 +1,86 @@
package org.ruoyi.workflow.util;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.BeanUtils;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;
@Slf4j
public class MPPageUtil {

    private MPPageUtil() {
    }

    /** Converts a MyBatis-Plus page into a page of a different record type. */
    public static <T, U> Page<U> convertToPage(Page<T> source, Class<U> targetRecordClass) {
        return MPPageUtil.convertToPage(source, new Page<>(), targetRecordClass, null);
    }

    /** Converts into the supplied target page without a per-record post-processor. */
    public static <T, U> Page<U> convertToPage(Page<T> source, Page<U> target, Class<U> targetRecordClass) {
        return MPPageUtil.convertToPage(source, target, targetRecordClass, null);
    }

    /**
     * Copies paging metadata from {@code source} to {@code target} and converts every
     * record to {@code targetRecordClass}, optionally post-processing each source/target
     * pair via {@code biFunction}. A conversion failure is logged and aborts the
     * remaining records; records converted before the failure are kept.
     */
    public static <T, U> Page<U> convertToPage(Page<T> source, Page<U> target, Class<U> targetRecordClass, BiFunction<T, U, U> biFunction) {
        BeanUtils.copyProperties(source, target);
        List<U> converted = new ArrayList<>();
        target.setRecords(converted);
        try {
            for (T record : source.getRecords()) {
                converted.add(convertRecord(record, targetRecordClass, biFunction));
            }
        } catch (NoSuchMethodException e1) {
            log.error("convertTo error1", e1);
        } catch (Exception e2) {
            log.error("convertTo error2", e2);
        }
        return target;
    }

    /** Converts a list of records without a per-record post-processor. */
    public static <T, U> List<U> convertToList(List<T> source, Class<U> targetRecordClass) {
        return convertToList(source, targetRecordClass, null);
    }

    /**
     * Converts every element of {@code source} to {@code targetRecordClass}.
     * Unlike {@link #convertToPage}, a failure on one element is logged and
     * skipped — the remaining elements are still converted.
     */
    public static <T, U> List<U> convertToList(List<T> source, Class<U> targetRecordClass, BiFunction<T, U, U> biFunction) {
        if (CollectionUtils.isEmpty(source)) {
            return Collections.emptyList();
        }
        List<U> result = new ArrayList<>();
        for (T element : source) {
            try {
                result.add(convertRecord(element, targetRecordClass, biFunction));
            } catch (NoSuchMethodException e1) {
                log.error("convertTo error1", e1);
            } catch (Exception e2) {
                log.error("convertTo error2", e2);
            }
        }
        return result;
    }

    /**
     * Converts a single object to {@code targetClass} by property copy.
     *
     * @throws RuntimeException wrapping any reflective failure
     */
    public static <T, U> U convertTo(T source, Class<U> targetClass) {
        try {
            U target = targetClass.getDeclaredConstructor().newInstance();
            BeanUtils.copyProperties(source, target);
            return target;
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException |
                 NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
    }

    // Instantiates the target type, copies properties, and applies the optional
    // post-processor. The post-processor's return value is intentionally ignored,
    // matching the historical behavior of the callers above.
    private static <T, U> U convertRecord(T source, Class<U> targetRecordClass, BiFunction<T, U, U> biFunction) throws Exception {
        U target = targetRecordClass.getDeclaredConstructor().newInstance();
        BeanUtils.copyProperties(source, target);
        if (null != biFunction) {
            biFunction.apply(source, target);
        }
        return target;
    }
}

View File

@@ -0,0 +1,32 @@
package org.ruoyi.workflow.util;
import com.baomidou.mybatisplus.extension.conditions.query.QueryChainWrapper;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.base.ThreadContext;
import org.ruoyi.workflow.enums.ErrorEnum;
import static org.ruoyi.workflow.cosntant.AdiConstant.*;
public class PrivilegeUtil {
    private PrivilegeUtil() {
    }

    /** Convenience overload of {@link #checkAndGet} that looks up by uuid only. */
    public static <T> T checkAndGetByUuid(String uuid, QueryChainWrapper<T> lambdaQueryChainWrapper, ErrorEnum exceptionMessage) {
        return checkAndGet(null, uuid, lambdaQueryChainWrapper, exceptionMessage);
    }

    /**
     * Loads a single non-deleted row by id and/or uuid, additionally restricting
     * to the current user's rows unless the current user is an admin.
     * The former duplicated admin/non-admin chains were merged: the only difference
     * was the user-id condition, now applied conditionally.
     *
     * @param id                      optional primary key filter (skipped when null)
     * @param uuid                    optional uuid filter (skipped when null)
     * @param lambdaQueryChainWrapper query chain over the target table
     * @param exceptionMessage        error raised when no accessible row exists
     * @return the matching row, never null
     * @throws BaseException with {@code exceptionMessage} when nothing matches
     */
    public static <T> T checkAndGet(Long id, String uuid, QueryChainWrapper<T> lambdaQueryChainWrapper, ErrorEnum exceptionMessage) {
        boolean isAdmin = Boolean.TRUE.equals(ThreadContext.getCurrentUser().getIsAdmin());
        T target = lambdaQueryChainWrapper
                .eq(null != id, COLUMN_NAME_ID, id)
                .eq(null != uuid, COLUMN_NAME_UUID, uuid)
                .eq(!isAdmin, COLUMN_NAME_USER_ID, ThreadContext.getCurrentUserId())
                .eq(COLUMN_NAME_IS_DELETE, false)
                .oneOpt().orElse(null);
        if (null == target) {
            throw new BaseException(exceptionMessage.getInfo());
        }
        return target;
    }
}

View File

@@ -0,0 +1,27 @@
package org.ruoyi.workflow.util;
import jakarta.annotation.Resource;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.script.DefaultRedisScript;
import org.springframework.data.redis.core.script.RedisScript;
import org.springframework.stereotype.Component;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
@Component
public class RedisTemplateUtil {
    /**
     * Atomic compare-and-delete: the key is removed only when its value still equals
     * the caller's clientId. Running get+del as a single Lua script makes the check
     * and the delete atomic inside Redis.
     */
    private static final RedisScript<Long> UNLOCK_SCRIPT = new DefaultRedisScript<>(
            "if redis.call('get', KEYS[1]) == ARGV[1] then return redis.call('del', KEYS[1]) else return 0 end",
            Long.class);

    @Resource
    private StringRedisTemplate stringRedisTemplate;

    /**
     * Tries to acquire a distributed lock via SET NX EX.
     *
     * @param key                lock key
     * @param clientId           unique owner token of the requesting client
     * @param lockExpireInSecond TTL in seconds; guards against deadlock if the owner dies
     * @return true if the lock was acquired
     */
    public boolean lock(String key, String clientId, int lockExpireInSecond) {
        return Boolean.TRUE.equals(stringRedisTemplate.opsForValue().setIfAbsent(key, clientId, lockExpireInSecond, TimeUnit.SECONDS));
    }

    /**
     * Releases the lock only if it is still held by {@code clientId}.
     * The previous get-then-delete implementation raced: between the GET and the DEL
     * the key could expire and be re-acquired by another client, whose lock would
     * then be deleted. The Lua script removes that window.
     *
     * @return true if this call actually removed the lock
     */
    public boolean unlock(String key, String clientId) {
        Long deleted = stringRedisTemplate.execute(UNLOCK_SCRIPT, Collections.singletonList(key), clientId);
        return null != deleted && deleted > 0L;
    }
}

View File

@@ -0,0 +1,30 @@
package org.ruoyi.workflow.util;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
@Component
public class SpringUtil implements ApplicationContextAware {
    // Captured once during context startup; read by the static accessors below.
    private static ApplicationContext applicationContext;

    /** Spring callback: stores the context so static lookups can use it. */
    @Override
    public void setApplicationContext(@NotNull ApplicationContext applicationContext) throws BeansException {
        SpringUtil.applicationContext = applicationContext;
    }

    /** Looks up a bean by name and expected type. */
    public static <T> T getBean(String name, Class<T> clazz) {
        return applicationContext.getBean(name, clazz);
    }

    /** Looks up a bean by type. */
    public static <T> T getBean(Class<T> clazz) {
        return applicationContext.getBean(clazz);
    }

    /** Reads a property from the Spring environment, or null when absent. */
    public static String getProperty(String key) {
        return applicationContext.getEnvironment().getProperty(key);
    }
}

View File

@@ -0,0 +1,12 @@
package org.ruoyi.workflow.util;
import java.util.UUID;
public class UuidUtil {
    private UuidUtil() {
    }

    /**
     * Generates a random id: a type-4 UUID with the dashes stripped,
     * i.e. 32 lowercase hexadecimal characters.
     */
    public static String createShort() {
        String canonical = UUID.randomUUID().toString();
        return canonical.replace("-", "");
    }
}

View File

@@ -0,0 +1,28 @@
package org.ruoyi.workflow.workflow;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.List;
@Slf4j
@Builder
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CompileNode {
protected String id;
protected Boolean conditional = false;
/**
* 以下两种情况会导致多个nextNode出现
* 1. 下游节点为并行节点,所有的下游节点同时运行
* 2. 当前节点为条件分支节点,下游节点为多个节点,实际执行时只会执行一条
* 两种节点根据是否GraphCompileNode来区分
*/
protected List<CompileNode> nextNodes = new ArrayList<>();
}

View File

@@ -0,0 +1,38 @@
package org.ruoyi.workflow.workflow;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@EqualsAndHashCode(callSuper = true)
@Data
public class GraphCompileNode extends CompileNode {
    /** Entry node of the sub-graph wrapped by this compile node. */
    private CompileNode root;

    /**
     * Appends {@code node} after the last node of the chain starting at {@code root},
     * unless a node with the same id already appears along the chain.
     * Only the first branch (index 0) of every node is followed.
     */
    public void appendToLeaf(CompileNode node) {
        CompileNode current = root;
        while (!current.getNextNodes().isEmpty()) {
            current = current.getNextNodes().get(0);
            if (current.getId().equals(node.getId())) {
                // Already linked somewhere along the chain: do not append twice.
                return;
            }
        }
        current.getNextNodes().add(node);
    }

    /**
     * Returns the last node of the chain starting at {@code root},
     * following only the first branch of every node.
     */
    public CompileNode getTail() {
        CompileNode current = root;
        while (!current.getNextNodes().isEmpty()) {
            current = current.getNextNodes().get(0);
        }
        return current;
    }
}

View File

@@ -0,0 +1,17 @@
package org.ruoyi.workflow.workflow;
import org.apache.commons.collections4.map.PassiveExpiringMap;
/**
* 已中断正在等待用户输入的流程 <br/>
* TODO 需要考虑项目多节点部署的情况
*/
public class InterruptedFlow {
    /**
     * 10分钟超时 — entries expire ten minutes after insertion.
     */
    private static final PassiveExpiringMap.ExpirationPolicy<String, WorkflowEngine> ep = new PassiveExpiringMap.ConstantTimeToLiveExpirationPolicy<>(60 * 1000 * 10);
    /**
     * Runtime uuid -> suspended workflow engine waiting for user input.
     * Made {@code final}: the map is meant to be mutated, never reassigned.
     * NOTE(review): commons-collections' PassiveExpiringMap is documented as not
     * thread-safe, and this map is shared across requests — confirm access paths
     * and add external synchronization if they are concurrent.
     */
    public static final PassiveExpiringMap<String, WorkflowEngine> RUNTIME_TO_GRAPH = new PassiveExpiringMap<>(ep);

    private InterruptedFlow() {
    }
}

View File

@@ -0,0 +1,24 @@
package org.ruoyi.workflow.workflow;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import java.util.ArrayList;
import java.util.List;
@Builder
@Data
@AllArgsConstructor
@NoArgsConstructor
public class NodeProcessResult {
private List<NodeIOData> content = new ArrayList<>();
/**
* 条件执行时使用
*/
private String nextNodeUuid;
}

View File

@@ -0,0 +1,50 @@
package org.ruoyi.workflow.workflow;
import lombok.Getter;
import java.util.Arrays;
@Getter
public enum WfComponentNameEnum {
START("Start"),
END("End"),
LLM_ANSWER("Answer"),
DALLE3("Dalle3"),
TONGYI_WANX("Tongyiwanx"),
DOCUMENT_EXTRACTOR("DocumentExtractor"),
KEYWORD_EXTRACTOR("KeywordExtractor"),
FAQ_EXTRACTOR("FaqExtractor"),
KNOWLEDGE_RETRIEVER("KnowledgeRetrieval"),
SWITCHER("Switcher"),
CLASSIFIER("Classifier"),
TEMPLATE("Template"),
GOOGLE_SEARCH("Google"),
HUMAN_FEEDBACK("HumanFeedback"),
MAIL_SEND("MailSend"),
HTTP_REQUEST("HttpRequest");
private final String name;
WfComponentNameEnum(String name) {
this.name = name;
}
public static WfComponentNameEnum getByName(String name) {
return Arrays.stream(WfComponentNameEnum.values()).filter(item -> item.name.equals(name)).findFirst().orElse(null);
}
}

View File

@@ -0,0 +1,23 @@
package org.ruoyi.workflow.workflow;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.workflow.node.AbstractWfNode;
import org.ruoyi.workflow.workflow.node.EndNode;
import org.ruoyi.workflow.workflow.node.answer.LLMAnswerNode;
import org.ruoyi.workflow.workflow.node.start.StartNode;
public class WfNodeFactory {
    /**
     * Instantiates the concrete workflow-node implementation for a component definition.
     *
     * @param wfComponent    component metadata; its name selects the implementation
     * @param nodeDefinition persisted node definition of the workflow
     * @param wfState        workflow instance state
     * @param nodeState      node instance state
     * @return the node implementation, or null for components that have no
     *         implementation yet or whose name is unknown
     */
    public static AbstractWfNode create(WorkflowComponent wfComponent, WorkflowNode nodeDefinition,
                                        WfState wfState, WfNodeState nodeState) {
        WfComponentNameEnum componentName = WfComponentNameEnum.getByName(wfComponent.getName());
        // Guard: getByName returns null for unknown names, and switching on a null
        // enum throws NullPointerException. Treat unknown names like unimplemented
        // components and return null instead.
        if (null == componentName) {
            return null;
        }
        AbstractWfNode wfNode = null;
        switch (componentName) {
            case START -> wfNode = new StartNode(wfComponent, nodeDefinition, wfState, nodeState);
            case LLM_ANSWER -> wfNode = new LLMAnswerNode(wfComponent, nodeDefinition, wfState, nodeState);
            case END -> wfNode = new EndNode(wfComponent, nodeDefinition, wfState, nodeState);
            default -> {
            }
        }
        return wfNode;
    }
}

View File

@@ -0,0 +1,132 @@
package org.ruoyi.workflow.workflow;
import cn.hutool.core.collection.CollUtil;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.collections4.CollectionUtils;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataFilesContent;
import org.ruoyi.workflow.workflow.def.*;
import java.util.*;
import static org.ruoyi.workflow.cosntant.AdiConstant.IMAGE_EXTENSIONS;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.DEFAULT_INPUT_PARAM_NAME;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.DEFAULT_OUTPUT_PARAM_NAME;
/**
 * Helpers for converting and normalizing workflow node input/output data.
 */
public class WfNodeIODataUtil {
    // Maps an IO data-type enum to the node IO definition class used for it.
    public static final Map<WfIODataTypeEnum, Class<? extends WfNodeIO>> INPUT_TYPE_TO_NODE_IO_DEF = new HashMap<>();
    static {
        INPUT_TYPE_TO_NODE_IO_DEF.put(WfIODataTypeEnum.TEXT, WfNodeIOText.class);
        INPUT_TYPE_TO_NODE_IO_DEF.put(WfIODataTypeEnum.BOOL, WfNodeIOBool.class);
        INPUT_TYPE_TO_NODE_IO_DEF.put(WfIODataTypeEnum.NUMBER, WfNodeIONumber.class);
        INPUT_TYPE_TO_NODE_IO_DEF.put(WfIODataTypeEnum.OPTIONS, WfNodeIOOptions.class);
        INPUT_TYPE_TO_NODE_IO_DEF.put(WfIODataTypeEnum.FILES, WfNodeIOFiles.class);
    }
    /**
     * Builds a NodeIOData from a raw JSON object of the shape
     * {"name": ..., "content": {"type": ..., "title": ..., "value": ...}}.
     *
     * @param data raw JSON object carrying name and typed content
     * @return the parsed data; null when the type is unknown, or when a
     *         FILES/OPTIONS value does not have the expected JSON shape
     * @throws BaseException (A_PARAMS_ERROR) when "name" or "content" is missing
     */
    public static NodeIOData createNodeIOData(ObjectNode data) {
        JsonNode nameObj = data.get("name");
        JsonNode content = data.get("content");
        if (null == nameObj || null == content) {
            throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
        }
        String name = nameObj.asText();
        Integer type = content.get("type").asInt();
        String title = content.get("title").asText();
        JsonNode value = content.get("value");
        NodeIOData result = null;
        if (WfIODataTypeEnum.TEXT.getValue().equals(type)) {
            result = NodeIOData.createByText(name, title, value.asText());
        } else if (WfIODataTypeEnum.NUMBER.getValue().equals(type)) {
            result = NodeIOData.createByNumber(name, title, value.asDouble());
        } else if (WfIODataTypeEnum.BOOL.getValue().equals(type)) {
            result = NodeIOData.createByBool(name, title, value.asBoolean());
        } else if (WfIODataTypeEnum.FILES.getValue().equals(type)) {
            // FILES: the value must be a JSON array of url strings.
            if (value.isArray()) {
                List<String> fileUrls = new ArrayList<>();
                Iterator<JsonNode> iterator = value.elements();
                while (iterator.hasNext()) {
                    fileUrls.add(iterator.next().asText());
                }
                result = NodeIOData.createByFiles(name, title, fileUrls);
            }
        } else if (WfIODataTypeEnum.OPTIONS.getValue().equals(type)) {
            // OPTIONS: the value must be a JSON object, converted to a Map.
            if (value instanceof ObjectNode) {
                result = NodeIOData.createByOptions(name, title, JsonUtil.toMap(value));
            }
        }
        return result;
    }
    /**
     * 1. If no output parameter named "output" exists, one has to be added. <br/>
     * 2. If a text-typed parameter exists, it is renamed to the default output name. <br/>
     * 3. Otherwise the first parameter is renamed to the default output name.
     *
     * NOTE(review): CollUtil.newCopyOnWriteArrayList copies only the list, not its
     * elements, so the rename below mutates NodeIOData objects shared with the
     * caller's {@code inputs} list. Also, {@code result.add(inputs.get(0))} appends
     * the first input again, which can duplicate an entry (possibly the renamed
     * one) — confirm whether a deep copy + rename of the copy was intended.
     *
     * @param inputs 输入参数列表 (input parameter list)
     * @return 输出参数列表 (output parameter list)
     */
    public static List<NodeIOData> changeInputsToOutputs(List<NodeIOData> inputs) {
        if (CollectionUtils.isEmpty(inputs)) {
            return new ArrayList<>();
        }
        List<NodeIOData> result = CollUtil.newCopyOnWriteArrayList(inputs);
        boolean outputExist = false;
        NodeIOData defaultInputName = null, txtExist = null, first = null;
        // Single pass: remember the first element, the default-input element,
        // and the first text-typed element; detect an existing "output" element.
        for (NodeIOData nodeIOData : result) {
            if (null == first) {
                first = nodeIOData;
            }
            if (DEFAULT_OUTPUT_PARAM_NAME.equals(nodeIOData.getName())) {
                outputExist = true;
            } else if (DEFAULT_INPUT_PARAM_NAME.equals(nodeIOData.getName())) {
                defaultInputName = nodeIOData;
            } else if (null == txtExist && WfIODataTypeEnum.TEXT.getValue().equals(nodeIOData.getContent().getType())) {
                txtExist = nodeIOData;
            }
        }
        if (outputExist) {
            return result;
        }
        // Preference order for the element to rename: default input > first text > first.
        if (null != defaultInputName) {
            defaultInputName.setName(DEFAULT_OUTPUT_PARAM_NAME);
        } else if (null != txtExist) {
            txtExist.setName(DEFAULT_OUTPUT_PARAM_NAME);
        } else if (null != first) {
            first.setName(DEFAULT_OUTPUT_PARAM_NAME);
        }
        result.add(inputs.get(0));
        return result;
    }
    /**
     * Converts file urls inside the IO data into markdown links (images get the
     * image syntax). Used when rendering variables into templates; other cases
     * are handled by the frontend.
     *
     * @param ioDataList 输入输出列表 (input/output list)
     */
    public static void changeFilesContentToMarkdown(List<NodeIOData> ioDataList) {
        ioDataList.forEach(input -> {
            if (input.getContent() instanceof NodeIODataFilesContent filesContent) {
                List<String> newValues = new ArrayList<>();
                for (String s : filesContent.getValue()) {
                    // lastIndexOf returns -1 when there is no dot, so the whole
                    // string is treated as the extension in that case (benign).
                    if (IMAGE_EXTENSIONS.contains(s.substring(s.lastIndexOf(".") + 1))) {
                        newValues.add("![" + filesContent.getTitle() + "](" + s + ")");
                    } else {
                        newValues.add("[" + filesContent.getTitle() + "](" + s + ")");
                    }
                }
                filesContent.setValue(newValues);
            }
        });
    }
}

View File

@@ -0,0 +1,26 @@
package org.ruoyi.workflow.workflow;
import com.fasterxml.jackson.annotation.JsonProperty;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import org.ruoyi.workflow.workflow.def.WfNodeIO;
import org.ruoyi.workflow.workflow.def.WfNodeParamRef;
import org.springframework.validation.annotation.Validated;
import java.util.List;
/**
 * Input parameter configuration of a workflow node
 * (节点的输入参数配置).
 */
@Validated
@Data
public class WfNodeInputConfig {
    // Parameters the user fills in directly; serialized as "user_inputs".
    @NotNull
    @JsonProperty("user_inputs")
    private List<WfNodeIO> userInputs;
    // Parameters that reference other nodes' outputs; serialized as "ref_inputs".
    @NotNull
    @JsonProperty("ref_inputs")
    private List<WfNodeParamRef> refInputs;
}

View File

@@ -0,0 +1,53 @@
package org.ruoyi.workflow.workflow;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.bsc.langgraph4j.state.AgentState;
import org.ruoyi.workflow.util.UuidUtil;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import java.io.Serial;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.DEFAULT_INPUT_PARAM_NAME;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.NODE_PROCESS_STATUS_READY;
/**
 * 工作流节点实例状态 | workflow node instance state
 */
@Setter
@Getter
@ToString(callSuper = true)
public class WfNodeState extends AgentState implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    // Unique id of this node execution (32-char hex).
    private String uuid = UuidUtil.createShort();
    // Execution status; starts at NODE_PROCESS_STATUS_READY.
    private int processStatus = NODE_PROCESS_STATUS_READY;
    // Free-text remark accompanying the status (e.g. an error message).
    private String processStatusRemark = "";
    // Input data handed to this node.
    private List<NodeIOData> inputs = new ArrayList<>();
    // Output data produced by this node.
    private List<NodeIOData> outputs = new ArrayList<>();
    /**
     * Constructs an AgentState with the given initial data.
     *
     * @param initData the initial data for the agent state
     */
    public WfNodeState(Map<String, Object> initData) {
        super(initData);
    }
    /** Creates a state with no initial agent data. */
    public WfNodeState() {
        super(Map.of());
    }
    /**
     * Returns the input whose name equals the default input parameter name,
     * or empty when no such input exists.
     */
    public Optional<NodeIOData> getDefaultInput() {
        return inputs.stream().filter(item -> DEFAULT_INPUT_PARAM_NAME.equals(item.getName())).findFirst();
    }
}

View File

@@ -0,0 +1,131 @@
package org.ruoyi.workflow.workflow;
import lombok.Getter;
import lombok.Setter;
import org.bsc.langgraph4j.langchain4j.generators.StreamingChatGenerator;
import org.bsc.langgraph4j.state.AgentState;
import org.ruoyi.workflow.dto.workflow.WfRuntimeNodeDto;
import org.ruoyi.workflow.entity.User;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.node.AbstractWfNode;
import java.util.*;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.WORKFLOW_PROCESS_STATUS_READY;
/**
 * Mutable state of a single workflow run (one runtime instance).
 * <p>
 * Tracks the edges discovered while building the graph, the nodes that have
 * completed, their runtime DTOs, the run's overall input/output and the set of
 * human-feedback (interrupt) nodes.
 */
@Setter
@Getter
public class WfState {
    // Runtime uuid of this workflow run.
    private String uuid;
    // User who started the run.
    private User user;
    // Uuid of the node currently being processed.
    private String processingNodeUuid;
    // Source node uuid => target node uuid list.
    private Map<String, List<String>> edges = new HashMap<>();
    // Source node uuid => target node uuid list (conditional branches only).
    private Map<String, List<String>> conditionalEdges = new HashMap<>();
    // Source node uuid => streaming chat generator (for nodes that stream LLM output).
    private Map<String, StreamingChatGenerator<AgentState>> nodeToStreamingGenerator = new HashMap<>();
    /**
     * Nodes that have already run, in completion order.
     */
    private List<AbstractWfNode> completedNodes = new LinkedList<>();
    // Persisted runtime DTOs, one per executed node instance.
    private List<WfRuntimeNodeDto> runtimeNodes = new ArrayList<>();
    /**
     * Input received by the workflow (also the input of the start node).
     */
    private List<NodeIOData> input;
    /**
     * Output of the workflow once execution has finished.
     */
    private List<NodeIOData> output = new ArrayList<>();
    private Integer processStatus = WORKFLOW_PROCESS_STATUS_READY;
    /**
     * Uuids of human-feedback nodes (graph interrupt points).
     */
    private Set<String> interruptNodes = new HashSet<>();

    public WfState(User user, List<NodeIOData> input, String uuid) {
        this.input = input;
        this.user = user;
        this.uuid = uuid;
    }

    /**
     * Outputs of the most recently completed node.
     *
     * @return the last completed node's outputs, or an empty (mutable) list if
     *         no node has completed yet — previously this threw
     *         IndexOutOfBoundsException on an empty run
     */
    public List<NodeIOData> getLatestOutputs() {
        if (completedNodes.isEmpty()) {
            return new ArrayList<>();
        }
        WfNodeState upstreamState = completedNodes.get(completedNodes.size() - 1).getState();
        return upstreamState.getOutputs();
    }

    /**
     * Finds the state of a completed node by its uuid.
     */
    public Optional<WfNodeState> getNodeStateByNodeUuid(String nodeUuid) {
        return this.completedNodes.stream().filter(item -> item.getNode().getUuid().equals(nodeUuid)).map(AbstractWfNode::getState).findFirst();
    }

    /**
     * Records an edge.
     * With parallel branches one source node can map to several target nodes.
     *
     * @param sourceNodeUuid source node
     * @param targetNodeUuid target node
     */
    public void addEdge(String sourceNodeUuid, String targetNodeUuid) {
        List<String> targetNodeUuids = edges.computeIfAbsent(sourceNodeUuid, k -> new ArrayList<>());
        targetNodeUuids.add(targetNodeUuid);
    }

    /**
     * Records a conditional edge.
     * With conditional branches one source node can map to several target nodes.
     *
     * @param sourceNodeUuid source node
     * @param targetNodeUuid target node
     */
    public void addConditionalEdge(String sourceNodeUuid, String targetNodeUuid) {
        List<String> targetNodeUuids = conditionalEdges.computeIfAbsent(sourceNodeUuid, k -> new ArrayList<>());
        targetNodeUuids.add(targetNodeUuid);
    }

    /**
     * Returns the combined inputs and outputs of a completed node, or an empty
     * list if that node has not completed.
     */
    public List<NodeIOData> getIOByNodeUuid(String nodeUuid) {
        List<NodeIOData> result = new ArrayList<>();
        Optional<AbstractWfNode> optional = completedNodes.stream().filter(node -> nodeUuid.equals(node.getNode().getUuid())).findFirst();
        if (optional.isEmpty()) {
            return result;
        }
        result.addAll(optional.get().getState().getInputs());
        result.addAll(optional.get().getState().getOutputs());
        return result;
    }

    /**
     * Maps a workflow-node uuid to its persisted runtime DTO, via the completed
     * node's database id; returns null when either lookup fails.
     */
    public WfRuntimeNodeDto getRuntimeNodeByNodeUuid(String wfNodeUuid) {
        WorkflowNode wfNode = getCompletedNodes().stream()
                .map(AbstractWfNode::getNode)
                .filter(node -> node.getUuid().equals(wfNodeUuid))
                .findFirst()
                .orElse(null);
        if (null == wfNode) {
            return null;
        }
        return getRuntimeNodes().stream()
                .filter(item -> item.getNodeId().equals(wfNode.getId()))
                .findFirst()
                .orElse(null);
    }

    /**
     * Registers a node uuid as a graph interrupt point (human feedback).
     */
    public void addInterruptNode(String nodeUuid) {
        this.interruptNodes.add(nodeUuid);
    }
}

View File

@@ -0,0 +1,356 @@
package org.ruoyi.workflow.workflow;
import cn.hutool.core.collection.CollStreamUtil;
import cn.hutool.core.collection.CollUtil;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.bsc.async.AsyncGenerator;
import org.bsc.langgraph4j.*;
import org.bsc.langgraph4j.checkpoint.MemorySaver;
import org.bsc.langgraph4j.langchain4j.generators.StreamingChatGenerator;
import org.bsc.langgraph4j.state.AgentState;
import org.bsc.langgraph4j.state.StateSnapshot;
import org.bsc.langgraph4j.streaming.StreamingOutput;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.base.NodeInputConfigTypeHandler;
import org.ruoyi.workflow.dto.workflow.WfRuntimeNodeDto;
import org.ruoyi.workflow.dto.workflow.WfRuntimeResp;
import org.ruoyi.workflow.entity.*;
import org.ruoyi.workflow.enums.ErrorEnum;
import org.ruoyi.workflow.helper.SSEEmitterHelper;
import org.ruoyi.workflow.service.WorkflowRuntimeNodeService;
import org.ruoyi.workflow.service.WorkflowRuntimeService;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.def.WfNodeIO;
import org.ruoyi.workflow.workflow.def.WfNodeParamRef;
import org.ruoyi.workflow.workflow.node.AbstractWfNode;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import java.util.*;
import java.util.function.Function;
import static org.bsc.langgraph4j.StateGraph.END;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.*;
import static org.ruoyi.workflow.enums.ErrorEnum.*;
/**
 * Executes one workflow run end-to-end: builds the LangGraph4j state graph,
 * streams node events/outputs to the client over SSE, persists runtime records,
 * and supports interrupt/resume for human-feedback nodes.
 * <p>
 * NOTE(review): one engine instance per run — it holds per-run mutable state
 * (user, emitter, wfState); presumably not thread-safe, do not share — confirm.
 */
@Slf4j
public class WorkflowEngine {
    // Static definition of the workflow being executed.
    private final Workflow workflow;
    // All component (node type) definitions referenced by the nodes.
    private final List<WorkflowComponent> components;
    // Node instances of this workflow.
    private final List<WorkflowNode> wfNodes;
    // Edges connecting the nodes.
    private final List<WorkflowEdge> wfEdges;
    private final SSEEmitterHelper sseEmitterHelper;
    private final WorkflowRuntimeService workflowRuntimeService;
    private final WorkflowRuntimeNodeService workflowRuntimeNodeService;
    // Compiled graph; created in run() and reused by resume().
    private CompiledGraph<WfNodeState> app;
    private SseEmitter sseEmitter;
    private User user;
    private WfState wfState;
    private WfRuntimeResp wfRuntimeResp;

    public WorkflowEngine(
            Workflow workflow,
            SSEEmitterHelper sseEmitterHelper,
            List<WorkflowComponent> components,
            List<WorkflowNode> nodes,
            List<WorkflowEdge> wfEdges,
            WorkflowRuntimeService workflowRuntimeService,
            WorkflowRuntimeNodeService workflowRuntimeNodeService) {
        this.workflow = workflow;
        this.sseEmitterHelper = sseEmitterHelper;
        this.components = components;
        this.wfNodes = nodes;
        this.wfEdges = wfEdges;
        this.workflowRuntimeService = workflowRuntimeService;
        this.workflowRuntimeNodeService = workflowRuntimeNodeService;
    }

    /**
     * Runs the workflow: validates the user input against the start node's
     * definition, creates a runtime record, builds and compiles the state graph
     * (with interrupt points for human-feedback nodes) and executes it,
     * streaming progress to {@code sseEmitter}.
     *
     * @param user       user starting the run
     * @param userInputs raw user inputs as JSON objects
     * @param sseEmitter SSE channel used for all progress/output events
     */
    public void run(User user, List<ObjectNode> userInputs, SseEmitter sseEmitter) {
        this.user = user;
        this.sseEmitter = sseEmitter;
        log.info("WorkflowEngine run,userId:{},workflowUuid:{},userInputs:{}", user.getId(), workflow.getUuid(), userInputs);
        if (!this.workflow.getIsEnable()) {
            // Reject disabled workflows: notify the client, then abort the caller too.
            sseEmitterHelper.sendErrorAndComplete(user.getId(), sseEmitter, ErrorEnum.A_WF_DISABLED.getInfo());
            throw new BaseException(ErrorEnum.A_WF_DISABLED.getInfo());
        }
        Long workflowId = this.workflow.getId();
        this.wfRuntimeResp = workflowRuntimeService.create(user, workflowId);
        this.sseEmitterHelper.startSse(user, sseEmitter, JsonUtil.toJson(wfRuntimeResp));
        String runtimeUuid = this.wfRuntimeResp.getUuid();
        try {
            Pair<WorkflowNode, Set<WorkflowNode>> startAndEnds = findStartAndEndNode();
            WorkflowNode startNode = startAndEnds.getLeft();
            List<NodeIOData> wfInputs = getAndCheckUserInput(userInputs, startNode);
            this.wfState = new WfState(user, wfInputs, runtimeUuid);
            workflowRuntimeService.updateInput(this.wfRuntimeResp.getId(), wfState);
            WorkflowGraphBuilder graphBuilder = new WorkflowGraphBuilder(
                    components,
                    wfNodes,
                    wfEdges,
                    this::runNode,
                    this.wfState);
            StateGraph<WfNodeState> mainStateGraph = graphBuilder.build(startNode);
            MemorySaver saver = new MemorySaver();
            // Human-feedback nodes become interrupt points: the graph pauses before them.
            CompileConfig compileConfig = CompileConfig.builder().checkpointSaver(saver)
                    .interruptBefore(wfState.getInterruptNodes().toArray(String[]::new))
                    .build();
            app = mainStateGraph.compile(compileConfig);
            RunnableConfig invokeConfig = RunnableConfig.builder().build();
            exe(invokeConfig, false);
        } catch (Exception e) {
            errorWhenExe(e);
        }
    }

    /**
     * Executes (or resumes) the compiled graph and finalizes the run: either the
     * graph finishes and the output is sent with a completion event, or it hits
     * an interrupt node and the engine is parked waiting for user feedback.
     *
     * @param invokeConfig graph invocation config
     * @param resume       true when continuing after a human-feedback interrupt
     */
    private void exe(RunnableConfig invokeConfig, boolean resume) {
        // We do not use langgraph4j's state-update mechanism for business data,
        // so no input map is needed; null on resume continues from the checkpoint.
        AsyncGenerator<NodeOutput<WfNodeState>> outputs = app.stream(resume ? null : Map.of(), invokeConfig);
        streamingResult(wfState, outputs, sseEmitter);
        StateSnapshot<WfNodeState> stateSnapshot = app.getState(invokeConfig);
        String nextNode = stateSnapshot.config().nextNode().orElse("");
        // A pending non-END next node means the graph was interrupted and is
        // waiting for user input before it can continue.
        if (StringUtils.isNotBlank(nextNode) && !nextNode.equalsIgnoreCase(END)) {
            String intTip = WorkflowUtil.getHumanFeedbackTip(nextNode, wfNodes);
            // Send the wait-for-feedback event plus its prompt to the client.
            SSEEmitterHelper.parseAndSendPartialMsg(sseEmitter, "[NODE_WAIT_FEEDBACK_BY_" + nextNode + "]", intTip);
            InterruptedFlow.RUNTIME_TO_GRAPH.put(wfState.getUuid(), this);
            // Mark the run as waiting for input.
            wfState.setProcessStatus(WORKFLOW_PROCESS_STATUS_WAITING_INPUT);
            workflowRuntimeService.updateOutput(wfRuntimeResp.getId(), wfState);
        } else {
            WorkflowRuntime updatedRuntime = workflowRuntimeService.updateOutput(wfRuntimeResp.getId(), wfState);
            sseEmitterHelper.sendComplete(user.getId(), sseEmitter, updatedRuntime.getOutput());
            InterruptedFlow.RUNTIME_TO_GRAPH.remove(wfState.getUuid());
        }
    }

    /**
     * When the flow is paused waiting for user input, this is called with the
     * user's answer to execute the remaining part of the flow.
     *
     * @param userInput the user's feedback
     */
    public void resume(String userInput) {
        RunnableConfig invokeConfig = RunnableConfig.builder().build();
        try {
            app.updateState(invokeConfig, Map.of(HUMAN_FEEDBACK_KEY, userInput), null);
            exe(invokeConfig, true);
        } catch (Exception e) {
            errorWhenExe(e);
        } finally {
            // The flow may require feedback several times; only deregister once
            // the whole flow has actually finished.
            if (wfState.getProcessStatus() != WORKFLOW_PROCESS_STATUS_WAITING_INPUT) {
                InterruptedFlow.RUNTIME_TO_GRAPH.remove(wfState.getUuid());
            }
        }
    }

    /**
     * Central failure path: logs, rewrites a known langgraph4j message into a
     * user-facing one, notifies the client, and marks the runtime as failed.
     */
    private void errorWhenExe(Exception e) {
        log.error("error", e);
        String errorMsg = e.getMessage();
        // NOTE(review): errorMsg may be null for some exceptions — the contains()
        // call below would then NPE; confirm and guard upstream if needed.
        if (errorMsg.contains("parallel node doesn't support conditional branch")) {
            errorMsg = "并行节点中不能包含条件分<EFBFBD>?";
        }
        sseEmitterHelper.sendErrorAndComplete(user.getId(), sseEmitter, errorMsg);
        workflowRuntimeService.updateStatus(wfRuntimeResp.getId(), WORKFLOW_PROCESS_STATUS_FAIL, errorMsg);
    }

    /**
     * Node-execution callback handed to the graph builder. Creates the node's
     * runtime record, runs the business node, pushes input/output events over
     * SSE, and returns the metadata map langgraph4j needs (next node for
     * conditional edges, streaming generator when the node streams LLM output).
     *
     * @param wfNode    node definition to run
     * @param nodeState langgraph4j state for this node instance
     * @return orchestration metadata ("next", "name", "_streaming_messages")
     */
    private Map<String, Object> runNode(WorkflowNode wfNode, WfNodeState nodeState) {
        Map<String, Object> resultMap = new HashMap<>();
        try {
            WorkflowComponent wfComponent = components.stream().filter(item -> item.getId().equals(wfNode.getWorkflowComponentId())).findFirst().orElseThrow();
            AbstractWfNode abstractWfNode = WfNodeFactory.create(wfComponent, wfNode, wfState, nodeState);
            // Create the persisted node-instance record and announce the node start.
            WfRuntimeNodeDto runtimeNodeDto = workflowRuntimeNodeService.createByState(user, wfNode.getId(), wfRuntimeResp.getId(), nodeState);
            wfState.getRuntimeNodes().add(runtimeNodeDto);
            SSEEmitterHelper.parseAndSendPartialMsg(sseEmitter, "[NODE_RUN_" + wfNode.getUuid() + "]", JsonUtil.toJson(runtimeNodeDto));
            NodeProcessResult processResult = abstractWfNode.process((is) -> {
                // Before-process callback: persist inputs, then stream them to the client.
                workflowRuntimeNodeService.updateInput(runtimeNodeDto.getId(), nodeState);
                List<NodeIOData> nodeIODataList = nodeState.getInputs();
                // NOTE(review): commented-out legacy input-filtering logic kept as-is below.
                // if (!wfNode.getWorkflowComponentId().equals(1L)) {
                // String inputConfig = wfNode.getInputConfig();
                // WfNodeInputConfig nodeInputConfig = NodeInputConfigTypeHandler.fillNodeInputConfig(inputConfig);
                // List<WfNodeParamRef> refInputs = nodeInputConfig.getRefInputs();
                // Set<String> nameSet = CollStreamUtil.toSet(refInputs, WfNodeParamRef::getNodeParamName);
                // if (CollUtil.isNotEmpty(nameSet)) {
                // nodeIODataList = nodeIODataList.stream().filter(item -> nameSet.contains(item.getName()))
                // .collect(Collectors.toList());
                // } else {
                // nodeIODataList = nodeIODataList.stream().filter(item -> item.getName().contains("input"))
                // .collect(Collectors.toList());
                // }
                // }
                for (NodeIOData input : nodeIODataList) {
                    String inputConfig = wfNode.getInputConfig();
                    WfNodeInputConfig nodeInputConfig = NodeInputConfigTypeHandler.fillNodeInputConfig(inputConfig);
                    List<WfNodeParamRef> refInputs = nodeInputConfig.getRefInputs();
                    // Skip the generic "input" param when the node uses referenced inputs.
                    if (CollUtil.isNotEmpty(refInputs) && "input".equals(input.getName())) {
                        continue;
                    }
                    SSEEmitterHelper.parseAndSendPartialMsg(sseEmitter, "[NODE_INPUT_" + wfNode.getUuid() + "]", JsonUtil.toJson(input));
                }
            }, (is) -> {
                // After-process callback: persist outputs and push them to the client.
                workflowRuntimeNodeService.updateOutput(runtimeNodeDto.getId(), nodeState);
                // Nodes inside a parallel branch must actively push their outputs
                // to the client once they finish.
                String nodeUuid = wfNode.getUuid();
                List<NodeIOData> nodeOutputs = nodeState.getOutputs();
                for (NodeIOData output : nodeOutputs) {
                    log.info("callback node:{},output:{}", nodeUuid, output.getContent());
                    SSEEmitterHelper.parseAndSendPartialMsg(sseEmitter, "[NODE_OUTPUT_" + nodeUuid + "]", JsonUtil.toJson(output));
                }
            });
            if (StringUtils.isNotBlank(processResult.getNextNodeUuid())) {
                resultMap.put("next", processResult.getNextNodeUuid());
            }
        } catch (Exception e) {
            log.error("Node run error", e);
            throw new BaseException(ErrorEnum.B_WF_RUN_ERROR.getInfo());
        }
        resultMap.put("name", wfNode.getTitle());
        // The langgraph4j state data map stores orchestration metadata only,
        // not business data.
        StreamingChatGenerator<AgentState> generator = wfState.getNodeToStreamingGenerator().get(wfNode.getUuid());
        if (null != generator) {
            resultMap.put("_streaming_messages", generator);
            return resultMap;
        }
        return resultMap;
    }

    /**
     * Drains the graph's output stream: forwards streaming LLM chunks to the
     * client and persists completed nodes' outputs.
     *
     * @param wfState    run state
     * @param outputs    graph output generator
     * @param sseEmitter sse emitter
     */
    private void streamingResult(WfState wfState, AsyncGenerator<NodeOutput<WfNodeState>> outputs, SseEmitter sseEmitter) {
        for (NodeOutput<WfNodeState> out : outputs) {
            if (out instanceof StreamingOutput<WfNodeState> streamingOutput) {
                String node = streamingOutput.node();
                String chunk = streamingOutput.chunk();
                log.info("node:{},chunk:{}", node, chunk);
                // NOTE(review): strMap is built but unused since the call below was
                // commented out — candidate for removal.
                Map<String, String> strMap = new HashMap<>();
                strMap.put("ck", chunk);
                // SSEEmitterHelper.parseAndSendPartialMsg(sseEmitter, "[NODE_CHUNK_" + node + "]", strMap.toString());
                SSEEmitterHelper.parseAndSendPartialMsg(sseEmitter, "[NODE_CHUNK_" + node + "]", chunk);
            } else {
                AbstractWfNode abstractWfNode = wfState.getCompletedNodes().stream()
                        .filter(item -> item.getNode().getUuid().endsWith(out.node())).findFirst().orElse(null);
                if (null != abstractWfNode) {
                    WfRuntimeNodeDto runtimeNodeDto = wfState.getRuntimeNodeByNodeUuid(out.node());
                    if (null != runtimeNodeDto) {
                        workflowRuntimeNodeService.updateOutput(runtimeNodeDto.getId(), abstractWfNode.getState());
                        // Keep the run-level output in sync with the latest completed node.
                        wfState.setOutput(abstractWfNode.getState().getOutputs());
                    } else {
                        log.warn("Can not find runtime node, node uuid:{}", out.node());
                    }
                } else {
                    log.warn("Can not find node state,node uuid:{}", out.node());
                }
            }
        }
    }

    /**
     * Validates the user input against the start node's parameter definitions
     * and assembles it into the workflow's input list.
     *
     * @param userInputs raw user inputs
     * @param startNode  start node definition
     * @return validated workflow input list
     */
    private List<NodeIOData> getAndCheckUserInput(List<ObjectNode> userInputs, WorkflowNode startNode) {
        WfNodeInputConfig wfNodeInputConfig = NodeInputConfigTypeHandler.fillNodeInputConfig(startNode.getInputConfig());
        List<WfNodeIO> defList = wfNodeInputConfig.getUserInputs();
        defList = CollStreamUtil.toList(defList, Function.identity());
        List<NodeIOData> wfInputs = new ArrayList<>();
        for (WfNodeIO paramDefinition : defList) {
            String paramNameFromDef = paramDefinition.getName();
            // Assume a required param is missing until a matching user input is found.
            boolean requiredParamMissing = paramDefinition.getRequired();
            for (ObjectNode userInput : userInputs) {
                NodeIOData nodeIOData = WfNodeIODataUtil.createNodeIOData(userInput);
                if (!paramNameFromDef.equalsIgnoreCase(nodeIOData.getName())) {
                    continue;
                }
                Integer dataType = nodeIOData.getContent().getType();
                if (null == dataType) {
                    throw new BaseException(A_WF_INPUT_INVALID.getInfo());
                }
                requiredParamMissing = false;
                boolean valid = paramDefinition.checkValue(nodeIOData);
                if (!valid) {
                    log.error("用户输入无效,workflowId:{}", startNode.getWorkflowId());
                    throw new BaseException(ErrorEnum.A_WF_INPUT_INVALID.getInfo());
                }
                wfInputs.add(nodeIOData);
            }
            if (requiredParamMissing) {
                log.error("在流程定义中必填的参数没有传进来,name:{}", paramNameFromDef);
                throw new BaseException(A_WF_INPUT_MISSING.getInfo());
            }
        }
        return wfInputs;
    }

    /**
     * Finds the start node and end nodes. <br/>
     * There must be exactly one start node; there may be several end nodes.
     * Besides nodes using the END component, any node that has incoming edges
     * but no outgoing edges is also treated as an end node.
     *
     * @return pair of (start node, set of end nodes)
     */
    public Pair<WorkflowNode, Set<WorkflowNode>> findStartAndEndNode() {
        WorkflowNode startNode = null;
        Set<WorkflowNode> endNodes = new HashSet<>();
        for (WorkflowNode node : wfNodes) {
            Optional<WorkflowComponent> wfComponent = components.stream().filter(item -> item.getId().equals(node.getWorkflowComponentId())).findFirst();
            if (wfComponent.isPresent() && WfComponentNameEnum.START.getName().equals(wfComponent.get().getName())) {
                if (null != startNode) {
                    throw new BaseException(ErrorEnum.A_WF_MULTIPLE_START_NODE.getInfo());
                }
                startNode = node;
            } else if (wfComponent.isPresent() && WfComponentNameEnum.END.getName().equals(wfComponent.get().getName())) {
                endNodes.add(node);
            }
        }
        if (null == startNode) {
            log.error("没有开始节点, workflowId:{}", wfNodes.get(0).getWorkflowId());
            throw new BaseException(ErrorEnum.A_WF_START_NODE_NOT_FOUND.getInfo());
        }
        // Also collect implicit end nodes: targets of edges that are never a source.
        wfNodes.forEach(item -> {
            String nodeUuid = item.getUuid();
            boolean source = false;
            boolean target = false;
            for (WorkflowEdge edgeDef : wfEdges) {
                if (edgeDef.getSourceNodeUuid().equals(nodeUuid)) {
                    source = true;
                } else if (edgeDef.getTargetNodeUuid().equals(nodeUuid)) {
                    target = true;
                }
            }
            if (!source && target) {
                endNodes.add(item);
            }
        });
        log.info("start node:{}", startNode);
        log.info("end nodes:{}", endNodes);
        if (endNodes.isEmpty()) {
            log.error("没有结束节点,workflowId:{}", startNode.getWorkflowId());
            throw new BaseException(A_WF_END_NODE_NOT_FOUND.getInfo());
        }
        return Pair.of(startNode, endNodes);
    }

    /**
     * Returns the compiled graph (null until {@link #run} has built it).
     */
    public CompiledGraph<WfNodeState> getApp() {
        return app;
    }
}

View File

@@ -0,0 +1,257 @@
package org.ruoyi.workflow.workflow;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.bsc.langgraph4j.GraphStateException;
import org.bsc.langgraph4j.StateGraph;
import org.bsc.langgraph4j.serializer.std.ObjectStreamStateSerializer;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.entity.WorkflowEdge;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.enums.ErrorEnum;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.bsc.langgraph4j.StateGraph.END;
import static org.bsc.langgraph4j.StateGraph.START;
import static org.bsc.langgraph4j.action.AsyncEdgeAction.edge_async;
import static org.bsc.langgraph4j.action.AsyncNodeAction.node_async;
import static org.ruoyi.workflow.workflow.WfComponentNameEnum.HUMAN_FEEDBACK;
/**
 * Builds the LangGraph4j state graph that a workflow run depends on.
 * <p>
 * Works in two passes: first the node/edge definitions are folded into a tree of
 * {@code CompileNode}s (with {@code GraphCompileNode} wrapping parallel branches
 * as subgraphs), then that tree is translated into {@link StateGraph} nodes and
 * (conditional) edges. Human-feedback nodes are registered as interrupt points.
 */
@Slf4j
public class WorkflowGraphBuilder {
    // Component id => component definition.
    private final Map<Long, WorkflowComponent> componentIndex;
    // Node uuid => node definition.
    private final Map<String, WorkflowNode> nodeIndex;
    // Source node uuid => outgoing edges.
    private final Map<String, List<WorkflowEdge>> edgesBySource;
    // Target node uuid => incoming edges.
    private final Map<String, List<WorkflowEdge>> edgesByTarget;
    // Callback that actually executes a business node.
    private final WorkflowNodeRunner nodeRunner;
    private final WfState wfState;
    private final ObjectStreamStateSerializer<WfNodeState> stateSerializer = new ObjectStreamStateSerializer<>(WfNodeState::new);
    // Dedup bookkeeping: node uuid => state graphs it was already added to.
    private final Map<String, List<StateGraph<WfNodeState>>> stateGraphNodes = new HashMap<>();
    // Dedup bookkeeping: "source_target" => state graphs the edge was already added to.
    private final Map<String, List<StateGraph<WfNodeState>>> stateGraphEdges = new HashMap<>();
    // Parallel-branch root uuid => id of the compiled subgraph node.
    private final Map<String, String> rootToSubGraph = new HashMap<>();
    // Node uuid => the parallel-branch wrapper that starts at that node.
    private final Map<String, GraphCompileNode> nodeToParallelBranch = new HashMap<>();

    public WorkflowGraphBuilder(
            List<WorkflowComponent> components,
            List<WorkflowNode> nodes,
            List<WorkflowEdge> edges,
            WorkflowNodeRunner nodeRunner,
            WfState wfState) {
        this.componentIndex = components.stream()
                .collect(Collectors.toMap(WorkflowComponent::getId, Function.identity(), (origin, ignore) -> origin));
        this.nodeIndex = nodes.stream()
                .collect(Collectors.toMap(WorkflowNode::getUuid, Function.identity(), (origin, ignore) -> origin));
        this.edgesBySource = edges.stream().collect(Collectors.groupingBy(WorkflowEdge::getSourceNodeUuid));
        this.edgesByTarget = edges.stream().collect(Collectors.groupingBy(WorkflowEdge::getTargetNodeUuid));
        this.nodeRunner = nodeRunner;
        this.wfState = wfState;
    }

    /**
     * Builds the main state graph, starting from the workflow's start node.
     *
     * @param startNode validated start node of the workflow
     * @return uncompiled state graph
     * @throws GraphStateException on invalid graph structure
     */
    public StateGraph<WfNodeState> build(WorkflowNode startNode) throws GraphStateException {
        CompileNode rootCompileNode = new CompileNode();
        rootCompileNode.setId(startNode.getUuid());
        buildCompileNode(rootCompileNode, startNode);
        StateGraph<WfNodeState> mainStateGraph = new StateGraph<>(stateSerializer);
        wfState.addEdge(START, startNode.getUuid());
        buildStateGraph(null, mainStateGraph, rootCompileNode);
        return mainStateGraph;
    }

    /**
     * First pass (recursive): folds the workflow node {@code node} and its
     * downstream nodes into the compile-node tree rooted at {@code parentNode},
     * wrapping parallel branches into {@code GraphCompileNode}s.
     */
    private void buildCompileNode(CompileNode parentNode, WorkflowNode node) {
        log.info("buildCompileNode, parentNode:{}, node:{}, title:{}", parentNode.getId(), node.getUuid(), node.getTitle());
        CompileNode newNode;
        List<String> upstreamNodeUuids = getUpstreamNodeUuids(node.getUuid());
        if (upstreamNodeUuids.isEmpty()) {
            // No upstream node: this is the start node, reuse the parent as-is.
            log.error("节点{}没有上游节点", node.getUuid());
            newNode = parentNode;
        } else if (upstreamNodeUuids.size() == 1) {
            String upstreamUuid = upstreamNodeUuids.get(0);
            boolean pointToParallel = pointToParallelBranch(upstreamUuid);
            if (pointToParallel) {
                // Upstream fans out unconditionally: this node starts a parallel branch.
                String rootId = node.getUuid();
                GraphCompileNode graphCompileNode = getOrCreateGraphCompileNode(rootId);
                appendToNextNodes(parentNode, graphCompileNode);
                newNode = graphCompileNode;
            } else if (parentNode instanceof GraphCompileNode graphCompileNode) {
                // Inside a parallel branch: append to the branch's current leaf.
                newNode = CompileNode.builder().id(node.getUuid()).conditional(false).nextNodes(new ArrayList<>()).build();
                graphCompileNode.appendToLeaf(newNode);
            } else {
                newNode = CompileNode.builder().id(node.getUuid()).conditional(false).nextNodes(new ArrayList<>()).build();
                appendToNextNodes(parentNode, newNode);
            }
        } else {
            // Several upstream nodes: branches merge here; attach to the parallel
            // wrapper if the parent belongs to one, otherwise to the parent itself.
            newNode = CompileNode.builder().id(node.getUuid()).conditional(false).nextNodes(new ArrayList<>()).build();
            GraphCompileNode parallelBranch = nodeToParallelBranch.get(parentNode.getId());
            appendToNextNodes(Objects.requireNonNullElse(parallelBranch, parentNode), newNode);
        }
        if (newNode == null) {
            log.error("节点:{}不存<E4B88D>?", node.getUuid());
            return;
        }
        // Recurse into every downstream node.
        for (String downstream : getDownstreamNodeUuids(node.getUuid())) {
            WorkflowNode downstreamNode = nodeIndex.get(downstream);
            if (downstreamNode != null) {
                buildCompileNode(newNode, downstreamNode);
            }
        }
    }

    /**
     * Second pass (recursive): translates the compile-node tree into state-graph
     * nodes and edges. Parallel branches become compiled subgraphs; a fan-out
     * whose children are all plain nodes becomes a conditional edge routed by the
     * "next" key produced by the node runner.
     */
    private void buildStateGraph(CompileNode upstreamCompileNode,
                                 StateGraph<WfNodeState> stateGraph,
                                 CompileNode compileNode) throws GraphStateException {
        log.info("buildStateGraph, upstream:{}, node:{}", upstreamCompileNode, compileNode.getId());
        String stateGraphNodeUuid = compileNode.getId();
        if (upstreamCompileNode == null) {
            // Root of the graph: link from START.
            addNodeToStateGraph(stateGraph, stateGraphNodeUuid);
            addEdgeToStateGraph(stateGraph, START, compileNode.getId());
        } else {
            if (compileNode instanceof GraphCompileNode graphCompileNode) {
                // Parallel branch: build (or reuse) its subgraph and mount it as one node.
                String stateGraphId = graphCompileNode.getId();
                CompileNode root = graphCompileNode.getRoot();
                String rootId = root.getId();
                String existSubGraphId = rootToSubGraph.get(rootId);
                if (StringUtils.isBlank(existSubGraphId)) {
                    StateGraph<WfNodeState> subgraph = new StateGraph<>(stateSerializer);
                    addNodeToStateGraph(subgraph, rootId);
                    addEdgeToStateGraph(subgraph, START, rootId);
                    for (CompileNode child : root.getNextNodes()) {
                        buildStateGraph(root, subgraph, child);
                    }
                    addEdgeToStateGraph(subgraph, graphCompileNode.getTail().getId(), END);
                    stateGraph.addNode(stateGraphId, subgraph.compile());
                    rootToSubGraph.put(rootId, stateGraphId);
                    stateGraphNodeUuid = stateGraphId;
                } else {
                    stateGraphNodeUuid = existSubGraphId;
                }
            } else {
                addNodeToStateGraph(stateGraph, stateGraphNodeUuid);
            }
            // Conditional upstreams add their own conditional edges; only plain
            // upstreams get a direct edge here.
            if (Boolean.FALSE.equals(upstreamCompileNode.getConditional())) {
                addEdgeToStateGraph(stateGraph, upstreamCompileNode.getId(), stateGraphNodeUuid);
            }
        }
        List<CompileNode> nextNodes = compileNode.getNextNodes();
        if (nextNodes.size() > 1) {
            // Fan-out: conditional only when no child is a parallel branch.
            boolean conditional = nextNodes.stream().noneMatch(item -> item instanceof GraphCompileNode);
            compileNode.setConditional(conditional);
            for (CompileNode nextNode : nextNodes) {
                buildStateGraph(compileNode, stateGraph, nextNode);
            }
            if (conditional) {
                // Route by the "next" value the node runner stores into the state.
                List<String> targets = nextNodes.stream().map(CompileNode::getId).toList();
                Map<String, String> mappings = new HashMap<>();
                for (String target : targets) {
                    mappings.put(target, target);
                }
                stateGraph.addConditionalEdges(
                        stateGraphNodeUuid,
                        edge_async(state -> state.data().get("next").toString()),
                        mappings
                );
            }
        } else if (nextNodes.size() == 1) {
            for (CompileNode nextNode : nextNodes) {
                buildStateGraph(compileNode, stateGraph, nextNode);
            }
        } else {
            // Leaf: connect to END.
            addEdgeToStateGraph(stateGraph, stateGraphNodeUuid, END);
        }
    }

    /**
     * Returns the parallel-branch wrapper rooted at {@code rootId}, creating and
     * caching it on first use.
     */
    private GraphCompileNode getOrCreateGraphCompileNode(String rootId) {
        GraphCompileNode exist = nodeToParallelBranch.get(rootId);
        if (exist == null) {
            GraphCompileNode graphCompileNode = new GraphCompileNode();
            graphCompileNode.setId("parallel_" + rootId);
            graphCompileNode.setRoot(CompileNode.builder().id(rootId).conditional(false).nextNodes(new ArrayList<>()).build());
            nodeToParallelBranch.put(rootId, graphCompileNode);
            exist = graphCompileNode;
        }
        return exist;
    }

    // Uuids of nodes with an edge pointing at the given node.
    private List<String> getUpstreamNodeUuids(String nodeUuid) {
        return edgesByTarget.getOrDefault(nodeUuid, List.of())
                .stream()
                .map(WorkflowEdge::getSourceNodeUuid)
                .toList();
    }

    // Uuids of nodes the given node points at.
    private List<String> getDownstreamNodeUuids(String nodeUuid) {
        return edgesBySource.getOrDefault(nodeUuid, List.of())
                .stream()
                .map(WorkflowEdge::getTargetNodeUuid)
                .toList();
    }

    /**
     * A node starts a parallel branch when it has more than one outgoing edge
     * without a source handle (i.e. unconditional fan-out, not a Switcher).
     */
    private boolean pointToParallelBranch(String nodeUuid) {
        return edgesBySource.getOrDefault(nodeUuid, List.of())
                .stream()
                .filter(edge -> StringUtils.isBlank(edge.getSourceHandle()))
                .count() > 1;
    }

    /**
     * Adds a workflow node to the given state graph exactly once (dedup by
     * identity of the graph), wiring it to the node runner; registers
     * human-feedback nodes as interrupt points on the run state.
     */
    private void addNodeToStateGraph(StateGraph<WfNodeState> stateGraph, String stateGraphNodeUuid) throws GraphStateException {
        List<StateGraph<WfNodeState>> stateGraphList = stateGraphNodes.computeIfAbsent(stateGraphNodeUuid, k -> new ArrayList<>());
        boolean exist = stateGraphList.stream().anyMatch(item -> item == stateGraph);
        if (exist) {
            log.info("state graph node exist,stateGraphNodeUuid:{}", stateGraphNodeUuid);
            return;
        }
        log.info("addNodeToStateGraph,node uuid:{}", stateGraphNodeUuid);
        WorkflowNode wfNode = getNodeByUuid(stateGraphNodeUuid);
        stateGraph.addNode(stateGraphNodeUuid, node_async(state -> nodeRunner.run(wfNode, state)));
        stateGraphList.add(stateGraph);
        WorkflowComponent component = componentIndex.get(wfNode.getWorkflowComponentId());
        if (component == null) {
            throw new BaseException(ErrorEnum.A_PARAMS_ERROR.getInfo());
        }
        if (HUMAN_FEEDBACK.getName().equals(component.getName())) {
            wfState.addInterruptNode(stateGraphNodeUuid);
        }
    }

    /**
     * Adds an edge to the given state graph exactly once (dedup by identity of
     * the graph and the "source_target" key).
     */
    private void addEdgeToStateGraph(StateGraph<WfNodeState> stateGraph, String source, String target) throws GraphStateException {
        String key = source + "_" + target;
        List<StateGraph<WfNodeState>> stateGraphList = stateGraphEdges.computeIfAbsent(key, k -> new ArrayList<>());
        boolean exist = stateGraphList.stream().anyMatch(item -> item == stateGraph);
        if (exist) {
            log.info("state graph edge exist,source:{},target:{}", source, target);
            return;
        }
        log.info("addEdgeToStateGraph,source:{},target:{}", source, target);
        stateGraph.addEdge(source, target);
        stateGraphList.add(stateGraph);
    }

    // Node lookup that fails loudly when the uuid is unknown.
    private WorkflowNode getNodeByUuid(String nodeUuid) {
        WorkflowNode workflowNode = nodeIndex.get(nodeUuid);
        if (workflowNode == null) {
            throw new BaseException(ErrorEnum.A_WF_NODE_NOT_FOUND.getInfo());
        }
        return workflowNode;
    }

    // Appends newNode as a child of compileNode unless a child with that id already exists.
    private void appendToNextNodes(CompileNode compileNode, CompileNode newNode) {
        boolean exist = compileNode.getNextNodes().stream().anyMatch(item -> item.getId().equals(newNode.getId()));
        if (!exist) {
            compileNode.getNextNodes().add(newNode);
        }
    }
}

View File

@@ -0,0 +1,14 @@
package org.ruoyi.workflow.workflow;
import org.ruoyi.workflow.entity.WorkflowNode;
import java.util.Map;
/**
 * Callback responsible for executing a business node and returning the
 * orchestration metadata required downstream (e.g. the "next" node uuid for
 * conditional edges, or a streaming generator for LLM output).
 */
@FunctionalInterface
public interface WorkflowNodeRunner {
    /**
     * Runs the given node against its instance state.
     *
     * @param node      node definition to execute
     * @param nodeState langgraph4j state for this node instance
     * @return orchestration metadata for the graph engine
     */
    Map<String, Object> run(WorkflowNode node, WfNodeState nodeState);
}

View File

@@ -0,0 +1,95 @@
package org.ruoyi.workflow.workflow;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.entity.*;
import org.ruoyi.workflow.helper.SSEEmitterHelper;
import org.ruoyi.workflow.service.*;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import java.util.List;
import static org.ruoyi.workflow.cosntant.AdiConstant.SSE_TIMEOUT;
import static org.ruoyi.workflow.enums.ErrorEnum.*;
/**
 * Entry point for starting and resuming workflow runs.
 * <p>
 * Creates the SSE channel, loads the workflow definition (components, nodes,
 * edges) and hands execution off to a fresh {@link WorkflowEngine} on an
 * async thread so the HTTP thread can return the emitter immediately.
 */
@Slf4j
@Component
public class WorkflowStarter {
    // Self-injection (@Lazy to break the cycle) so that calls to @Async methods
    // go through the Spring proxy and actually run asynchronously.
    @Lazy
    @Resource
    private WorkflowStarter self;
    @Resource
    private WorkflowService workflowService;
    @Resource
    private WorkflowNodeService workflowNodeService;
    @Resource
    private WorkflowEdgeService workflowEdgeService;
    @Resource
    private WorkflowComponentService workflowComponentService;
    @Resource
    private WorkflowRuntimeService workflowRuntimeService;
    @Resource
    private WorkflowRuntimeNodeService workflowRuntimeNodeService;
    @Resource
    private SSEEmitterHelper sseEmitterHelper;

    /**
     * Starts a workflow run and returns the SSE emitter the run streams to.
     * Validation failures (unknown or disabled workflow, emitter check) are
     * reported over the emitter rather than thrown.
     *
     * @param user         user starting the run
     * @param workflowUuid uuid of the workflow to run
     * @param userInputs   raw user inputs as JSON objects
     * @return SSE emitter carrying run events
     */
    public SseEmitter streaming(User user, String workflowUuid, List<ObjectNode> userInputs) {
        SseEmitter sseEmitter = new SseEmitter(SSE_TIMEOUT);
        if (!sseEmitterHelper.checkOrComplete(user, sseEmitter)) {
            return sseEmitter;
        }
        Workflow workflow = workflowService.getByUuid(workflowUuid);
        if (null == workflow) {
            sseEmitterHelper.sendErrorAndComplete(user.getId(), sseEmitter, A_WF_NOT_FOUND.getInfo());
            return sseEmitter;
        } else if (Boolean.FALSE.equals(workflow.getIsEnable())) {
            sseEmitterHelper.sendErrorAndComplete(user.getId(), sseEmitter, A_WF_DISABLED.getInfo());
            return sseEmitter;
        }
        // Call through the proxy (self) so @Async takes effect.
        self.asyncRun(user, workflow, userInputs, sseEmitter);
        return sseEmitter;
    }

    /**
     * Loads the workflow's components/nodes/edges and runs a new engine on an
     * async executor thread; all progress is reported via {@code sseEmitter}.
     */
    @Async
    public void asyncRun(User user, Workflow workflow, List<ObjectNode> userInputs, SseEmitter sseEmitter) {
        log.info("WorkflowEngine run,userId:{},workflowUuid:{},userInputs:{}", user.getId(), workflow.getUuid(), userInputs);
        List<WorkflowComponent> components = workflowComponentService.getAllEnable();
        List<WorkflowNode> nodes = workflowNodeService.lambdaQuery()
                .eq(WorkflowNode::getWorkflowId, workflow.getId())
                .eq(WorkflowNode::getIsDeleted, false)
                .list();
        List<WorkflowEdge> edges = workflowEdgeService.lambdaQuery()
                .eq(WorkflowEdge::getWorkflowId, workflow.getId())
                .eq(WorkflowEdge::getIsDeleted, false)
                .list();
        WorkflowEngine workflowEngine = new WorkflowEngine(workflow,
                sseEmitterHelper, components, nodes, edges,
                workflowRuntimeService, workflowRuntimeNodeService);
        workflowEngine.run(user, userInputs, sseEmitter);
    }

    /**
     * Resumes a run that is parked waiting for human feedback.
     * NOTE(review): this is @Async, so the BaseException thrown here never
     * reaches the HTTP caller — it only surfaces in the async error handler.
     *
     * @param runtimeUuid uuid of the parked runtime
     * @param userInput   the user's feedback
     */
    @Async
    public void resumeFlow(String runtimeUuid, String userInput) {
        WorkflowEngine workflowEngine = InterruptedFlow.RUNTIME_TO_GRAPH.get(runtimeUuid);
        if (null == workflowEngine) {
            log.error("工作流恢复执行时失败,runtime:{}", runtimeUuid);
            throw new BaseException(A_WF_RESUME_FAIL.getInfo());
        }
        workflowEngine.resume(userInput);
    }
}

View File

@@ -0,0 +1,162 @@
package org.ruoyi.workflow.workflow;
import cn.hutool.core.collection.CollStreamUtil;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
import dev.langchain4j.data.message.UserMessage;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.bsc.langgraph4j.langchain4j.generators.StreamingChatGenerator;
import org.bsc.langgraph4j.state.AgentState;
import org.ruoyi.chat.factory.ChatServiceFactory;
import org.ruoyi.chat.service.chat.IChatService;
import org.ruoyi.common.chat.entity.chat.Message;
import org.ruoyi.common.chat.request.ChatRequest;
import org.ruoyi.workflow.base.NodeInputConfigTypeHandler;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataContent;
import org.ruoyi.workflow.workflow.def.WfNodeParamRef;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.DEFAULT_OUTPUT_PARAM_NAME;
/**
 * Workflow helper utilities: template rendering, human-feedback tips and
 * streaming LLM invocation for workflow nodes.
 */
@Slf4j
@Component
public class WorkflowUtil {

    @Resource
    private ChatServiceFactory chatServiceFactory;

    /**
     * Replace every "{name}" placeholder in the template with the string form
     * of the matching input/output value.
     *
     * @param template template text containing "{paramName}" placeholders
     * @param values   node IO data used to fill the placeholders
     * @return the rendered text
     */
    @SuppressWarnings("unchecked")
    public static String renderTemplate(String template, List<NodeIOData> values) {
        String result = template;
        for (NodeIOData next : values) {
            String name = next.getName();
            NodeIODataContent<?> dataContent = next.getContent();
            Object rawValue = dataContent.getValue();
            String replacement;
            if (dataContent.getType().equals(WfIODataTypeEnum.FILES.getValue())) {
                List<String> value = (List<String>) rawValue;
                replacement = null == value ? "" : String.join(",", value);
            } else if (dataContent.getType().equals(WfIODataTypeEnum.OPTIONS.getValue())) {
                Map<String, Object> value = (Map<String, Object>) rawValue;
                replacement = null == value ? "" : value.toString();
            } else {
                // Guard against null values so rendering never throws an NPE.
                replacement = null == rawValue ? "" : rawValue.toString();
            }
            result = result.replace("{" + name + "}", replacement);
        }
        return result;
    }

    /**
     * Get the tip text configured on a human-feedback node.
     *
     * @param nodeUuid uuid of the node to look up
     * @param wfNodes  all nodes of the workflow
     * @return the configured tip, or "" when the node or its config is missing
     */
    public static String getHumanFeedbackTip(String nodeUuid, List<WorkflowNode> wfNodes) {
        WorkflowNode wfNode = wfNodes.stream()
                .filter(item -> item.getUuid().equals(nodeUuid))
                .findFirst().orElse(null);
        if (null == wfNode) {
            return "";
        }
        String wfNodeNodeConfig = wfNode.getNodeConfig();
        if (StrUtil.isBlank(wfNodeNodeConfig)) {
            return "";
        }
        Map<String, Object> map = JsonUtil.toMap(wfNodeNodeConfig);
        Object tip = map.getOrDefault("tip", "");
        return String.valueOf(tip);
    }

    /**
     * Stream an LLM call for the given node: the generator collects the streamed
     * answer, stores it as the node's default output, and is registered on the
     * workflow state so the engine can drain it.
     */
    public void streamingInvokeLLM(WfState wfState, WfNodeState state, WorkflowNode node, String category,
                                   String modelName, List<UserMessage> systemMessage) {
        log.info("stream invoke, category: {}, modelName: {}", category, modelName);
        // 根据 category 获取对应的 ChatService不使用计费代理工作流场景单独计费
        IChatService chatService = chatServiceFactory.getOriginalService(category);
        StreamingChatGenerator<AgentState> streamingGenerator = StreamingChatGenerator.builder()
                .mapResult(response -> {
                    String responseTxt = response.aiMessage().text();
                    log.info("llm response:{}", responseTxt);
                    NodeIOData output = NodeIOData.createByText(DEFAULT_OUTPUT_PARAM_NAME, "", responseTxt);
                    wfState.getNodeStateByNodeUuid(node.getUuid()).ifPresent(item -> item.getOutputs().add(output));
                    return Map.of("completeResult", responseTxt);
                })
                .startingNode(node.getUuid())
                .startingState(state)
                .build();
        // 构建 ruoyi-ai 的 ChatRequest
        List<Message> messages = new ArrayList<>();
        addUserMessage(node, state.getInputs(), messages);
        addSystemMessage(systemMessage, messages);
        ChatRequest chatRequest = new ChatRequest();
        chatRequest.setModel(modelName);
        chatRequest.setMessages(messages);
        // 使用工作流专用方法
        chatService.chat(chatRequest, streamingGenerator.handler());
        wfState.getNodeToStreamingGenerator().put(node.getUuid(), streamingGenerator);
    }

    /**
     * 添加用户信息 — select the inputs declared as ref-inputs of the node; if none
     * matched, fall back to the default "input" parameter.
     *
     * @param node        current node (provides the input config)
     * @param userMessage resolved node inputs
     * @param messages    target chat message list (appended to)
     */
    private void addUserMessage(WorkflowNode node, List<NodeIOData> userMessage, List<Message> messages) {
        if (CollUtil.isEmpty(userMessage)) {
            return;
        }
        WfNodeInputConfig nodeInputConfig = NodeInputConfigTypeHandler.fillNodeInputConfig(node.getInputConfig());
        List<WfNodeParamRef> refInputs = nodeInputConfig.getRefInputs();
        Set<String> nameSet = CollStreamUtil.toSet(refInputs, WfNodeParamRef::getName);
        // Bug fix: the chat role must be "user" — the previous literal "role" is not a valid chat role.
        userMessage.stream().filter(item -> nameSet.contains(item.getName()))
                .map(item -> getMessage("user", item.getContent().getValue())).forEach(messages::add);
        if (CollUtil.isNotEmpty(messages)) {
            return;
        }
        userMessage.stream().filter(item -> "input".equals(item.getName()))
                .map(item -> getMessage("user", item.getContent().getValue())).forEach(messages::add);
    }

    /**
     * 组装message对象
     *
     * @param role  chat role ("user"/"system"/"assistant")
     * @param value message content; converted via String.valueOf
     * @return the assembled message
     */
    private Message getMessage(String role, Object value) {
        Message message = new Message();
        message.setContent(String.valueOf(value));
        message.setRole(role);
        return message;
    }

    /**
     * 添加系统信息
     *
     * @param systemMessage system prompts to append (as "system" role messages)
     * @param messages      target chat message list
     */
    private void addSystemMessage(List<UserMessage> systemMessage, List<Message> messages) {
        if (CollUtil.isEmpty(systemMessage)) {
            return;
        }
        systemMessage.stream().map(userMsg -> getMessage("system", userMsg.singleText())).forEach(messages::add);
    }
}

View File

@@ -0,0 +1,63 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Builder;
import lombok.Data;
import java.io.Serial;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
 * 工作流节点输入输出数据
 * A named input/output value flowing between workflow nodes; the payload is
 * carried by a typed {@link NodeIODataContent}.
 */
@Builder
@Data
public class NodeIOData implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    // parameter name used for placeholder substitution and input matching
    protected String name;
    // typed payload (text/number/bool/files/options)
    protected NodeIODataContent<?> content;

    /** Create a text parameter. */
    public static NodeIOData createByText(String name, String title, String value) {
        NodeIODataTextContent dataContent = new NodeIODataTextContent();
        dataContent.setValue(value);
        dataContent.setTitle(title);
        return NodeIOData.builder().name(name).content(dataContent).build();
    }

    /** Create a number parameter. */
    public static NodeIOData createByNumber(String name, String title, Double value) {
        NodeIODataNumberContent dataContent = new NodeIODataNumberContent();
        dataContent.setValue(value);
        dataContent.setTitle(title);
        return NodeIOData.builder().name(name).content(dataContent).build();
    }

    /** Create a boolean parameter. */
    public static NodeIOData createByBool(String name, String title, Boolean value) {
        NodeIODataBoolContent dataContent = new NodeIODataBoolContent();
        dataContent.setValue(value);
        dataContent.setTitle(title);
        return NodeIOData.builder().name(name).content(dataContent).build();
    }

    /** Create a file-list parameter. */
    public static NodeIOData createByFiles(String name, String title, List<String> value) {
        NodeIODataFilesContent dataContent = new NodeIODataFilesContent();
        dataContent.setValue(value);
        dataContent.setTitle(title);
        return NodeIOData.builder().name(name).content(dataContent).build();
    }

    /** Create an options (map) parameter. */
    public static NodeIOData createByOptions(String name, String title, Map<String, Object> value) {
        NodeIODataOptionsContent dataContent = new NodeIODataOptionsContent();
        dataContent.setValue(value);
        dataContent.setTitle(title);
        return NodeIOData.builder().name(name).content(dataContent).build();
    }

    /**
     * String form of the carried value; returns "" when content or value is
     * absent (previously threw an NPE on a null value).
     */
    public String valueToString() {
        if (null == content || null == content.getValue()) {
            return "";
        }
        return content.getValue().toString();
    }
}

View File

@@ -0,0 +1,22 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import java.io.Serial;
import java.io.Serializable;
@EqualsAndHashCode(callSuper = true)
@Data
public class NodeIODataBoolContent extends NodeIODataContent<Boolean> implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    // NOTE(review): title/type/value shadow the same-named fields declared in NodeIODataContent;
    // the Lombok accessors generated here override the parent's, leaving the parent fields unused —
    // confirm this duplication is intentional.
    private String title;
    // type discriminator, fixed to BOOL
    private Integer type = WfIODataTypeEnum.BOOL.getValue();
    private Boolean value;
}

View File

@@ -0,0 +1,13 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
@Data
public abstract class NodeIODataContent<T> {
    // display title of the parameter
    private String title;
    // data-type discriminator, see WfIODataTypeEnum
    private Integer type;
    // actual payload value
    private T value;
}

View File

@@ -0,0 +1,23 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import java.io.Serial;
import java.io.Serializable;
import java.util.List;
@EqualsAndHashCode(callSuper = true)
@Data
public class NodeIODataFilesContent extends NodeIODataContent<List<String>> implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    // NOTE(review): title/type/value shadow the same-named fields declared in NodeIODataContent;
    // the Lombok accessors generated here override the parent's — confirm this duplication is intentional.
    private String title;
    // type discriminator, fixed to FILES
    private Integer type = WfIODataTypeEnum.FILES.getValue();
    // list of file references (presumably URLs or paths — TODO confirm against producers)
    private List<String> value;
}

View File

@@ -0,0 +1,22 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import java.io.Serial;
import java.io.Serializable;
@EqualsAndHashCode(callSuper = true)
@Data
public class NodeIODataNumberContent extends NodeIODataContent<Double> implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    // NOTE(review): title/type/value shadow the same-named fields declared in NodeIODataContent;
    // the Lombok accessors generated here override the parent's — confirm this duplication is intentional.
    private String title;
    // type discriminator, fixed to NUMBER
    private Integer type = WfIODataTypeEnum.NUMBER.getValue();
    private Double value;
}

View File

@@ -0,0 +1,23 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import java.io.Serial;
import java.io.Serializable;
import java.util.Map;
@EqualsAndHashCode(callSuper = true)
@Data
public class NodeIODataOptionsContent extends NodeIODataContent<Map<String, Object>> implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    // NOTE(review): title/type/value shadow the same-named fields declared in NodeIODataContent;
    // the Lombok accessors generated here override the parent's — confirm this duplication is intentional.
    private String title;
    // type discriminator, fixed to OPTIONS
    private Integer type = WfIODataTypeEnum.OPTIONS.getValue();
    // selected options as key/value pairs
    private Map<String, Object> value;
}

View File

@@ -0,0 +1,22 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import java.io.Serial;
import java.io.Serializable;
@EqualsAndHashCode(callSuper = true)
@Data
public class NodeIODataTextContent extends NodeIODataContent<String> implements Serializable {
    @Serial
    private static final long serialVersionUID = 1L;
    // NOTE(review): title/type/value shadow the same-named fields declared in NodeIODataContent;
    // the Lombok accessors generated here override the parent's — confirm this duplication is intentional.
    private String title;
    // type discriminator, fixed to TEXT
    private Integer type = WfIODataTypeEnum.TEXT.getValue();
    private String value;
}

View File

@@ -0,0 +1,10 @@
package org.ruoyi.workflow.workflow.data;
import lombok.Data;
import java.util.List;
@Data
public class WfUserReq {
    // user-supplied inputs for a workflow run
    private List<NodeIOData> inputs;
}

View File

@@ -0,0 +1,29 @@
package org.ruoyi.workflow.workflow.def;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import java.io.Serializable;
/**
 * 工作流节点输入输出参数定义
 * Base definition of a workflow node input/output parameter; concrete subtypes
 * fix the data type and implement the validity check.
 */
@Data
@NoArgsConstructor
public abstract class WfNodeIO implements Serializable {

    // Consistency: the other Serializable classes in this module declare an explicit serialVersionUID.
    private static final long serialVersionUID = 1L;

    protected String uuid;
    // data-type discriminator, see WfIODataTypeEnum
    protected Integer type;
    protected String name;
    protected String title;
    // whether a value must be supplied; may be null when unset — subclasses must unbox null-safely
    protected Boolean required;

    /**
     * 检查数据是否合规
     *
     * @param data 节点输入输出数据
     * @return 是否正确
     */
    public abstract boolean checkValue(NodeIOData data);
}

View File

@@ -0,0 +1,27 @@
package org.ruoyi.workflow.workflow.def;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataBoolContent;
/**
 * 用户输入参数-布尔类型 参数定义
 */
@EqualsAndHashCode(callSuper = true)
@Data
@NoArgsConstructor
public class WfNodeIOBool extends WfNodeIO {

    protected Integer type = WfIODataTypeEnum.BOOL.getValue();

    /**
     * Valid when the content is boolean-typed and, if the param is required,
     * a value is present.
     */
    @Override
    public boolean checkValue(NodeIOData data) {
        if (!(data.getContent() instanceof NodeIODataBoolContent)) {
            return false;
        }
        // Boolean.TRUE.equals avoids an NPE when "required" was never set (null unboxing).
        return !Boolean.TRUE.equals(required) || null != data.getContent().getValue();
    }
}

View File

@@ -0,0 +1,28 @@
package org.ruoyi.workflow.workflow.def;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.apache.commons.collections4.CollectionUtils;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataFilesContent;
/**
 * 用户输入参数-文件列表类型 参数定义
 */
@EqualsAndHashCode(callSuper = true)
@Data
@NoArgsConstructor
public class WfNodeIOFiles extends WfNodeIO {

    protected Integer type = WfIODataTypeEnum.FILES.getValue();

    // maximum number of files allowed (not enforced in checkValue; presumably checked elsewhere — TODO confirm)
    private Integer limit;

    /**
     * Valid when the content is file-list-typed and, if the param is required,
     * the list is non-empty.
     */
    @Override
    public boolean checkValue(NodeIOData data) {
        if (!(data.getContent() instanceof NodeIODataFilesContent filesContent)) {
            return false;
        }
        // Boolean.TRUE.equals avoids an NPE when "required" was never set (null unboxing).
        return !Boolean.TRUE.equals(required) || !CollectionUtils.isEmpty(filesContent.getValue());
    }
}

View File

@@ -0,0 +1,26 @@
package org.ruoyi.workflow.workflow.def;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataNumberContent;
/**
 * 用户输入参数-数字类型 参数定义
 */
@EqualsAndHashCode(callSuper = true)
@Data
@NoArgsConstructor
public class WfNodeIONumber extends WfNodeIO {

    protected Integer type = WfIODataTypeEnum.NUMBER.getValue();

    /**
     * Valid when the content is number-typed and, if the param is required,
     * a value is present.
     */
    @Override
    public boolean checkValue(NodeIOData data) {
        if (!(data.getContent() instanceof NodeIODataNumberContent numberContent)) {
            return false;
        }
        // Boolean.TRUE.equals avoids an NPE when "required" was never set (null unboxing).
        return !Boolean.TRUE.equals(required) || null != numberContent.getValue();
    }
}

View File

@@ -0,0 +1,34 @@
package org.ruoyi.workflow.workflow.def;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataOptionsContent;
import java.util.Map;
/**
 * 用户输入参数-下拉选项类型 参数定义
 */
@EqualsAndHashCode(callSuper = true)
@Data
@NoArgsConstructor
public class WfNodeIOOptions extends WfNodeIO {

    protected Integer type = WfIODataTypeEnum.OPTIONS.getValue();

    // whether multiple options may be selected; may be null when unset (treated as single-select)
    private Boolean multiple;

    /**
     * Valid when the content is options-typed, a required param has a value,
     * and a single-select param does not carry more than one selection.
     */
    @Override
    public boolean checkValue(NodeIOData data) {
        if (!(data.getContent() instanceof NodeIODataOptionsContent optionsData)) {
            return false;
        }
        Map<String, Object> value = optionsData.getValue();
        // Boolean.TRUE.equals avoids an NPE when "required" was never set (null unboxing).
        if (Boolean.TRUE.equals(required) && null == value) {
            return false;
        }
        //如果设置了单选,传过来的值是多项,则检查不通过
        // Null-safe: an unset "multiple" is treated as single-select (previously NPE'd on null unboxing).
        return Boolean.TRUE.equals(multiple) || null == value || value.size() <= 1;
    }
}

View File

@@ -0,0 +1,35 @@
package org.ruoyi.workflow.workflow.def;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.data.NodeIODataTextContent;
/**
 * 用户输入参数-文本类型 参数定义
 */
@EqualsAndHashCode(callSuper = true)
@Data
@NoArgsConstructor
public class WfNodeIOText extends WfNodeIO {

    protected Integer type = WfIODataTypeEnum.TEXT.getValue();

    // maximum allowed text length; null means unlimited
    @JsonProperty("max_length")
    private Integer maxLength;

    /**
     * Valid when the content is text-typed, a required param has a value,
     * and a present value respects maxLength.
     */
    @Override
    public boolean checkValue(NodeIOData data) {
        if (!(data.getContent() instanceof NodeIODataTextContent textData)) {
            return false;
        }
        String value = textData.getValue();
        // Boolean.TRUE.equals avoids an NPE when "required" was never set (null unboxing).
        if (Boolean.TRUE.equals(required) && null == value) {
            return false;
        }
        // A null (optional) value must not throw when maxLength is configured — the length check
        // only applies to non-null text (previously NPE'd here).
        return null == maxLength || null == value || value.length() <= maxLength;
    }
}

View File

@@ -0,0 +1,23 @@
package org.ruoyi.workflow.workflow.def;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.io.Serializable;
/**
 * 输入参数-引用类型 参数定义<br/>
 * 1.该参数的值是另一个节点的输出/或输入参数<br/>
 * 2.该类型参数只在非开始节点中使用<br/>
 * 3.通常做为输入参数使用
 */
@Data
public class WfNodeParamRef implements Serializable {

    // Consistency: the other Serializable classes in this module declare an explicit serialVersionUID.
    private static final long serialVersionUID = 1L;

    // uuid of the referenced node
    @JsonProperty("node_uuid")
    private String nodeUuid;

    // name of the referenced node's parameter
    @JsonProperty("node_param_name")
    private String nodeParamName;

    // name under which the referenced value is exposed to the current node
    private String name;
}

View File

@@ -0,0 +1,10 @@
package org.ruoyi.workflow.workflow.edge;
import lombok.Data;
import lombok.EqualsAndHashCode;
@EqualsAndHashCode(callSuper = true)
@Data
public class ConditionalEdge extends Edge {
    // handle id on the source node identifying which branch this edge represents
    private String sourceHandle;
}

View File

@@ -0,0 +1,11 @@
package org.ruoyi.workflow.workflow.edge;
import lombok.Data;
import java.util.List;
@Data
public class Edge {
    // uuid of the source node
    private String sourceNodeUuid;
    // uuids of the downstream target nodes (fan-out supported; singular name kept for API compatibility)
    private List<String> targetNodeUuid;
}

View File

@@ -0,0 +1,213 @@
package org.ruoyi.workflow.workflow.node;
import com.fasterxml.jackson.databind.node.ObjectNode;
import jakarta.validation.ConstraintViolation;
import lombok.Data;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.common.core.exception.base.BaseException;
import org.ruoyi.workflow.base.NodeInputConfigTypeHandler;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.enums.WfIODataTypeEnum;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.util.SpringUtil;
import org.ruoyi.workflow.workflow.NodeProcessResult;
import org.ruoyi.workflow.workflow.WfNodeInputConfig;
import org.ruoyi.workflow.workflow.WfNodeState;
import org.ruoyi.workflow.workflow.WfState;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import org.ruoyi.workflow.workflow.def.WfNodeIO;
import org.ruoyi.workflow.workflow.def.WfNodeParamRef;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.*;
import static org.ruoyi.workflow.enums.ErrorEnum.A_WF_NODE_CONFIG_ERROR;
import static org.ruoyi.workflow.enums.ErrorEnum.A_WF_NODE_CONFIG_NOT_FOUND;
/**
 * 节点实例-运行时
 * Runtime instance of a workflow node: resolves inputs from user input /
 * upstream outputs / referenced params, executes the node, and records its
 * outputs and processing status on the workflow state.
 */
@Data
@Slf4j
public abstract class AbstractWfNode {

    protected WorkflowComponent wfComponent;
    protected WfState wfState;
    @Getter
    protected WfNodeState state;
    protected WorkflowNode node;

    public AbstractWfNode(WorkflowComponent wfComponent, WorkflowNode node, WfState wfState, WfNodeState nodeState) {
        this.wfState = wfState;
        this.wfComponent = wfComponent;
        this.state = nodeState;
        this.node = node;
    }

    /**
     * Resolve this node's inputs. A start node (no completed upstream nodes) takes
     * the user input as-is; any other node merges the latest upstream outputs with
     * its reference-type params, then keeps only the params declared in its input config.
     */
    public void initInput() {
        WfNodeInputConfig nodeInputConfig = NodeInputConfigTypeHandler.fillNodeInputConfig(node.getInputConfig());
        if (wfState.getCompletedNodes().isEmpty()) {
            log.info("没有上游节点,当前节点为开始节点");
            state.getInputs().addAll(wfState.getInput());
            return;
        }
        List<NodeIOData> inputs = new ArrayList<>();
        //将上游节点的输出转成当前节点的输入
        List<NodeIOData> upstreamOutputs = wfState.getLatestOutputs();
        if (!upstreamOutputs.isEmpty()) {
            inputs.addAll(new ArrayList<>(upstreamOutputs));
        } else {
            log.warn("upstream output params is empty");
        }
        //处理引用类型的输入参数,非开始节点只有引用类型输入参数
        List<WfNodeParamRef> refInputDefs = nodeInputConfig.getRefInputs();
        inputs.addAll(changeRefersToNodeIODatas(refInputDefs));
        //根据节点的输入参数定义,刷选出符合要求的输入参数
        // NOTE(review): the input config is parsed a second time here; presumably equivalent to
        // nodeInputConfig above — confirm before unifying the two parses.
        WfNodeInputConfig inputConfig = JsonUtil.toBean(node.getInputConfig(), WfNodeInputConfig.class);
        List<String> defInputNames = inputConfig.getRefInputs().stream().map(WfNodeParamRef::getName).collect(Collectors.toList());
        defInputNames.addAll(inputConfig.getUserInputs().stream().map(WfNodeIO::getName).toList());
        List<NodeIOData> needInputs = inputs.stream().filter(item -> {
            String needInputName = item.getName();
            //上流节点的默认输出参数(output)改成input即可
            if (DEFAULT_OUTPUT_PARAM_NAME.equals(needInputName)) {
                item.setName(DEFAULT_INPUT_PARAM_NAME);
                return true;
            }
            return defInputNames.contains(needInputName);
        }).toList();
        state.getInputs().addAll(needInputs);
    }

    /**
     * 查找引用节点的参数并转成输入输出参数
     *
     * @param referParams 引用类型的定义列表
     * @return resolved IO data for every resolvable reference (unresolvable refs are logged and skipped)
     */
    private List<NodeIOData> changeRefersToNodeIODatas(List<WfNodeParamRef> referParams) {
        List<NodeIOData> result = new ArrayList<>();
        for (WfNodeParamRef referParam : referParams) {
            String nodeUuid = referParam.getNodeUuid();
            String nodeParamName = referParam.getNodeParamName();
            NodeIOData newInput = createByReferParam(nodeUuid, nodeParamName);
            if (null != newInput) {
                newInput.setName(referParam.getName());
                result.add(newInput);
            } else {
                log.warn("Can not find reference node output param,refNodeId:{},refNodeOutputName:{}", nodeUuid, nodeParamName);
            }
        }
        return result;
    }

    /**
     * Find the referenced node's IO param and return a deep clone of it, so later
     * mutation cannot affect the referenced node's recorded data.
     *
     * @return the cloned param, or null when it cannot be found
     */
    public NodeIOData createByReferParam(String refNodeUuid, String refNodeParamName) {
        Optional<NodeIOData> hitDataOpt = wfState.getIOByNodeUuid(refNodeUuid)
                .stream()
                .filter(wfNodeIOData -> wfNodeIOData.getName().equalsIgnoreCase(refNodeParamName))
                .findFirst();
        return hitDataOpt.map(SerializationUtils::clone).orElse(null);
    }

    /**
     * Template method driving one node execution: resolve inputs, invoke
     * {@link #onProcess()}, record status/outputs and notify the optional consumers.
     *
     * @param inputConsumer  callback fired after inputs are resolved (nullable)
     * @param outputConsumer callback fired after outputs/status are set, on success and failure (nullable)
     * @throws RuntimeException wrapping any exception thrown by {@link #onProcess()}
     */
    public NodeProcessResult process(Consumer<WfNodeState> inputConsumer, Consumer<WfNodeState> outputConsumer) {
        log.info("↓↓↓↓↓ node process start,name:{},uuid:{}", node.getTitle(), node.getUuid());
        state.setProcessStatus(NODE_PROCESS_STATUS_DOING);
        initInput();
        //HumanFeedback的情况
        Object humanFeedbackState = state.data().get(HUMAN_FEEDBACK_KEY);
        if (null != humanFeedbackState) {
            String userInput = humanFeedbackState.toString();
            if (StringUtils.isNotBlank(userInput)) {
                state.getInputs().add(NodeIOData.createByText(HUMAN_FEEDBACK_KEY, "default", userInput));
            }
        }
        if (null != inputConsumer) {
            inputConsumer.accept(state);
        }
        log.info("--node input:{}", JsonUtil.toJson(state.getInputs()));
        NodeProcessResult processResult;
        try {
            processResult = onProcess();
        } catch (Exception e) {
            state.setProcessStatus(NODE_PROCESS_STATUS_FAIL);
            state.setProcessStatusRemark("process error:" + e.getMessage());
            wfState.setProcessStatus(WORKFLOW_PROCESS_STATUS_FAIL);
            log.info("↑↑↑↑↑ node process error,name:{},uuid:{},error", node.getTitle(), node.getUuid(), e);
            if (null != outputConsumer) {
                outputConsumer.accept(state);
            }
            throw new RuntimeException(e);
        }
        // Null-safe: streaming nodes may return a result whose content was never populated.
        if (null != processResult.getContent() && !processResult.getContent().isEmpty()) {
            state.setOutputs(processResult.getContent());
        }
        state.setProcessStatus(NODE_PROCESS_STATUS_SUCCESS);
        wfState.getCompletedNodes().add(this);
        log.info("↑↑↑↑↑ node process end,name:{},uuid:{},output:{}",
                node.getTitle(), node.getUuid(), JsonUtil.toJson(state.getOutputs()));
        if (null != outputConsumer) {
            outputConsumer.accept(state);
        }
        return processResult;
    }

    protected abstract NodeProcessResult onProcess();

    /**
     * First text input that is not the default "input" param; falls back to the
     * single input's value, or "" when there are no inputs at all (previously threw
     * IndexOutOfBoundsException on an empty input list).
     */
    protected String getFirstInputText() {
        if (state.getInputs().isEmpty()) {
            return "";
        }
        String firstInputText;
        if (state.getInputs().size() > 1) {
            firstInputText = state.getInputs()
                    .stream()
                    .filter(item -> WfIODataTypeEnum.TEXT.getValue().equals(item.getContent().getType()) && !DEFAULT_INPUT_PARAM_NAME.equals(item.getName()))
                    .map(NodeIOData::valueToString)
                    .findFirst()
                    .orElse("");
        } else {
            firstInputText = state.getInputs().get(0).valueToString();
        }
        return firstInputText;
    }

    /**
     * Parse and bean-validate the node config JSON into the given config class.
     *
     * @param clazz target config class
     * @return the parsed, validated config
     * @throws BaseException when the config is missing, unparsable or fails validation
     */
    protected <T> T checkAndGetConfig(Class<T> clazz) {
        ObjectNode configObj = JsonUtil.toBean(node.getNodeConfig(), ObjectNode.class);
        if (configObj.isEmpty()) {
            log.error("node config is empty,node uuid:{}", state.getUuid());
            throw new BaseException(A_WF_NODE_CONFIG_NOT_FOUND.getInfo());
        }
        log.info("node config:{}", configObj);
        T nodeConfig = JsonUtil.fromJson(configObj, clazz);
        if (null == nodeConfig) {
            log.warn("找不到节点的配置,node uuid:{}", state.getUuid());
            throw new BaseException(A_WF_NODE_CONFIG_ERROR.getInfo());
        }
        boolean configValid = true;
        try {
            Set<ConstraintViolation<T>> violations = SpringUtil.getBean("beanValidator", LocalValidatorFactoryBean.class).validate(nodeConfig);
            for (ConstraintViolation<T> violation : violations) {
                log.error(violation.getMessage());
                configValid = false;
            }
        } catch (Exception e) {
            log.error("节点配置校验失败,node uuid:{},error:{}", state.getUuid(), e.getMessage());
            configValid = false;
        }
        if (!configValid) {
            log.warn("节点配置错误,node uuid:{}", state.getUuid());
            throw new BaseException(A_WF_NODE_CONFIG_ERROR.getInfo());
        }
        return nodeConfig;
    }
}

View File

@@ -0,0 +1,41 @@
package org.ruoyi.workflow.workflow.node;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.extern.slf4j.Slf4j;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.util.JsonUtil;
import org.ruoyi.workflow.workflow.*;
import org.ruoyi.workflow.workflow.data.NodeIOData;
import java.util.ArrayList;
import java.util.List;
import static org.ruoyi.workflow.cosntant.AdiConstant.WorkflowConstant.DEFAULT_OUTPUT_PARAM_NAME;
@Slf4j
public class EndNode extends AbstractWfNode {

    public EndNode(WorkflowComponent wfComponent, WorkflowNode nodeDef, WfState wfState, WfNodeState nodeState) {
        super(wfComponent, nodeDef, wfState, nodeState);
    }

    /**
     * Render the configured "result" template against the node inputs and expose it
     * as the default output parameter; an absent template yields an empty output.
     */
    @Override
    protected NodeProcessResult onProcess() {
        ObjectNode config = JsonUtil.toBean(node.getNodeConfig(), ObjectNode.class);
        JsonNode resultTemplateNode = config.get("result");
        String renderedOutput;
        if (resultTemplateNode == null) {
            log.warn("EndNode result config is empty, nodeUuid: {}, title: {}", node.getUuid(), node.getTitle());
            renderedOutput = "";
        } else {
            // File contents are converted to markdown before the template placeholders are filled in.
            WfNodeIODataUtil.changeFilesContentToMarkdown(state.getInputs());
            renderedOutput = WorkflowUtil.renderTemplate(resultTemplateNode.asText(), state.getInputs());
        }
        List<NodeIOData> outputs = new ArrayList<>();
        outputs.add(NodeIOData.createByText(DEFAULT_OUTPUT_PARAM_NAME, "", renderedOutput));
        return NodeProcessResult.builder().content(outputs).build();
    }
}

View File

@@ -0,0 +1,52 @@
package org.ruoyi.workflow.workflow.node.answer;
import dev.langchain4j.data.message.UserMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ruoyi.workflow.entity.WorkflowComponent;
import org.ruoyi.workflow.entity.WorkflowNode;
import org.ruoyi.workflow.util.SpringUtil;
import org.ruoyi.workflow.workflow.NodeProcessResult;
import org.ruoyi.workflow.workflow.WfNodeState;
import org.ruoyi.workflow.workflow.WfState;
import org.ruoyi.workflow.workflow.WorkflowUtil;
import org.ruoyi.workflow.workflow.node.AbstractWfNode;
import java.util.List;
/**
 * 【节点】LLM生成回答 <br/>
 * 节点内容固定格式LLMAnswerNodeConfig
 */
@Slf4j
public class LLMAnswerNode extends AbstractWfNode {

    public LLMAnswerNode(WorkflowComponent wfComponent, WorkflowNode nodeDef, WfState wfState, WfNodeState nodeState) {
        super(wfComponent, nodeDef, wfState, nodeState);
    }

    /**
     * nodeConfig格式<br/>
     * {"prompt": "将以下内容翻译成英文:{input}","model_platform":"deepseek","model_name":"deepseek-chat"}<br/>
     *
     * @return LLM的返回内容
     */
    @Override
    public NodeProcessResult onProcess() {
        LLMAnswerNodeConfig nodeConfigObj = checkAndGetConfig(LLMAnswerNodeConfig.class);
        String inputText = getFirstInputText();
        log.info("LLM answer node config:{}", nodeConfigObj);
        // A configured prompt template wins over the raw first input.
        String prompt = inputText;
        if (StringUtils.isNotBlank(nodeConfigObj.getPrompt())) {
            prompt = WorkflowUtil.renderTemplate(nodeConfigObj.getPrompt(), state.getInputs());
        }
        log.info("LLM prompt:{}", prompt);
        // 调用LLM — the answer is produced asynchronously by the streaming generator,
        // which attaches the output to the node state when the stream completes.
        WorkflowUtil workflowUtil = SpringUtil.getBean(WorkflowUtil.class);
        String modelName = nodeConfigObj.getModelName();
        String category = nodeConfigObj.getCategory();
        List<UserMessage> systemMessage = List.of(UserMessage.from(prompt));
        workflowUtil.streamingInvokeLLM(wfState, state, node, category, modelName, systemMessage);
        // Return an explicit empty content list: "new NodeProcessResult()" may leave content null
        // (Lombok @Builder.Default is not applied by the no-args constructor), which would NPE in
        // AbstractWfNode.process() — TODO confirm NodeProcessResult's defaults.
        return NodeProcessResult.builder().content(List.of()).build();
    }
}

View File

@@ -0,0 +1,26 @@
package org.ruoyi.workflow.workflow.node.answer;
import com.fasterxml.jackson.annotation.JsonProperty;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import lombok.EqualsAndHashCode;
// NOTE(review): @EqualsAndHashCode is already implied by @Data; kept as-is.
@EqualsAndHashCode
@Data
public class LLMAnswerNodeConfig {
    // prompt template; "{param}" placeholders are filled from the node inputs
    @NotBlank
    private String prompt;
    /**
     * TODO
     */
    // @NotBlank
    private String category;
    @NotNull
    @JsonProperty("model_name")
    private String modelName;
    // whether the answer should be streamed back to the client
    private Boolean streaming;
}

View File

@@ -0,0 +1,16 @@
package org.ruoyi.workflow.workflow.node.classifier;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/**
 * Config of the classifier node, deserialized from the node's JSON config.
 */
@Data
public class ClassifierNodeConfig {

    // Classification categories. The raw type "List" was parameterized to avoid unchecked
    // raw-type usage; the element schema is defined by the node editor — TODO confirm element type.
    private List<Object> categories = new ArrayList<>();

    @JsonProperty("model_platform")
    private String modelPlatform;

    @JsonProperty("model_name")
    private String modelName;
}

Some files were not shown because too many files have changed in this diff Show More