add llm config

This commit is contained in:
Chuck1sn
2025-05-24 13:29:09 +08:00
parent 43728ee733
commit c2d5fddcc0
22 changed files with 675 additions and 38 deletions

View File

@@ -2,6 +2,7 @@ package com.zl.mjga.controller;
import com.zl.mjga.dto.PageRequestDto;
import com.zl.mjga.dto.PageResponseDto;
import com.zl.mjga.dto.ai.LlmQueryDto;
import com.zl.mjga.dto.ai.LlmVm;
import com.zl.mjga.service.AiChatService;
import com.zl.mjga.service.LlmService;
@@ -52,7 +53,8 @@ public class AiController {
@PreAuthorize("hasAuthority(T(com.zl.mjga.model.urp.EPermission).READ_LLM_CONFIG_PERMISSION)")
@GetMapping("/llm/page-query")
@ResponseStatus(HttpStatus.OK)
public PageResponseDto<List<LlmVm>> pageQueryLlm(@ModelAttribute PageRequestDto pageRequestDto) {
return llmService.pageQueryLlm(pageRequestDto);
public PageResponseDto<List<LlmVm>> pageQueryLlm(
@ModelAttribute PageRequestDto pageRequestDto, @ModelAttribute LlmQueryDto llmQueryDto) {
return llmService.pageQueryLlm(pageRequestDto, llmQueryDto);
}
}

View File

@@ -0,0 +1,3 @@
package com.zl.mjga.dto.ai;
/**
 * Query filter for paging LLM configurations.
 *
 * @param name optional exact-match filter on the LLM config name; when {@code null} or empty the
 *     repository applies no name condition (see {@code LlmRepository.pageFetchBy}, which guards
 *     with {@code StringUtils.isNotEmpty} and falls back to {@code noCondition()})
 */
public record LlmQueryDto(String name) {}

View File

@@ -1,8 +1,11 @@
package com.zl.mjga.repository;
import static org.jooq.generated.mjga.Tables.AI_LLM_CONFIG;
import static org.jooq.impl.DSL.noCondition;
import com.zl.mjga.dto.PageRequestDto;
import com.zl.mjga.dto.ai.LlmQueryDto;
import org.apache.commons.lang3.StringUtils;
import org.jooq.Configuration;
import org.jooq.Record;
import org.jooq.Result;
@@ -19,11 +22,15 @@ public class LlmRepository extends AiLlmConfigDao {
super(configuration);
}
public Result<Record> pageFetchBy(PageRequestDto pageRequestDto) {
public Result<Record> pageFetchBy(PageRequestDto pageRequestDto, LlmQueryDto llmQueryDto) {
return ctx()
.select(
AI_LLM_CONFIG.asterisk(), DSL.count().over().as("total_llm").convertFrom(Long::valueOf))
.from(AI_LLM_CONFIG)
.where(
StringUtils.isNotEmpty(llmQueryDto.name())
? AI_LLM_CONFIG.NAME.eq(llmQueryDto.name())
: noCondition())
.orderBy(pageRequestDto.getSortFields())
.limit(pageRequestDto.getSize())
.offset(pageRequestDto.getOffset())

View File

@@ -1,7 +1,9 @@
package com.zl.mjga.service;
import com.zl.mjga.config.ai.AiChatAssistant;
import com.zl.mjga.exception.BusinessException;
import dev.langchain4j.service.TokenStream;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jooq.generated.mjga.enums.LlmCodeEnum;
@@ -26,8 +28,9 @@ public class AiChatService {
}
public TokenStream chatPrecedenceLlmWith(String sessionIdentifier, String userMessage) {
AiLlmConfig precedenceLlmBy = llmService.getPrecedenceLlmBy(true);
LlmCodeEnum code = precedenceLlmBy.getCode();
Optional<AiLlmConfig> precedenceLlmBy = llmService.getPrecedenceLlmBy(true);
AiLlmConfig aiLlmConfig = precedenceLlmBy.orElseThrow(() -> new BusinessException("没有开启的大模型"));
LlmCodeEnum code = aiLlmConfig.getCode();
return switch (code) {
case ZHI_PU -> zhiPuChatAssistant.chat(sessionIdentifier, userMessage);
case DEEP_SEEK -> deepSeekChatAssistant.chat(sessionIdentifier, userMessage);

View File

@@ -2,10 +2,12 @@ package com.zl.mjga.service;
import com.zl.mjga.dto.PageRequestDto;
import com.zl.mjga.dto.PageResponseDto;
import com.zl.mjga.dto.ai.LlmQueryDto;
import com.zl.mjga.dto.ai.LlmVm;
import com.zl.mjga.repository.LlmRepository;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jooq.Record;
@@ -26,16 +28,14 @@ public class LlmService {
return llmRepository.fetchOneByCode(llmCodeEnum);
}
public AiLlmConfig getPrecedenceLlmBy(Boolean enable) {
public Optional<AiLlmConfig> getPrecedenceLlmBy(Boolean enable) {
List<AiLlmConfig> aiLlmConfigs = llmRepository.fetchByEnable(enable);
//noinspection OptionalGetWithoutIsPresent
return aiLlmConfigs.stream()
.max((o1, o2) -> o2.getPriority().compareTo(o1.getPriority()))
.get();
return aiLlmConfigs.stream().max((o1, o2) -> o2.getPriority().compareTo(o1.getPriority()));
}
public PageResponseDto<List<LlmVm>> pageQueryLlm(PageRequestDto pageRequestDto) {
Result<Record> records = llmRepository.pageFetchBy(pageRequestDto);
public PageResponseDto<List<LlmVm>> pageQueryLlm(
PageRequestDto pageRequestDto, LlmQueryDto llmQueryDto) {
Result<Record> records = llmRepository.pageFetchBy(pageRequestDto, llmQueryDto);
if (records.isEmpty()) {
return PageResponseDto.empty();
}