init llm config

This commit is contained in:
Chuck1sn
2025-05-23 09:42:42 +08:00
parent f94240dd02
commit 2f3a5abd55
11 changed files with 144 additions and 17 deletions

View File

@@ -4,6 +4,8 @@ import com.zl.mjga.config.ai.AiChatAssistant;
import dev.langchain4j.service.TokenStream;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jooq.generated.default_schema.enums.LlmCodeEnum;
import org.jooq.generated.mjga.tables.pojos.AiLlmConfig;
import org.springframework.stereotype.Service;
@Service
@@ -13,6 +15,7 @@ public class AiChatService {
private final AiChatAssistant deepSeekChatAssistant;
private final AiChatAssistant zhiPuChatAssistant;
private final LlmService llmService;
public TokenStream chatWithDeepSeek(String sessionIdentifier, String userMessage) {
return deepSeekChatAssistant.chat(sessionIdentifier, userMessage);
@@ -21,4 +24,13 @@ public class AiChatService {
/**
 * Streams a chat completion from the ZhiPu-backed assistant.
 *
 * @param sessionIdentifier conversation/session key used by the assistant to keep context
 * @param userMessage the user's prompt text
 * @return a {@link TokenStream} emitting the model's response tokens
 */
public TokenStream chatWithZhiPu(String sessionIdentifier, String userMessage) {
  // Pure delegation — the assistant owns memory/session handling.
  TokenStream responseStream = zhiPuChatAssistant.chat(sessionIdentifier, userMessage);
  return responseStream;
}
/**
 * Routes the chat request to whichever LLM is currently configured as the
 * preferred (enabled) provider, as resolved by {@link LlmService}.
 *
 * @param sessionIdentifier conversation/session key used by the assistant to keep context
 * @param userMessage the user's prompt text
 * @return a {@link TokenStream} emitting the selected model's response tokens
 */
public TokenStream chatPrecedenceLlmWith(String sessionIdentifier, String userMessage) {
  // NOTE(review): assumes getPrecedenceLlmBy(true) returns a non-null config — confirm contract.
  AiLlmConfig preferredConfig = llmService.getPrecedenceLlmBy(true);
  // Exhaustive switch over the enum: the compiler enforces every provider is handled.
  return switch (preferredConfig.getCode()) {
    case DEEP_SEEK -> deepSeekChatAssistant.chat(sessionIdentifier, userMessage);
    case ZHI_PU -> zhiPuChatAssistant.chat(sessionIdentifier, userMessage);
  };
}
}