@@ -0,0 +1,63 @@
+package com.webchat.aigc.llm;
+
+import com.webchat.common.enums.LlmModelEnum;
+import com.webchat.common.exception.BusinessException;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @author 程序员王七七 https://www.coderutil.com website author
+ * @date 2024/10/29 22:47
+ *
+ * Factory for the abstract LLM chat service implementations, keyed by model name
+ */
+@Component
+public class LLMServiceFactory implements InitializingBean, ApplicationContextAware {
+
+    private ApplicationContext applicationContext;
+
+    private static final Map<String, AbstractLLMChatService> serviceMap = new HashMap<>();
+
+    @Override
+    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+        this.applicationContext = applicationContext;
+    }
+
+    @Override
+    public void afterPropertiesSet() throws Exception {
+        this.initServiceMap();
+    }
+
+    /**
+     * Initialize the model-to-service registry
+     */
+    private void initServiceMap() {
+        /**
+         * kimi
+         */
+        serviceMap.put(LlmModelEnum.KIMI.getModel(), applicationContext.getBean(KimiAIService.class));
+        /**
+         * deepseek
+         */
+        serviceMap.put(LlmModelEnum.DEEPSEEK.getModel(), applicationContext.getBean(DeepSeekAIService.class));
+
+        /**
+         * ollama
+         */
+        serviceMap.put(LlmModelEnum.OLLAMA.getModel(), applicationContext.getBean(OllamaService.class));
+    }
+
+    public static AbstractLLMChatService getLLMService(String model) {
+        AbstractLLMChatService llmChatService = serviceMap.get(model);
+        if (llmChatService == null) {
+            throw new BusinessException("Unsupported model");
+        }
+        return llmChatService;
+    }
+}
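
Once this factory is wired up, a caller only needs the model key to obtain the matching service. A minimal usage sketch follows; the ChatExample class and the chat(...) method on AbstractLLMChatService are assumptions for illustration, since that interface is not shown in this hunk:

import com.webchat.aigc.llm.AbstractLLMChatService;
import com.webchat.aigc.llm.LLMServiceFactory;
import com.webchat.common.enums.LlmModelEnum;

public class ChatExample {

    // Resolve the chat service registered for the requested model key;
    // getLLMService(...) throws BusinessException for keys never registered in initServiceMap().
    public String ask(String prompt) {
        AbstractLLMChatService llmService =
                LLMServiceFactory.getLLMService(LlmModelEnum.KIMI.getModel());

        // Hypothetical call: the chat(...) method name is assumed here,
        // the real API of AbstractLLMChatService is defined elsewhere in the project.
        return llmService.chat(prompt);
    }
}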