Quellcode durchsuchen

支持 DeepSeek 聊天接口流式输出(SSE)

为 DeepSeek 聊天接口新增 stream 参数,支持以 SSE 方式流式返回消息增量。更新了请求模型、服务方法签名及接口定义,并补充了相关注释说明。
Lyyyi vor 12 Stunden
Ursprung
Commit
484cd54f86

+ 7 - 0
OASystem/OASystem.Api/OAMethodLib/DeepSeekAPI/DeepSeekModels.cs

@@ -168,6 +168,13 @@ namespace OASystem.API.OAMethodLib.DeepSeekAPI
         [JsonPropertyName("messages")]
         public List<FileMessage> Messages { get; set; }
 
+        /// <summary>
+        /// SSE的形式以流式发送消息增量
+        /// 如果设置为 True,将会以 SSE(server-sent events)的形式以流式发送消息增量。消息流以 data: [DONE] 结尾。
+        /// </summary>
+        [JsonPropertyName("stream")]
+        public bool Stream { get; set; } = false;
+
         /// <summary>
         /// 温度参数
         /// </summary>

+ 8 - 1
OASystem/OASystem.Api/OAMethodLib/DeepSeekAPI/DeepSeekService.cs

@@ -322,7 +322,13 @@ namespace OASystem.API.OAMethodLib.DeepSeekAPI
         /// <summary>
         /// 使用 chat 接口进行聊天(不使用文件上下文)
         /// </summary>
-        public async Task<ApiResponse> ChatAsync(string question, string model = "deepseek-chat", float temperature = 0.7f, int maxTokens = 4000)
+        /// <param name="question">问题</param>
+        /// <param name="stream">是否以 SSE 流式输出</param>
+        /// <param name="model">模型名称</param>
+        /// <param name="temperature">温度参数</param>
+        /// <param name="maxTokens">最大token数</param>
+        /// <returns>聊天响应</returns>
+        public async Task<ApiResponse> ChatAsync(string question, bool stream = false, string model = "deepseek-chat",  float temperature = 0.7f, int maxTokens = 4000)
         {
             try
             {
@@ -343,6 +349,7 @@ namespace OASystem.API.OAMethodLib.DeepSeekAPI
                             Content = messageContent
                         }
                     },
+                    Stream = stream,
                     Temperature = temperature,
                     MaxTokens = maxTokens
                 };

+ 3 - 2
OASystem/OASystem.Api/OAMethodLib/DeepSeekAPI/IDeepSeekService.cs

@@ -65,14 +65,15 @@
         Task<ApiResponse> ChatWithFilesAsync(List<string> fileIds, string question, string model = "deepseek-chat", float temperature = 0.7f, int maxTokens = 4000);
 
         /// <summary>
-        /// 使用已上传的文件进行聊天
+        /// chat接口 - 直接提问(不使用文件上下文)
         /// </summary>
         /// <param name="question">问题</param>
+        /// <param name="stream">是否流式输出</param>
         /// <param name="model">模型名称</param>
         /// <param name="temperature">温度参数</param>
         /// <param name="maxTokens">最大token数</param>
         /// <returns>聊天响应</returns>
-        Task<ApiResponse> ChatAsync(string question, string model = "deepseek-chat", float temperature = 0.7f, int maxTokens = 4000);
+        Task<ApiResponse> ChatAsync(string question, bool stream = false, string model = "deepseek-chat", float temperature = 0.7f, int maxTokens = 4000);
 
         /// <summary>
         /// 等待文件处理完成