using OpenAI.GPT3;
using OpenAI.GPT3.Managers;
using OpenAI.GPT3.ObjectModels;
using OpenAI.GPT3.ObjectModels.RequestModels;
namespace OASystem.API.OAMethodLib.ChatGPT
{
    /// <summary>
    /// ChatGPT 3.5 helper, built on the OpenAI.GPT3 (Betalgo) SDK
    /// </summary>
    public static class ChatGPTTools
    {
        // NOTE: the API key is hardcoded here; in production it should come from configuration or a secret store.
        private readonly static string _appKey = "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";

        // Not used by the current implementation: the OpenAIService created below manages its own HttpClient.
        private readonly static HttpClient _httpClient = new HttpClient { BaseAddress = new Uri("https://api.openai.com") };
        /// <summary>
        /// ChatGPT - Completions (text completion)
        /// </summary>
        /// <param name="prompt">The prompt (question) sent to the API</param>
        /// <returns></returns>
        public static async Task<Result> Completions(string prompt)
        {
            string url = "/v1/completions";
            var reqData = new CompletionCreateRequest()
            {
                Prompt = prompt,      // the prompt (question) you supply to the API
                Temperature = 0.3f,   // sampling temperature, range 0-1; at 0 the model returns the same or very similar
                                      // answers to the same question, at 1 answers vary more between calls; default 1
                TopP = 1f,            // default 1; nucleus-sampling alternative to temperature, e.g. 0.1 means only tokens
                                      // in the top 10% of probability mass are considered; 1 is recommended here
                N = 1,                // default 1; number of completions generated per prompt, larger values consume many tokens
                Stream = false,       // default false; whether to stream partial progress, the stream ends with data:[DONE]
                Echo = false,         // default false; echo the prompt back in addition to the completion
                //Stop = "",          // up to four stop sequences; the returned text does not include the stop sequence
                MaxTokens = 1024,     // default 16; most models support up to 2048
            };
            return await PostChatGPT(url, reqData);
        }
        /// <summary>
        /// POST to the OpenAI completions API
        /// </summary>
        /// <param name="url">API path (not actually used: the SDK call below targets the completions endpoint itself)</param>
        /// <param name="reqData">Request parameters</param>
        /// <returns></returns>
        public static async Task<Result> PostChatGPT(string url, CompletionCreateRequest reqData)
        {
            Result result = new Result();
            OpenAIService service = new OpenAIService(new OpenAiOptions() { ApiKey = _appKey });
            var res = await service.Completions.CreateCompletion(reqData, Models.TextDavinciV3);
            if (res.Successful)
            {
                result.Code = 0;
                result.Data = new { Text = res.Choices.FirstOrDefault()?.Text };
            }
            else result.Msg = res.Error?.Message ?? "Request failed!";
            return result;
        }
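
        // ------------------------------------------------------------------------------------------------
        // Sketch (an assumption, not part of the original file): the class is titled "ChatGPT 3.5", but the
        // method above calls the legacy text-davinci-003 completions endpoint. With the same OpenAI.GPT3
        // (Betalgo) package, a gpt-3.5-turbo chat completion could look roughly like the method below; the
        // name ChatCompletions and the parameter values are illustrative only.
        // ------------------------------------------------------------------------------------------------
        public static async Task<Result> ChatCompletions(string prompt)
        {
            var result = new Result();
            var service = new OpenAIService(new OpenAiOptions() { ApiKey = _appKey });
            var res = await service.ChatCompletion.CreateCompletion(new ChatCompletionCreateRequest()
            {
                Messages = new List<ChatMessage>
                {
                    ChatMessage.FromUser(prompt)   // the user's prompt as a single chat message
                },
                Temperature = 0.3f,
                MaxTokens = 1024,
            }, Models.ChatGpt3_5Turbo);

            if (res.Successful)
            {
                result.Code = 0;
                result.Data = new { Text = res.Choices.FirstOrDefault()?.Message.Content };
            }
            else result.Msg = res.Error?.Message ?? "Request failed!";
            return result;
        }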
    }
}
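
// ------------------------------------------------------------------------------------------------
// Usage sketch (an assumption, not part of the original file): an ASP.NET Core controller action
// forwarding a prompt to ChatGPTTools.Completions. The controller and route names are hypothetical
// and only show how the Result (Code/Data/Msg) returned by the helper might be consumed.
// ------------------------------------------------------------------------------------------------
namespace OASystem.API.Controllers
{
    using Microsoft.AspNetCore.Mvc;
    using OASystem.API.OAMethodLib.ChatGPT;

    [ApiController]
    [Route("api/[controller]")]
    public class ChatGptDemoController : ControllerBase
    {
        /// <summary>Hypothetical endpoint that forwards a prompt to the completion helper.</summary>
        [HttpPost("completions")]
        public async Task<IActionResult> Completions([FromBody] string prompt)
        {
            var result = await ChatGPTTools.Completions(prompt);
            // Result (Code/Data/Msg) is defined elsewhere in OASystem; here it is returned to the client as-is.
            return Ok(result);
        }
    }
}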