CSharp OpenAI
Microsoft has an open-source framework that lets you call the OpenAI API from C#. The repository is here:
GitHub - microsoft/semantic-kernel: Integrate cutting-edge LLM technology quickly and easily into your apps
Today I looked into how to use a China-based proxy to access the OpenAI API with it.
The proxy is the one described in an earlier article:
国内访问OpenAI API_openai代理-CSDN博客
The framework can be installed through NuGet in Visual Studio.
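For reference, the main NuGet package is Microsoft.SemanticKernel; a typical way to add it is from the command line (or the equivalent Install-Package command in the Package Manager Console), with the version left to your own choice:

dotnet add package Microsoft.SemanticKernel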
The framework samples include a Create_Kernel example. I adapted it slightly so that it runs against the China-based proxy.
The modified code is as follows:
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI;
using System;
using System.ClientModel;
using System.Threading.Tasks;

public sealed class S01_Create_Kernel
{
    public async Task RunAsync()
    {
        // Point the OpenAI client at the proxy endpoint instead of api.openai.com.
        OpenAIClientOptions options = new OpenAIClientOptions();
        options.Endpoint = new Uri("https://api.openai-proxy.org/v1");
        OpenAIClient client = new OpenAIClient(new ApiKeyCredential("sk-xxxxx"), options);

        Kernel kernel = Kernel.CreateBuilder().AddOpenAIChatCompletion(modelId: "gpt-4", client).Build();

        // 1. Invoke the kernel with a plain prompt and print the result.
        Console.WriteLine(await kernel.InvokePromptAsync("What color is the sky?"));
        Console.WriteLine();

        // 2. Invoke the kernel with a templated prompt and kernel arguments.
        KernelArguments arguments = new() { { "topic", "sea" } };
        Console.WriteLine(await kernel.InvokePromptAsync("What color is the {{$topic}}?", arguments));
        Console.WriteLine();

        // 3. Invoke the kernel with a templated prompt and stream the result to the console.
        await foreach (var update in kernel.InvokePromptStreamingAsync("What color is the {{$topic}}? Provide a detailed explanation.", arguments))
        {
            Console.Write(update);
        }
        Console.WriteLine(string.Empty);

        // 4. Invoke the kernel with a templated prompt and custom execution settings.
        arguments = new(new OpenAIPromptExecutionSettings { MaxTokens = 500, Temperature = 0.5 }) { { "topic", "dogs" } };
        Console.WriteLine(await kernel.InvokePromptAsync("Tell me a story about {{$topic}}?", arguments));

        // 5. Invoke the kernel with execution settings that request a JSON response.
#pragma warning disable SKEXP0010
        arguments = new(new OpenAIPromptExecutionSettings { ResponseFormat = "json_object" }) { { "topic", "chocolate" } };
        Console.WriteLine(await kernel.InvokePromptAsync("Create a recipe for a {{$topic}} cake in JSON format", arguments));
#pragma warning restore SKEXP0010
    }

    public static async Task Main()
    {
        await new S01_Create_Kernel().RunAsync();
    }
}
Just replace sk-xxxxx in the code with the API key from your own proxy account and the sample will run.
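If you would rather not hardcode the key, a minimal sketch is to read it from an environment variable at startup; the variable name OPENAI_PROXY_API_KEY below is my own choice, not part of the sample, and the lines are a drop-in replacement for the client construction above:

OpenAIClientOptions options = new OpenAIClientOptions();
options.Endpoint = new Uri("https://api.openai-proxy.org/v1");
// Assumption: the proxy API key has been exported as OPENAI_PROXY_API_KEY beforehand.
string apiKey = Environment.GetEnvironmentVariable("OPENAI_PROXY_API_KEY")
    ?? throw new InvalidOperationException("OPENAI_PROXY_API_KEY is not set.");
OpenAIClient client = new OpenAIClient(new ApiKeyCredential(apiKey), options);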