Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;

using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
Expand All @@ -24,52 +22,30 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
/// <inheritdoc />
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

// Prepare the system prompt:
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager),
};

// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);

// Prepare the AlibabaCloud HTTP chat request:
var alibabaCloudChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,

// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

async Task<HttpRequestMessage> RequestBuilder()
{
// Build the HTTP post request:
var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");

// Set the authorization header:
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));

// Set the content:
request.Content = new StringContent(alibabaCloudChatRequest, Encoding.UTF8, "application/json");
return request;
}

await foreach (var content in this.StreamChatCompletionInternal<ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>("AlibabaCloud", RequestBuilder, token))
await foreach (var content in this.StreamOpenAICompatibleChatCompletion<ChatCompletionAPIRequest, ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>(
"AlibabaCloud",
chatModel,
chatThread,
settingsManager,
async (systemPrompt, apiParameters) =>
{
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);

return new ChatCompletionAPIRequest
{
Model = chatModel.Id,

// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
};
},
token: token))
yield return content;
}

Expand Down Expand Up @@ -183,4 +159,4 @@ private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, SecretStore
return modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)));
}

}
}
72 changes: 72 additions & 0 deletions app/MindWork AI Studio/Provider/BaseProvider.cs
Original file line number Diff line number Diff line change
Expand Up @@ -565,6 +565,78 @@ protected async IAsyncEnumerable<ContentStreamChunk> StreamResponsesInternal<TDe
streamReader.Dispose();
}

/// <summary>
/// Streams the chat completion from an OpenAI-compatible provider using the Chat Completion API.
/// </summary>
/// <param name="providerName">The provider name for logging and error reporting.</param>
/// <param name="chatModel">The selected chat model.</param>
/// <param name="chatThread">The current chat thread.</param>
/// <param name="settingsManager">The settings manager.</param>
/// <param name="requestFactory">Builds the provider-specific request body.</param>
/// <param name="storeType">The secret store type.</param>
/// <param name="isTryingSecret">Whether the API key is optional.</param>
/// <param name="systemPromptRole">The system prompt role to use.</param>
/// <param name="requestPath">The request path, relative to the provider base URL.</param>
/// <param name="headersAction">Optional additional headers to add.</param>
/// <param name="token">The cancellation token.</param>
/// <typeparam name="TRequest">The request DTO type.</typeparam>
/// <typeparam name="TDelta">The delta stream line type.</typeparam>
/// <typeparam name="TAnnotation">The annotation stream line type.</typeparam>
/// <returns>The streamed content chunks.</returns>
protected async IAsyncEnumerable<ContentStreamChunk> StreamOpenAICompatibleChatCompletion<TRequest, TDelta, TAnnotation>(
    string providerName,
    Model chatModel,
    ChatThread chatThread,
    SettingsManager settingsManager,
    Func<TextMessage, IDictionary<string, object>, Task<TRequest>> requestFactory,
    SecretStoreType storeType = SecretStoreType.LLM_PROVIDER,
    bool isTryingSecret = false,
    string systemPromptRole = "system",
    string requestPath = "chat/completions",
    Action<HttpRequestHeaders>? headersAction = null,
    [EnumeratorCancellation] CancellationToken token = default)
    where TDelta : IResponseStreamLine
    where TAnnotation : IAnnotationStreamLine
{
    // Resolve the API key; abort only when the key is required but unavailable:
    var secretResponse = await RUST_SERVICE.GetAPIKey(this, storeType, isTrying: isTryingSecret);
    if(!secretResponse.Success && !isTryingSecret)
        yield break;

    // Assemble the system instruction message with the requested role:
    var systemMessage = new TextMessage
    {
        Role = systemPromptRole,
        Content = chatThread.PrepareSystemPrompt(settingsManager),
    };

    // Collect the user-configured additional API parameters:
    var additionalParameters = this.ParseAdditionalApiParameters();

    // Let the caller construct the provider-specific request DTO; serialize it
    // once up front so that repeated request builds reuse the same payload:
    var serializedRequest = JsonSerializer.Serialize(await requestFactory(systemMessage, additionalParameters), JSON_SERIALIZER_OPTIONS);

    async Task<HttpRequestMessage> BuildRequest()
    {
        // Create the HTTP POST request against the relative endpoint path:
        var httpRequest = new HttpRequestMessage(HttpMethod.Post, requestPath);

        // Attach the bearer token only when a secret was actually obtained:
        if (secretResponse.Success)
            httpRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await secretResponse.Secret.Decrypt(ENCRYPTION));

        // Give the caller the chance to add provider-specific headers:
        headersAction?.Invoke(httpRequest.Headers);

        // Attach the serialized JSON payload:
        httpRequest.Content = new StringContent(serializedRequest, Encoding.UTF8, "application/json");
        return httpRequest;
    }

    // Hand off the actual streaming work to the shared implementation:
    await foreach (var chunk in this.StreamChatCompletionInternal<TDelta, TAnnotation>(providerName, BuildRequest, token))
        yield return chunk;
}

protected async Task<string> PerformStandardTranscriptionRequest(RequestedSecret requestedSecret, Model transcriptionModel, string audioFilePath, Host host = Host.NONE, CancellationToken token = default)
{
try
Expand Down
74 changes: 25 additions & 49 deletions app/MindWork AI Studio/Provider/DeepSeek/ProviderDeepSeek.cs
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;

using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
Expand All @@ -24,52 +22,30 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
/// <inheritdoc />
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

// Prepare the system prompt:
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager),
};

// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessagesUsingDirectImageUrlAsync(this.Provider, chatModel);

// Prepare the DeepSeek HTTP chat request:
var deepSeekChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,

// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

async Task<HttpRequestMessage> RequestBuilder()
{
// Build the HTTP post request:
var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");

// Set the authorization header:
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));

// Set the content:
request.Content = new StringContent(deepSeekChatRequest, Encoding.UTF8, "application/json");
return request;
}

await foreach (var content in this.StreamChatCompletionInternal<ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>("DeepSeek", RequestBuilder, token))
await foreach (var content in this.StreamOpenAICompatibleChatCompletion<ChatCompletionAPIRequest, ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>(
"DeepSeek",
chatModel,
chatThread,
settingsManager,
async (systemPrompt, apiParameters) =>
{
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessagesUsingDirectImageUrlAsync(this.Provider, chatModel);

return new ChatCompletionAPIRequest
{
Model = chatModel.Id,

// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
};
},
token: token))
yield return content;
}

Expand Down Expand Up @@ -144,4 +120,4 @@ private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, Can
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data;
}
}
}
20 changes: 0 additions & 20 deletions app/MindWork AI Studio/Provider/Fireworks/ChatRequest.cs

This file was deleted.

71 changes: 23 additions & 48 deletions app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
Original file line number Diff line number Diff line change
@@ -1,7 +1,4 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;

using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
Expand All @@ -24,53 +21,31 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
/// <inheritdoc />
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;
await foreach (var content in this.StreamOpenAICompatibleChatCompletion<ChatCompletionAPIRequest, ResponseStreamLine, ChatCompletionAnnotationStreamLine>(
"Fireworks",
chatModel,
chatThread,
settingsManager,
async (systemPrompt, apiParameters) =>
{
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);

// Prepare the system prompt:
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager),
};

// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);

// Prepare the Fireworks HTTP chat request:
var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,

// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
return new ChatCompletionAPIRequest
{
Model = chatModel.Id,

async Task<HttpRequestMessage> RequestBuilder()
{
// Build the HTTP post request:
var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],

// Set the authorization header:
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));

// Set the content:
request.Content = new StringContent(fireworksChatRequest, Encoding.UTF8, "application/json");
return request;
}

await foreach (var content in this.StreamChatCompletionInternal<ResponseStreamLine, ChatCompletionAnnotationStreamLine>("Fireworks", RequestBuilder, token))
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
};
},
token: token))
yield return content;
}

Expand Down Expand Up @@ -126,4 +101,4 @@ public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyPr
}

#endregion
}
}
Loading