@@ -85,8 +85,11 @@ public class EmbeddingSetting
#region Reasoning model settings
public class ReasoningSetting
{
public float Temperature { get; set; } = 1.0f;
public float? Temperature { get; set; }

[Obsolete("Set EffortLevel in Parameters")]
public string? EffortLevel { get; set; }
public Dictionary<string, ModelParamSetting>? Parameters { get; set; }
}
#endregion
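// A matching configuration entry now looks like the sketch below (mirroring the gpt-5 model
// added to src/WebStarter/appsettings.json in this same change): Temperature is optional and
// the effort level moves into the Parameters dictionary.
//   "Reasoning": {
//     "Temperature": 1.0,
//     "Parameters": {
//       "EffortLevel": { "Default": "minimal", "Options": [ "minimal", "low", "medium", "high" ] }
//     }
//   }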

@@ -11,6 +11,7 @@ public class ChatCompletionProvider : IChatCompletion
protected readonly OpenAiSettings _settings;
protected readonly IServiceProvider _services;
protected readonly ILogger<ChatCompletionProvider> _logger;
protected readonly IConversationStateService _state;

protected string _model;
private List<string> renderedInstructions = [];
@@ -21,11 +22,13 @@ public class ChatCompletionProvider : IChatCompletion
public ChatCompletionProvider(
OpenAiSettings settings,
ILogger<ChatCompletionProvider> logger,
IServiceProvider services)
IServiceProvider services,
IConversationStateService state)
{
_settings = settings;
_logger = logger;
_services = services;
_state = state;
}

public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
@@ -351,7 +354,6 @@ public async Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent,
protected (string, IEnumerable<ChatMessage>, ChatCompletionOptions) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
{
var agentService = _services.GetRequiredService<IAgentService>();
var state = _services.GetRequiredService<IConversationStateService>();
var settingsService = _services.GetRequiredService<ILlmProviderService>();
var settings = settingsService.GetSetting(Provider, _model);
var allowMultiModal = settings != null && settings.MultiModal;
@@ -409,7 +411,7 @@ public async Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent,
var imageDetailLevel = ChatImageDetailLevel.Auto;
if (allowMultiModal)
{
imageDetailLevel = ParseChatImageDetailLevel(state.GetState("chat_image_detail_level"));
imageDetailLevel = ParseChatImageDetailLevel(_state.GetState("chat_image_detail_level"));
}

foreach (var message in filteredMessages)
@@ -549,20 +551,15 @@ private string GetPrompt(IEnumerable<ChatMessage> messages, ChatCompletionOption

private ChatCompletionOptions InitChatCompletionOption(Agent agent)
{
var state = _services.GetRequiredService<IConversationStateService>();
var settingsService = _services.GetRequiredService<ILlmProviderService>();
var settings = settingsService.GetSetting(Provider, _model);

// Reasoning effort
ChatReasoningEffortLevel? reasoningEffortLevel = null;
float? temperature = float.Parse(state.GetState("temperature", "0.0"));
if (settings?.Reasoning != null)
{
temperature = settings.Reasoning.Temperature;
var level = state.GetState("reasoning_effort_level")
.IfNullOrEmptyAs(agent?.LlmConfig?.ReasoningEffortLevel)
.IfNullOrEmptyAs(settings?.Reasoning?.EffortLevel);
reasoningEffortLevel = ParseReasoningEffortLevel(level);
// Reasoning
float? temperature = float.Parse(_state.GetState("temperature", "0.0"));
var (reasoningTemp, reasoningEffortLevel) = ParseReasoning(settings?.Reasoning, agent);
if (reasoningTemp.HasValue)
{
temperature = reasoningTemp.Value;
}

// Web search
@@ -574,7 +571,7 @@ private ChatCompletionOptions InitChatCompletionOption(Agent agent)
webSearchOptions = new();
}

var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
var maxTokens = int.TryParse(_state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;

@@ -587,16 +584,61 @@ private ChatCompletionOptions InitChatCompletionOption(Agent agent)
};
}

/// <summary>
/// Parses the reasoning setting and returns (temperature, reasoning effort level).
/// </summary>
/// <param name="settings">Reasoning setting from the LLM provider configuration.</param>
/// <param name="agent">Agent whose LlmConfig may supply a reasoning effort level.</param>
/// <returns>A tuple of nullable temperature and nullable reasoning effort level.</returns>
private (float?, ChatReasoningEffortLevel?) ParseReasoning(
ReasoningSetting? settings,
Agent agent)
{
float? temperature = null;
ChatReasoningEffortLevel? reasoningEffortLevel = null;

if (settings == null)
{
return (temperature, reasoningEffortLevel);
}

if (settings.Temperature.HasValue)
{
temperature = settings.Temperature;
}

var defaultLevel = settings?.EffortLevel;

if (settings?.Parameters != null
&& settings.Parameters.TryGetValue("EffortLevel", out var settingValue)
&& !string.IsNullOrEmpty(settingValue?.Default))
{
defaultLevel = settingValue.Default;
}

var level = _state.GetState("reasoning_effort_level")
.IfNullOrEmptyAs(agent?.LlmConfig?.ReasoningEffortLevel)
.IfNullOrEmptyAs(defaultLevel);

reasoningEffortLevel = ParseReasoningEffortLevel(level);

return (temperature, reasoningEffortLevel);
}
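// Example (illustrative only, not part of the diff): with the gpt-5 entry added to
// appsettings.json below and no conversation-state or agent-level override, the effort
// level resolves through the chain above as
//   _state.GetState("reasoning_effort_level")    -> null
//   agent?.LlmConfig?.ReasoningEffortLevel       -> null
//   Parameters["EffortLevel"].Default            -> "minimal"
// so ParseReasoningEffortLevel("minimal") returns ChatReasoningEffortLevel.Minimal, while
// the configured Reasoning.Temperature of 1.0 replaces the "temperature" state value.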

private ChatReasoningEffortLevel? ParseReasoningEffortLevel(string? level)
{
if (string.IsNullOrWhiteSpace(level))
{
return null;
}

var effortLevel = new ChatReasoningEffortLevel("minimal");
switch (level.ToLower())
var effortLevel = new ChatReasoningEffortLevel("low");
level = level.ToLower();
switch (level)
{
case "minimal":
effortLevel = ChatReasoningEffortLevel.Minimal;
break;
case "low":
effortLevel = ChatReasoningEffortLevel.Low;
break;
@@ -606,7 +648,12 @@ private ChatCompletionOptions InitChatCompletionOption(Agent agent)
case "high":
effortLevel = ChatReasoningEffortLevel.High;
break;
case "none":
case "xhigh":
effortLevel = new ChatReasoningEffortLevel(level);
break;
default:
effortLevel = new ChatReasoningEffortLevel(level);
break;
}

89 changes: 88 additions & 1 deletion src/WebStarter/appsettings.json
@@ -359,7 +359,7 @@
"CachedAudioInputCost": 0,
"TextOutputCost": 0.03,
"AudioOutputCost": 0,
"ImageInputCost": 0.01,
"ImageInputCost": 0.01,
"CachedImageInputCost": 0.0025,
"ImageOutputCost": 0.04,
"ImageCosts": [
@@ -477,6 +477,93 @@
"TextOutputCost": 0.02,
"AudioOutputCost": 0.08
}
},
{
"Id": "gpt-5",
"Name": "gpt-5",
"Version": "gpt-5",
"ApiKey": "",
"Type": "chat",
"MultiModal": true,
"Capabilities": [
"Chat",
"ImageReading"
],
"Reasoning": {
"Temperature": 1.0,
"Parameters": {
"EffortLevel": {
"Default": "minimal",
"Options": [ "minimal", "low", "medium", "high" ]
}
}
},
"Cost": {
"TextInputCost": 0.005,
"CachedTextInputCost": 0.0025,
"AudioInputCost": 0.04,
"CachedAudioInputCost": 0.0025,
"TextOutputCost": 0.02,
"AudioOutputCost": 0.08
}
},
{
"Id": "gpt-5",
"Name": "gpt-5.1",
"Version": "gpt-5.1",
"ApiKey": "o",
"Type": "chat",
"MultiModal": true,
"Capabilities": [
"Chat",
"ImageReading"
],
"Reasoning": {
"Temperature": 1.0,
"Parameters": {
"EffortLevel": {
"Default": "low",
"Options": [ "none", "low", "medium", "high" ]
}
}
},
"Cost": {
"TextInputCost": 0.005,
"CachedTextInputCost": 0.0025,
"AudioInputCost": 0.04,
"CachedAudioInputCost": 0.0025,
"TextOutputCost": 0.02,
"AudioOutputCost": 0.08
}
},
{
"Id": "gpt-5",
"Name": "gpt-5.2",
"Version": "gpt-5.2",
"ApiKey": "",
"Type": "chat",
"MultiModal": true,
"Capabilities": [
"Chat",
"ImageReading"
],
"Reasoning": {
"Temperature": 1,
"Parameters": {
"EffortLevel": {
"Default": "low",
"Options": [ "none", "low", "medium", "high", "xhigh" ]
}
}
},
"Cost": {
"TextInputCost": 0.005,
"CachedTextInputCost": 0.0025,
"AudioInputCost": 0.04,
"CachedAudioInputCost": 0.0025,
"TextOutputCost": 0.02,
"AudioOutputCost": 0.08
}
}
]
},