Merged
src/lib/helpers/enums.js (4 changes: 3 additions & 1 deletion)
@@ -239,10 +239,12 @@ const llmModelCapability = {
export const LlmModelCapability = Object.freeze(llmModelCapability);

const reasoningEffortLevel = {
None: "none",
Minimal: "minimal",
Low: "low",
Medium: "medium",
High: "high"
High: "high",
XHigh: "xhigh"
};
export const ReasoningEffortLevel = Object.freeze(reasoningEffortLevel);

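For reference, a minimal sketch (not part of the diff) of how the widened enum maps onto select options, in the same way the component's `defaultReasonLevelOptions` list is built further down; the `toEffortOptions` helper name is hypothetical, and the enum literal is copied from `src/lib/helpers/enums.js` as of this change.

```js
// Sketch only: turn the reasoning-effort enum into <select> options.
// The enum literal mirrors src/lib/helpers/enums.js after this PR.
const ReasoningEffortLevel = Object.freeze({
  None: "none",
  Minimal: "minimal",
  Low: "low",
  Medium: "medium",
  High: "high",
  XHigh: "xhigh"
});

// Hypothetical helper: the blank first entry means "not set".
function toEffortOptions(enumObj) {
  return [
    { value: '', label: '' },
    ...Object.entries(enumObj).map(([key, value]) => ({ value, label: key }))
  ];
}

console.log(toEffortOptions(ReasoningEffortLevel).map(o => o.value));
// => ['', 'none', 'minimal', 'low', 'medium', 'high', 'xhigh']
```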
@@ -24,9 +24,9 @@
}

const recursiveDepthLowerLimit = 1;

/** @type {import('$commonTypes').LabelValuePair[]} */
const reasonLevelOptions = [
const defaultReasonLevelOptions = [
{ value: '', label: '' },
...Object.entries(ReasoningEffortLevel).map(([k, v]) => ({
value: v,
@@ -45,6 +45,9 @@
/** @type {import('$commonTypes').LlmModelSetting[]} */
let models = [];

/** @type {import('$commonTypes').LabelValuePair[]} */
let reasoningLevelOptions = defaultReasonLevelOptions;

$: isReasoningModel = models.find(x => x.name === config.model)?.reasoning != null;
$: {
if (llmConfigs.length > 0 && innerLlmConfigs.length === 0) {
@@ -58,6 +61,7 @@
const foundModel = models.find(x => x.name === config.model);
config.provider = foundProvider || null;
config.model = foundModel?.name || null;
onModelChanged(config);
}
}

@@ -83,13 +87,15 @@
config.is_inherit = false;
models = getLlmModels(provider);
config.model = models[0]?.name;
onModelChanged(config);
handleAgentChange();
}

/** @param {any} e */
function changeModel(e) {
config.is_inherit = false;
config.model = e.target.value || null;
onModelChanged(config);
handleAgentChange();
}

@@ -125,6 +131,36 @@
e.preventDefault();
}
}

/** @param {import('$agentTypes').AgentLlmConfig | null} config */
function onModelChanged(config) {
reasoningLevelOptions = getReasoningLevelOptions(config?.model);

if (config && !reasoningLevelOptions.some(x => x.value === config.reasoning_effort_level)) {
const defaultOption = reasoningLevelOptions.find(x => !!x.value)?.value || null;
config.reasoning_effort_level = defaultOption;
}
}

/** @param {string | null | undefined} model */
function getReasoningLevelOptions(model) {
let options = defaultReasonLevelOptions;
const foundModel = models.find(x => x.name === model);
if (foundModel?.reasoning == null) {
return options;
}

const defaultOptions = foundModel?.reasoning?.parameters?.EffortLevel?.options;
if (defaultOptions?.length > 0) {
options = [
{ value: '', label: '' },
// @ts-ignore
...defaultOptions.map(x => ({ value: x, label: x }))
];
}

return options;
}
</script>

<div class="agent-config-container">
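Before the template changes, it may help to see the new option plumbing in isolation. The sketch below is not part of the PR; the model objects and the `reasoning.parameters.EffortLevel.options` shape are assumed from the code above, and all data is illustrative.

```js
// Standalone sketch of the option-derivation logic added in this PR.
// demoModels is made-up data; real models come from getLlmModels(provider).
const defaultOptions = [
  { value: '', label: '' },
  { value: 'low', label: 'Low' },
  { value: 'medium', label: 'Medium' },
  { value: 'high', label: 'High' },
  { value: 'xhigh', label: 'XHigh' }
];

const demoModels = [
  { name: 'model-a', reasoning: { parameters: { EffortLevel: { options: ['low', 'high'] } } } },
  { name: 'model-b', reasoning: {} },  // reasoning model with no explicit levels
  { name: 'model-c' }                  // not a reasoning model
];

function getOptionsFor(modelName) {
  const found = demoModels.find(m => m.name === modelName);
  const modelLevels = found?.reasoning?.parameters?.EffortLevel?.options;
  if (!modelLevels || modelLevels.length === 0) {
    return defaultOptions;             // fall back to the enum-derived defaults
  }
  // Model-specific levels override the defaults; keep the blank "not set" entry.
  return [{ value: '', label: '' }, ...modelLevels.map(x => ({ value: x, label: x }))];
}

console.log(getOptionsFor('model-a').map(o => o.value)); // ['', 'low', 'high']
console.log(getOptionsFor('model-b').map(o => o.value)); // full default list
```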
@@ -203,11 +239,11 @@
{#if isReasoningModel}
<div class="mb-3 row llm-config-item">
<label for="chat-reasoning-effort" class="col-form-label llm-config-label">
Reasoning effort
Reasoning level
</label>
<div class="llm-config-input">
<Input type="select" id="chat-reasoning-effort" value={config.reasoning_effort_level} on:change={e => changeReasoningEffortLevel(e)}>
{#each reasonLevelOptions as option}
{#each reasoningLevelOptions as option}
<option value={option.value} selected={option.value == config.reasoning_effort_level}>
{option.label}
</option>
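Finally, a small sketch (again not part of the diff) of the fallback that keeps the bound `reasoning_effort_level` valid when a newly selected model does not offer the previously chosen level, which is what drives the `selected` state of the options above; the values are made up.

```js
// Mirrors the fallback in onModelChanged: if the current selection is no
// longer offered, switch to the first non-empty option (or null if none).
const config = { model: 'model-a', reasoning_effort_level: 'xhigh' };
const options = [
  { value: '', label: '' },
  { value: 'low', label: 'Low' },
  { value: 'high', label: 'High' }
];

if (!options.some(o => o.value === config.reasoning_effort_level)) {
  config.reasoning_effort_level = options.find(o => !!o.value)?.value || null;
}

console.log(config.reasoning_effort_level); // 'low' — 'xhigh' is not offered here
```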