app/ui: harden thinking toggle support

Make composer thinking controls chat-local so per-request toggles override the global default without persisting incidental state changes. Update the thinking button visual state so the blue ring represents active thinking only while preserving keyboard focus styling with focus-visible.

Normalize /api/show capabilities for local, cloud, and app UI responses by deriving thinking_toggle and thinking_levels from exact parser support. Add exact architecture-to-parser mapping for known built-in architectures, including nemotron_h variants mapping to nemotron-3-nano, without fuzzy family matching.

Cover request think precedence, app/server capability normalization, parser architecture mapping, and non-toggle thinking models in tests.
This commit is contained in:
Patrick Kearney 2026-05-12 15:40:19 -04:00
parent efccc0a066
commit a212e5e508
8 changed files with 475 additions and 31 deletions

View file

@ -148,7 +148,7 @@ function ChatForm({
const {
settings: {
webSearchEnabled,
thinkEnabled,
thinkEnabled: defaultThinkEnabled,
thinkLevel: settingsThinkLevel,
},
setSettings,
@ -161,22 +161,32 @@ function ChatForm({
const modelSupportsThinkingLevels = useHasThinkingLevels(
selectedModel?.model,
);
// Use per-chat thinking level instead of global
const thinkLevel: ThinkingLevel =
const defaultThinkLevel: ThinkingLevel =
settingsThinkLevel === "none" || !settingsThinkLevel
? "medium"
: (settingsThinkLevel as ThinkingLevel);
const [chatThinkEnabled, setChatThinkEnabled] =
useState(defaultThinkEnabled);
const [chatThinkLevel, setChatThinkLevel] =
useState<ThinkingLevel>(defaultThinkLevel);
useEffect(() => {
setChatThinkEnabled(defaultThinkEnabled);
setChatThinkLevel(defaultThinkLevel);
}, [chatId, defaultThinkEnabled, defaultThinkLevel]);
const setThinkingLevel = (newLevel: ThinkingLevel) => {
setSettings({ ThinkLevel: newLevel });
setChatThinkEnabled(true);
setChatThinkLevel(newLevel);
};
useEffect(() => {
if (supportsThinkToggling && thinkEnabled && webSearchEnabled) {
if (supportsThinkToggling && chatThinkEnabled && webSearchEnabled) {
setSettings({ WebSearchEnabled: false });
}
}, [
selectedModel?.model,
supportsThinkToggling,
thinkEnabled,
chatThinkEnabled,
webSearchEnabled,
setSettings,
]);
@ -500,10 +510,10 @@ function ChatForm({
const useWebSearch =
supportsWebSearch && webSearchEnabled && !cloudDisabled;
const useThink = thinkEnabled
const useThink = chatThinkEnabled
? supportsThinkToggling
? modelSupportsThinkingLevels
? thinkLevel
? chatThinkLevel
: true
: undefined
: supportsThinking
@ -906,7 +916,9 @@ function ChatForm({
mode="thinkingLevel"
ref={thinkingLevelButtonRef}
isVisible={modelSupportsThinkingLevels}
currentLevel={thinkLevel}
isActive={chatThinkEnabled}
currentLevel={chatThinkLevel}
onToggle={() => setChatThinkEnabled(false)}
onLevelChange={setThinkingLevel}
onDropdownToggle={handleThinkingLevelDropdownToggle}
/>
@ -921,18 +933,16 @@ function ChatForm({
isVisible={
supportsThinkToggling && !modelSupportsThinkingLevels
}
isActive={thinkEnabled}
isActive={chatThinkEnabled}
onToggle={() => {
// DeepSeek-v3 specific - thinking and web search are mutually exclusive
if (supportsThinkToggling) {
const enable = !thinkEnabled;
setSettings({
ThinkEnabled: enable,
...(enable ? { WebSearchEnabled: false } : {}),
});
const enable = !chatThinkEnabled;
setChatThinkEnabled(enable);
if (enable) setSettings({ WebSearchEnabled: false });
return;
}
setSettings({ ThinkEnabled: !thinkEnabled });
setChatThinkEnabled(!chatThinkEnabled);
}}
/>
</>
@ -947,9 +957,9 @@ function ChatForm({
}
const enable = !webSearchEnabled;
if (supportsThinkToggling && enable) {
setChatThinkEnabled(false);
setSettings({
WebSearchEnabled: true,
ThinkEnabled: false,
});
return;
}

View file

@ -69,16 +69,18 @@ export const ThinkButton = forwardRef<HTMLButtonElement, ThinkButtonProps>(
if (!isVisible) return null;
const activeClass =
"text-[rgba(0,115,255,1)] dark:text-[rgba(70,155,255,1)] ring-2 ring-[rgba(0,115,255,0.28)] dark:ring-[rgba(70,155,255,0.32)]";
const inactiveClass = "text-neutral-500 dark:text-neutral-400 ring-0";
if (mode === "think") {
return (
<button
ref={ref}
title={isActive ? "Disable think mode" : "Enable think mode"}
onClick={onToggle}
className={`select-none flex items-center justify-center rounded-full h-9 w-9 bg-white dark:bg-neutral-700 focus:outline-none focus:ring-2 focus:ring-blue-500 cursor-pointer transition-all whitespace-nowrap border border-transparent ${
isActive
? "text-[rgba(0,115,255,1)] dark:text-[rgba(70,155,255,1)]"
: "text-neutral-500 dark:text-neutral-400"
className={`select-none flex items-center justify-center rounded-full h-9 w-9 bg-white dark:bg-neutral-700 focus:outline-none focus-visible:ring-2 focus-visible:ring-blue-500 cursor-pointer transition-all whitespace-nowrap border border-transparent ${
isActive ? activeClass : inactiveClass
}`}
>
<svg
@ -94,20 +96,25 @@ export const ThinkButton = forwardRef<HTMLButtonElement, ThinkButtonProps>(
}
// thinkingLevel mode
const displayLabel = currentLevel
? THINKING_LEVEL_LABELS[currentLevel]
: "";
const displayLabel =
isActive && currentLevel ? THINKING_LEVEL_LABELS[currentLevel] : "Off";
return (
<div className="relative" ref={dropdownRef}>
<button
ref={ref}
title={`Thinking level: ${displayLabel}`}
title={
isActive
? `Thinking level: ${displayLabel}`
: "Thinking disabled"
}
onClick={() => {
const newState = !isDropdownOpen;
setIsDropdownOpen(newState);
onDropdownToggle?.(newState);
}}
className={`select-none flex items-center justify-center gap-1 rounded-full h-9 px-3 bg-white dark:bg-neutral-700 focus:outline-none focus:ring-2 focus:ring-blue-500 cursor-pointer transition-all whitespace-nowrap border border-transparent text-[rgba(0,115,255,1)] dark:text-[rgba(70,155,255,1)]`}
className={`select-none flex items-center justify-center gap-1 rounded-full h-9 px-3 bg-white dark:bg-neutral-700 focus:outline-none focus-visible:ring-2 focus-visible:ring-blue-500 cursor-pointer transition-all whitespace-nowrap border border-transparent ${
isActive ? activeClass : inactiveClass
}`}
>
<div className="justify-center items-center flex space-x-2">
<svg
@ -137,6 +144,17 @@ export const ThinkButton = forwardRef<HTMLButtonElement, ThinkButtonProps>(
{isDropdownOpen && (
<div className="absolute bottom-full mb-2 text-[15px] rounded-2xl overflow-hidden bg-white border border-neutral-100 text-neutral-800 shadow-xl shadow-black/5 backdrop-blur-lg dark:border-neutral-600/40 dark:bg-neutral-800 dark:text-white dark:ring-black/20 min-w-[120px]">
<button
className={`w-full text-left px-3 py-2 cursor-pointer hover:bg-neutral-100 dark:hover:bg-neutral-700 transition-colors text-neutral-700 dark:text-neutral-300 ${
!isActive ? "bg-neutral-100 dark:bg-neutral-700/60" : ""
}`}
onClick={() => {
onToggle?.();
setIsDropdownOpen(false);
}}
>
Off
</button>
{Object.entries(THINKING_LEVELS).map(([, level]) => (
<button
key={level}

View file

@ -33,6 +33,7 @@ import (
ollamaAuth "github.com/ollama/ollama/auth"
"github.com/ollama/ollama/envconfig"
"github.com/ollama/ollama/manifest"
"github.com/ollama/ollama/model/parsers"
"github.com/ollama/ollama/types/model"
_ "github.com/tkrajina/typescriptify-golang-structs/typescriptify"
)
@ -296,7 +297,7 @@ func (s *Server) Handler() http.Handler {
// Ollama proxy endpoints
ollamaProxy := s.ollamaProxy()
mux.Handle("GET /api/tags", ollamaProxy)
mux.Handle("POST /api/show", ollamaProxy)
mux.Handle("POST /api/show", handle(s.showModel))
mux.Handle("GET /api/version", ollamaProxy)
mux.Handle("GET /api/status", ollamaProxy)
mux.Handle("HEAD /api/version", ollamaProxy)
@ -352,6 +353,32 @@ func (s *Server) inferenceClient() *api.Client {
return api.NewClient(envconfig.Host(), userAgentHTTPClient(0))
}
// showModel serves POST /api/show for the app UI. It forwards the request to
// the local inference server, then normalizes the returned capability list so
// the UI sees consistent thinking-toggle/thinking-level capabilities.
func (s *Server) showModel(w http.ResponseWriter, r *http.Request) error {
	ctx := r.Context()

	// Give the inference server a short window to become reachable before
	// forwarding; otherwise surface the wait error to the caller.
	if err := WaitForServer(ctx, 10*time.Second); err != nil {
		return err
	}

	var req api.ShowRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		return fmt.Errorf("invalid request body: %w", err)
	}

	// Accept the legacy "name" field as an alias for "model"; one of the two
	// must be present.
	switch {
	case req.Model != "":
		// already set
	case req.Name != "":
		req.Model = req.Name
	default:
		return fmt.Errorf("model is required")
	}

	resp, err := s.inferenceClient().Show(ctx, &req)
	if err != nil {
		return err
	}
	normalizeAppShowCapabilities(resp)

	w.Header().Set("Content-Type", "application/json")
	return json.NewEncoder(w).Encode(resp)
}
func userAgentHTTPClient(timeout time.Duration) *http.Client {
return &http.Client{
Timeout: timeout,
@ -1691,15 +1718,96 @@ func supportsBrowserTools(model string) bool {
}
// resolveChatThinkValue decides the effective "think" value for a chat
// request.
//
// Precedence (highest first):
//  1. an explicit per-request think value (requestThink != nil) — it
//     overrides the global setting in either direction;
//  2. the global setting: when ThinkEnabled is off, thinking is disabled;
//  3. otherwise fall back to whether the model supports thinking at all.
func resolveChatThinkValue(settings store.Settings, requestThink any, modelSupportsThinking bool) any {
	// BUG FIX: the ThinkEnabled check previously ran before this one, which
	// made an explicit request value unreachable whenever the global setting
	// was off — contradicting the "explicit request overrides global setting
	// off" test case. The explicit value must be consulted first.
	if requestThink != nil {
		return requestThink
	}
	if !settings.ThinkEnabled {
		return false
	}
	return modelSupportsThinking
}
// normalizeAppShowCapabilities derives thinking-control capabilities for an
// /api/show response served through the app UI. Models that can think get a
// thinking-toggle capability when an exact parser (by name or architecture)
// reports toggle support; gpt-oss additionally gets thinking levels.
func normalizeAppShowCapabilities(resp *api.ShowResponse) {
	if resp == nil {
		return
	}
	has := func(c model.Capability) bool {
		return slices.Contains(resp.Capabilities, c)
	}
	// Only thinking-capable models are eligible for toggle/level capabilities.
	if !has(model.CapabilityThinking) {
		return
	}
	// gpt-oss exposes discrete thinking levels in addition to the toggle.
	if isGptOSSShowResponse(resp) {
		appendShowCapability(resp, model.CapabilityThinkingToggle)
		appendShowCapability(resp, model.CapabilityThinkingLevels)
		return
	}
	// Nothing to derive when the backend already reported a toggle.
	if has(model.CapabilityThinkingToggle) {
		return
	}
	// Exact matching only: try each candidate as a parser name, then as an
	// architecture; the first toggle-capable thinking parser wins.
	for _, candidate := range showCapabilityParserCandidates(resp) {
		p := parsers.ParserForName(candidate)
		if p == nil {
			p = parsers.ParserForArchitecture(candidate)
		}
		if p != nil && p.HasThinkingSupport() && p.CanToggleThinking() {
			appendShowCapability(resp, model.CapabilityThinkingToggle)
			return
		}
	}
}
// appendShowCapability adds cap to the response's capability list unless it
// is already present (idempotent append).
func appendShowCapability(resp *api.ShowResponse, cap model.Capability) {
	if slices.Contains(resp.Capabilities, cap) {
		return
	}
	resp.Capabilities = append(resp.Capabilities, cap)
}
// isGptOSSShowResponse reports whether any normalized parser candidate
// identifies the model as gpt-oss (either spelling).
func isGptOSSShowResponse(resp *api.ShowResponse) bool {
	for _, c := range showCapabilityParserCandidates(resp) {
		switch c {
		case "gptoss", "gpt-oss":
			return true
		}
	}
	return false
}
// showCapabilityParserCandidates collects the strings that may identify a
// parser for this model: the explicit parser name, model details, the remote
// model name, and selected ModelInfo keys. Each candidate is lower-cased,
// trimmed, stripped of any ":tag" suffix, and de-duplicated; order of first
// appearance is preserved.
func showCapabilityParserCandidates(resp *api.ShowResponse) []string {
	raw := []string{
		resp.Parser,
		resp.Details.Family,
		resp.Details.ParentModel,
		resp.RemoteModel,
	}
	for _, key := range []string{"general.architecture", "general.basename"} {
		if value, ok := resp.ModelInfo[key].(string); ok {
			raw = append(raw, value)
		}
	}

	seen := make(map[string]struct{}, len(raw))
	candidates := make([]string, 0, len(raw)*2)
	for _, c := range raw {
		c = strings.TrimSpace(strings.ToLower(c))
		if c == "" {
			continue
		}
		// Drop a model tag suffix such as ":31b".
		if base, _, ok := strings.Cut(c, ":"); ok {
			c = base
		}
		if _, dup := seen[c]; dup {
			continue
		}
		seen[c] = struct{}{}
		candidates = append(candidates, c)
	}
	return candidates
}
// buildChatRequest converts store.Chat to api.ChatRequest
func (s *Server) buildChatRequest(chat *store.Chat, model string, think any, availableTools []map[string]any) (*api.ChatRequest, error) {
var msgs []api.Message

View file

@ -11,6 +11,7 @@ import (
"net/http/httptest"
"path/filepath"
"runtime"
"slices"
"strings"
"sync/atomic"
"testing"
@ -18,6 +19,7 @@ import (
"github.com/ollama/ollama/api"
"github.com/ollama/ollama/app/store"
"github.com/ollama/ollama/app/updater"
"github.com/ollama/ollama/types/model"
)
func TestHandlePostApiSettings(t *testing.T) {
@ -676,9 +678,15 @@ func TestResolveChatThinkValue(t *testing.T) {
want: false,
},
{
name: "global setting off overrides explicit request",
name: "explicit request overrides global setting off",
settings: store.Settings{ThinkEnabled: false},
requestThink: true,
want: true,
},
{
name: "explicit false overrides global setting on",
settings: store.Settings{ThinkEnabled: true},
requestThink: false,
want: false,
},
{
@ -711,6 +719,82 @@ func TestResolveChatThinkValue(t *testing.T) {
}
}
// TestNormalizeAppShowCapabilitiesThinkingControls verifies that the app's
// /api/show normalization derives thinking-toggle (and, for gpt-oss,
// thinking-level) capabilities from exact parser/architecture matches only,
// and leaves unknown or non-thinking models untouched.
func TestNormalizeAppShowCapabilitiesThinkingControls(t *testing.T) {
	tests := []struct {
		name string
		resp *api.ShowResponse
		want []model.Capability
	}{
		{
			// Family resolves to a registered toggle-capable parser.
			name: "qwen adds thinking toggle",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
				Details:      api.ModelDetails{Family: "qwen3.5"},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityThinkingToggle},
		},
		{
			// ParentModel is also a candidate; ":31b" tag must be stripped.
			name: "gemma parent adds thinking toggle",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
				Details:      api.ModelDetails{ParentModel: "gemma4:31b"},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityThinkingToggle},
		},
		{
			// nemotron_h variants map to the nemotron-3-nano parser via the
			// exact architecture mapping.
			name: "nemotron architecture adds thinking toggle",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools},
				Details:      api.ModelDetails{Family: "nemotron_h_moe"},
				ModelInfo:    map[string]any{"general.architecture": "nemotron_h_moe"},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools, model.CapabilityThinkingToggle},
		},
		{
			// gpt-oss is special-cased: toggle AND levels are added.
			name: "gpt oss adds thinking toggle and levels",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
				Details:      api.ModelDetails{Family: "gptoss"},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityThinkingToggle, model.CapabilityThinkingLevels},
		},
		{
			// Thinking model whose parser cannot toggle: no capability added.
			name: "non-toggle thinking model stays unchanged",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
				Details:      api.ModelDetails{Family: "qwen3-vl-thinking"},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
		},
		{
			// No fuzzy matching: unmapped architectures stay as-is.
			name: "unknown cloud thinking architecture stays unchanged",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools},
				Details:      api.ModelDetails{Family: "kimi-k2"},
				ModelInfo:    map[string]any{"general.architecture": "kimi-k2"},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools},
		},
		{
			// Without the thinking capability, normalization is a no-op even
			// for a family that would otherwise match a parser.
			name: "non-thinking model stays unchanged",
			resp: &api.ShowResponse{
				Capabilities: []model.Capability{model.CapabilityCompletion},
				Details:      api.ModelDetails{Family: "gemma4"},
			},
			want: []model.Capability{model.CapabilityCompletion},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			normalizeAppShowCapabilities(tt.resp)
			// Order matters: derived capabilities must be appended after the
			// originals, so compare with slices.Equal rather than set equality.
			if !slices.Equal(tt.resp.Capabilities, tt.want) {
				t.Fatalf("capabilities = %v, want %v", tt.resp.Capabilities, tt.want)
			}
		})
	}
}
func TestSettingsToggleAutoUpdateOff_CancelsDownload(t *testing.T) {
testStore := &store.Store{
DBPath: filepath.Join(t.TempDir(), "db.sqlite"),

View file

@ -39,6 +39,26 @@ func Register(name string, constructor ParserConstructor) {
registry.Register(name, constructor)
}
// ParserNameForArchitecture maps a model architecture identifier to the name
// of its built-in parser. Only exact, known architectures are mapped; any
// other input yields "" (no fuzzy matching).
func ParserNameForArchitecture(arch string) string {
	byArch := map[string]string{
		"gemma4":          "gemma4",
		"laguna":          "laguna",
		"nemotron_h":      "nemotron-3-nano",
		"nemotron_h_moe":  "nemotron-3-nano",
		"nemotron_h_omni": "nemotron-3-nano",
	}
	// Unknown keys return the zero value "", matching the switch default.
	return byArch[strings.TrimSpace(arch)]
}
// ParserForArchitecture returns the built-in parser for an exactly-known
// architecture, or nil when the architecture has no mapping.
func ParserForArchitecture(arch string) Parser {
	name := ParserNameForArchitecture(arch)
	if name == "" {
		return nil
	}
	return ParserForName(name)
}
func ParserForName(name string) Parser {
if parser, ok := registry.constructors[name]; ok {
return parser()

View file

@ -73,6 +73,44 @@ func TestCanToggleThinking(t *testing.T) {
}
}
// TestParserForArchitecture checks the exact architecture-to-parser mapping:
// known built-in architectures resolve to a toggle-capable parser, while
// unknown architectures (rows with zero-value wantParser) resolve to nothing.
func TestParserForArchitecture(t *testing.T) {
	tests := []struct {
		arch       string
		wantParser string
		wantToggle bool
	}{
		{arch: "gemma4", wantParser: "gemma4", wantToggle: true},
		{arch: "laguna", wantParser: "laguna", wantToggle: true},
		// All nemotron_h variants share the nemotron-3-nano parser.
		{arch: "nemotron_h", wantParser: "nemotron-3-nano", wantToggle: true},
		{arch: "nemotron_h_moe", wantParser: "nemotron-3-nano", wantToggle: true},
		{arch: "nemotron_h_omni", wantParser: "nemotron-3-nano", wantToggle: true},
		// Unknown architectures: wantParser stays "" and no parser is expected.
		{arch: "glm5.1"},
		{arch: "kimi-k2"},
	}
	for _, tt := range tests {
		t.Run(tt.arch, func(t *testing.T) {
			if got := ParserNameForArchitecture(tt.arch); got != tt.wantParser {
				t.Fatalf("ParserNameForArchitecture() = %q, want %q", got, tt.wantParser)
			}
			parser := ParserForArchitecture(tt.arch)
			if tt.wantParser == "" {
				// Unknown architecture must map to nil, not a fallback parser.
				if parser != nil {
					t.Fatalf("ParserForArchitecture() = %#v, want nil", parser)
				}
				return
			}
			if parser == nil {
				t.Fatalf("ParserForArchitecture() = nil, want parser")
			}
			if got := parser.CanToggleThinking(); got != tt.wantToggle {
				t.Fatalf("CanToggleThinking() = %v, want %v", got, tt.wantToggle)
			}
		})
	}
}
func TestRegisterCustomParser(t *testing.T) {
// Register a custom parser
Register("custom-parser", func() Parser {

View file

@ -1154,6 +1154,14 @@ func (s *Server) ShowHandler(c *gin.Context) {
ctx = c.Request.Context()
}
if resp, ok := s.modelCaches.show.GetCloudSWR(ctx, req); ok {
normalizeShowCapabilities(resp)
c.JSON(http.StatusOK, resp)
return
}
resp, err := s.modelCaches.show.fetchCloudShow(ctx, req.Model, req.Verbose)
if err == nil {
normalizeShowCapabilities(resp)
s.modelCaches.show.setCloud(modelShowCloudKeyForModel(req.Model, req.Verbose), resp)
c.JSON(http.StatusOK, resp)
return
}
@ -1184,6 +1192,7 @@ func (s *Server) ShowHandler(c *gin.Context) {
}
return
}
normalizeShowCapabilities(resp)
if modelRef.Source == modelSourceLocal && resp.RemoteHost != "" {
c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", modelRef.Original)})
@ -1204,6 +1213,87 @@ func (s *Server) ShowHandler(c *gin.Context) {
c.JSON(http.StatusOK, resp)
}
// normalizeShowCapabilities derives thinking-control capabilities for a
// /api/show response: thinking-capable models gain a thinking-toggle
// capability when an exact parser (by name or architecture) supports
// toggling, and gpt-oss additionally gains thinking levels.
func normalizeShowCapabilities(resp *api.ShowResponse) {
	if resp == nil {
		return
	}
	has := func(c model.Capability) bool {
		return slices.Contains(resp.Capabilities, c)
	}
	// Only thinking-capable models are eligible.
	if !has(model.CapabilityThinking) {
		return
	}
	// gpt-oss exposes discrete thinking levels in addition to the toggle.
	if isGptOSSFamily(resp) {
		appendCapability(resp, model.CapabilityThinkingToggle)
		appendCapability(resp, model.CapabilityThinkingLevels)
		return
	}
	// Nothing to derive when the toggle was already reported.
	if has(model.CapabilityThinkingToggle) {
		return
	}
	// Exact matching only: try each candidate as a parser name, then as an
	// architecture; the first toggle-capable thinking parser wins.
	for _, candidate := range showCapabilityParserCandidates(resp) {
		p := parsers.ParserForName(candidate)
		if p == nil {
			p = parsers.ParserForArchitecture(candidate)
		}
		if p != nil && p.HasThinkingSupport() && p.CanToggleThinking() {
			appendCapability(resp, model.CapabilityThinkingToggle)
			return
		}
	}
}
// appendCapability adds cap to the response's capability list unless it is
// already present (idempotent append).
func appendCapability(resp *api.ShowResponse, cap model.Capability) {
	if slices.Contains(resp.Capabilities, cap) {
		return
	}
	resp.Capabilities = append(resp.Capabilities, cap)
}
// isGptOSSFamily reports whether any normalized parser candidate identifies
// the model as gpt-oss (either spelling).
func isGptOSSFamily(resp *api.ShowResponse) bool {
	for _, c := range showCapabilityParserCandidates(resp) {
		switch c {
		case "gptoss", "gpt-oss":
			return true
		}
	}
	return false
}
// showCapabilityParserCandidates collects the strings that may identify a
// parser for this model: the explicit parser name, model details, the remote
// model name, and selected ModelInfo keys. Each candidate is lower-cased,
// trimmed, stripped of any ":tag" suffix, and de-duplicated; order of first
// appearance is preserved.
func showCapabilityParserCandidates(resp *api.ShowResponse) []string {
	raw := []string{
		resp.Parser,
		resp.Details.Family,
		resp.Details.ParentModel,
		resp.RemoteModel,
	}
	for _, key := range []string{"general.architecture", "general.basename"} {
		if value, ok := resp.ModelInfo[key].(string); ok {
			raw = append(raw, value)
		}
	}

	seen := make(map[string]struct{}, len(raw))
	candidates := make([]string, 0, len(raw)*2)
	for _, c := range raw {
		c = strings.TrimSpace(strings.ToLower(c))
		if c == "" {
			continue
		}
		// Drop a model tag suffix such as ":31b".
		if base, _, ok := strings.Cut(c, ":"); ok {
			c = base
		}
		if _, dup := seen[c]; dup {
			continue
		}
		seen[c] = struct{}{}
		candidates = append(candidates, c)
	}
	return candidates
}
func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
name := model.ParseName(req.Model)
if !name.IsValid() {

View file

@ -789,6 +789,82 @@ func TestShow(t *testing.T) {
}
}
// TestNormalizeShowCapabilitiesThinkingControls verifies the server-side
// /api/show capability normalization: thinking-capable models gain a toggle
// only via exact parser/architecture matches (gpt-oss also gains levels);
// unknown and non-thinking models are left untouched.
func TestNormalizeShowCapabilitiesThinkingControls(t *testing.T) {
	tests := []struct {
		name string
		resp api.ShowResponse
		want []model.Capability
	}{
		{
			// Family resolves to a registered toggle-capable parser.
			name: "known qwen3.5 cloud family gets toggle",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{Family: "qwen3.5"},
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityThinkingToggle},
		},
		{
			// ParentModel is also a candidate; ":31b" tag must be stripped.
			name: "known gemma4 parent model gets toggle",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{ParentModel: "gemma4:31b"},
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityThinkingToggle},
		},
		{
			// nemotron_h variants map to the nemotron-3-nano parser via the
			// exact architecture mapping.
			name: "known nemotron architecture gets toggle",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{Family: "nemotron_h_moe"},
				ModelInfo:    map[string]any{"general.architecture": "nemotron_h_moe"},
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools, model.CapabilityThinkingToggle},
		},
		{
			// gpt-oss is special-cased: toggle AND levels are added.
			name: "gpt oss gets toggle and levels",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{Family: "gptoss"},
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityThinkingToggle, model.CapabilityThinkingLevels},
		},
		{
			// Thinking model whose parser cannot toggle: no capability added.
			name: "known non-toggle parser stays thinking only",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{Family: "qwen3-vl-thinking"},
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking},
		},
		{
			// No fuzzy matching: unmapped architectures stay as-is.
			name: "unknown cloud thinking architecture stays unchanged",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{Family: "kimi-k2"},
				ModelInfo:    map[string]any{"general.architecture": "kimi-k2"},
				Capabilities: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools},
			},
			want: []model.Capability{model.CapabilityCompletion, model.CapabilityThinking, model.CapabilityTools},
		},
		{
			// Without the thinking capability, normalization is a no-op even
			// for a family that would otherwise match a parser.
			name: "non-thinking model is unchanged",
			resp: api.ShowResponse{
				Details:      api.ModelDetails{Family: "gemma4"},
				Capabilities: []model.Capability{model.CapabilityCompletion},
			},
			want: []model.Capability{model.CapabilityCompletion},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			normalizeShowCapabilities(&tt.resp)
			// Order matters: derived capabilities must be appended after the
			// originals, so compare with slices.Equal rather than set equality.
			if !slices.Equal(tt.resp.Capabilities, tt.want) {
				t.Fatalf("capabilities = %v, want %v", tt.resp.Capabilities, tt.want)
			}
		})
	}
}
func TestShowCopilotUserAgentOverwritesExistingBasename(t *testing.T) {
t.Setenv("OLLAMA_MODELS", t.TempDir())