//
// SPDX-FileCopyrightText: Copyright (c) 1993-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
"use client"
import { useState, useEffect, useRef } from "react"
import { createPortal } from "react-dom"
import { ChevronDown, Sparkles, Cpu, Server } from "lucide-react"
import { OllamaIcon } from "@/components/ui/ollama-icon"
// Base models - NVIDIA NeMo as default (first in list).
// Each entry describes one selectable chat model: `model` is the provider
// model identifier, `baseURL` the OpenAI-compatible endpoint, and
// `apiKeyName` (when present) the env var holding the credential.
// NOTE(review): the `icon` JSX elements were missing/corrupted in the source;
// they have been restored from this file's lucide/OllamaIcon imports —
// confirm the intended icon component and className for each entry.
const baseModels = [
  {
    id: "nvidia-nemotron",
    name: "NVIDIA Llama 3.3 Nemotron Super 49B",
    icon: <Sparkles className="h-4 w-4" />,
    description: "NVIDIA hosted Nemotron Super 49B v1.5 model",
    model: "nvidia/llama-3.3-nemotron-super-49b-v1.5",
    apiKeyName: "NVIDIA_API_KEY",
    baseURL: "https://integrate.api.nvidia.com/v1",
  },
  {
    id: "nvidia-nemotron-nano",
    name: "NVIDIA Nemotron Nano 9B v2",
    icon: <Cpu className="h-4 w-4" />,
    description: "NVIDIA hosted Nemotron Nano 9B v2 - Faster and more efficient",
    model: "nvidia/nvidia-nemotron-nano-9b-v2",
    apiKeyName: "NVIDIA_API_KEY",
    baseURL: "https://integrate.api.nvidia.com/v1",
  },
  // Preset Ollama model (no API key: talks to a local Ollama server).
  {
    id: "ollama-llama3.1:8b",
    name: "Ollama llama3.1:8b",
    icon: <OllamaIcon className="h-4 w-4" />,
    description: "Local Ollama server with llama3.1:8b model",
    model: "llama3.1:8b",
    baseURL: "http://localhost:11434/v1",
    provider: "ollama",
  },
]
// vLLM models removed per user request
/**
 * Helper function to create an Ollama model entry for the selector list.
 *
 * Mirrors the shape of the preset Ollama entry in `baseModels`: the id is
 * namespaced with an `ollama-` prefix so locally-discovered models never
 * collide with hosted model ids, and `baseURL` points at the default local
 * Ollama OpenAI-compatible endpoint.
 *
 * NOTE(review): the `icon` JSX element was missing in the source; restored
 * as <OllamaIcon /> to match the preset Ollama entry — confirm styling.
 *
 * @param modelName - Ollama model tag, e.g. "llama3.1:8b".
 * @returns A model descriptor consumable by the selector UI.
 */
const createOllamaModel = (modelName: string) => ({
  id: `ollama-${modelName}`,
  name: `Ollama ${modelName}`,
  icon: <OllamaIcon className="h-4 w-4" />,
  description: `Local Ollama server with ${modelName} model`,
  model: modelName,
  baseURL: "http://localhost:11434/v1",
  provider: "ollama",
})
export function ModelSelector() {
const [models, setModels] = useState(() => [...baseModels])
const [selectedModel, setSelectedModel] = useState(() => {
// Try to find a default Ollama model first
const defaultOllama = models.find(m => m.provider === "ollama")
return defaultOllama || models[0]
})
const [isOpen, setIsOpen] = useState(false)
const buttonRef = useRef(null)
const containerRef = useRef(null)
const [mounted, setMounted] = useState(false)
// Load configured Ollama models
const loadOllamaModels = () => {
try {
const selectedOllamaModels = localStorage.getItem("selected_ollama_models")
if (selectedOllamaModels) {
const modelNames = JSON.parse(selectedOllamaModels)
// Filter out models that are already in baseModels to avoid duplicates
const baseModelNames = baseModels.filter(m => m.provider === "ollama").map(m => m.model)
const filteredModelNames = modelNames.filter((name: string) => !baseModelNames.includes(name))
const ollamaModels = filteredModelNames.map(createOllamaModel)
const newModels = [...baseModels, ...ollamaModels]
setModels(newModels)
return newModels
}
} catch (error) {
console.error("Error loading Ollama models:", error)
}
// Return base models if no Ollama models configured
return [...baseModels]
}
// Dispatch custom event when model changes
const updateSelectedModel = (model: any) => {
setSelectedModel(model)
// Dispatch a custom event with the selected model data
const event = new CustomEvent('modelSelected', {
detail: { model }
})
window.dispatchEvent(event)
}
useEffect(() => {
// Save selected model to localStorage
localStorage.setItem("selectedModel", JSON.stringify(selectedModel))
}, [selectedModel])
// Initialize models and selected model
useEffect(() => {
const loadedModels = loadOllamaModels()
// Try to restore selected model from localStorage
const savedModel = localStorage.getItem("selectedModel")
if (savedModel) {
try {
const parsed = JSON.parse(savedModel)
// Find matching model in our current models array
const matchingModel = loadedModels.find(m => m.id === parsed.id)
if (matchingModel) {
updateSelectedModel(matchingModel)
} else {
// If saved model not found, use first available model
updateSelectedModel(loadedModels[0])
}
} catch (e) {
console.error("Error parsing saved model", e)
updateSelectedModel(loadedModels[0])
}
} else {
// If no model in localStorage, use first available model
updateSelectedModel(loadedModels[0])
}
}, [])
// Listen for Ollama model updates
useEffect(() => {
const handleOllamaUpdate = (event: CustomEvent) => {
console.log("Ollama models updated, reloading...")
const newModels = loadOllamaModels()
// Check if current selected model still exists
const currentModelStillExists = newModels.find(m => m.id === selectedModel.id)
if (!currentModelStillExists) {
// Select first available model if current one is no longer available
updateSelectedModel(newModels[0])
}
}
window.addEventListener('ollama-models-updated', handleOllamaUpdate as EventListener)
return () => {
window.removeEventListener('ollama-models-updated', handleOllamaUpdate as EventListener)
}
}, [selectedModel.id])
// Set mounted state after component mounts (for SSR compatibility)
useEffect(() => {
setMounted(true)
}, [])
// Close on outside click and Escape
useEffect(() => {
const handleMouseDown = (e: MouseEvent) => {
if (containerRef.current && !containerRef.current.contains(e.target as Node)) {
setIsOpen(false)
}
}
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Escape') setIsOpen(false)
}
document.addEventListener('mousedown', handleMouseDown)
document.addEventListener('keydown', handleKeyDown)
return () => {
document.removeEventListener('mousedown', handleMouseDown)
document.removeEventListener('keydown', handleKeyDown)
}
}, [])
return (