"use client"

import { useState, useEffect } from "react"
import { ChevronDown, Cpu } from "lucide-react"
import { OllamaIcon } from "@/components/ui/ollama-icon"

/** A selectable LLM entry shown in the dropdown. */
interface LLMModel {
  id: string
  name: string
  model: string
  provider: string
  description?: string
}

// Default models always offered, regardless of user settings.
const DEFAULT_MODELS: LLMModel[] = [
  {
    id: "ollama-llama3.1:8b",
    name: "Llama 3.1 8B",
    model: "llama3.1:8b",
    provider: "ollama",
    description: "Local Ollama model",
  },
  {
    id: "nvidia-nemotron-super",
    name: "Nemotron Super 49B",
    model: "nvidia/llama-3.3-nemotron-super-49b-v1.5",
    provider: "nvidia",
    description: "NVIDIA API (requires key)",
  },
  {
    id: "nvidia-nemotron-nano",
    name: "Nemotron Nano 9B v2",
    model: "nvidia/nvidia-nemotron-nano-9b-v2",
    provider: "nvidia",
    description: "NVIDIA API - Fast & efficient",
  },
]

/**
 * Compact dropdown for picking the LLM used for RAG answer generation.
 *
 * Behavior:
 * - Merges user-selected Ollama models (localStorage key
 *   "selected_ollama_models", a JSON string array of model names) into the
 *   default model list, skipping models already present in the defaults.
 * - Restores the last selection from localStorage key "selectedModelForRAG".
 * - On selection: persists the choice to localStorage and broadcasts a
 *   "ragModelSelected" CustomEvent ({ detail: { model } }) so other
 *   components can react.
 */
export function LLMSelectorCompact() {
  const [models, setModels] = useState(DEFAULT_MODELS)
  const [selectedModel, setSelectedModel] = useState(DEFAULT_MODELS[0])
  const [isOpen, setIsOpen] = useState(false)

  // Load user-selected Ollama models from settings and merge with defaults.
  useEffect(() => {
    try {
      const selectedOllamaModels = localStorage.getItem("selected_ollama_models")
      if (selectedOllamaModels) {
        // localStorage content is untrusted input — validate before use
        // instead of assuming string[].
        const parsed: unknown = JSON.parse(selectedOllamaModels)
        if (Array.isArray(parsed)) {
          const modelNames = parsed.filter(
            (n): n is string => typeof n === "string"
          )
          const ollamaModels: LLMModel[] = modelNames.map((name) => ({
            id: `ollama-${name}`,
            name: name,
            model: name,
            provider: "ollama",
            description: "Local Ollama model",
          }))

          // Combine with default models, avoiding duplicates
          const defaultOllamaIds = DEFAULT_MODELS
            .filter((m) => m.provider === "ollama")
            .map((m) => m.model)
          const uniqueOllamaModels = ollamaModels.filter(
            (m) => !defaultOllamaIds.includes(m.model)
          )

          const allModels = [...DEFAULT_MODELS, ...uniqueOllamaModels]
          setModels(allModels)
        }
      }
    } catch (error) {
      console.error("Error loading Ollama models:", error)
    }
  }, [])

  // Restore the previously selected model, if one was saved.
  useEffect(() => {
    try {
      const saved = localStorage.getItem("selectedModelForRAG")
      if (saved) {
        const savedModel: LLMModel = JSON.parse(saved)
        setSelectedModel(savedModel)
      }
    } catch (error) {
      console.error("Error loading selected model:", error)
    }
  }, [])

  // Save selected model to localStorage and dispatch event
  const handleSelectModel = (model: LLMModel) => {
    setSelectedModel(model)
    setIsOpen(false)
    localStorage.setItem("selectedModelForRAG", JSON.stringify(model))

    // Dispatch event for other components
    window.dispatchEvent(
      new CustomEvent("ragModelSelected", { detail: { model } })
    )
  }

  // Provider-specific icon. NOTE(review): the JSX in this source arrived with
  // all element tags stripped (both branches were bare `return`); icons
  // reconstructed from the file's imports (OllamaIcon, Cpu) — confirm against
  // the original markup.
  const getModelIcon = (provider: string) => {
    if (provider === "ollama") {
      return <OllamaIcon className="h-4 w-4" />
    }
    return <Cpu className="h-4 w-4" />
  }

  // NOTE(review): the returned markup below is a reconstruction — the
  // original JSX tags were stripped from this source. The structure
  // (trigger button, backdrop, dropdown with header + model list) follows the
  // surviving fragments and comments; class names are best-effort and should
  // be reconciled with the project's styling.
  return (
    <div className="relative">
      {/* Trigger button showing the current selection */}
      <button
        type="button"
        onClick={() => setIsOpen(!isOpen)}
        className="flex items-center gap-2 rounded-md border px-3 py-1.5 text-sm"
      >
        {getModelIcon(selectedModel.provider)}
        <span>{selectedModel.name}</span>
        <ChevronDown className="h-4 w-4" />
      </button>

      {isOpen && (
        <>
          {/* Backdrop */}
          <div
            className="fixed inset-0 z-40"
            onClick={() => setIsOpen(false)}
          />

          {/* Dropdown */}
          <div className="absolute z-50 mt-1 w-64 rounded-md border bg-background shadow-lg">
            <div className="border-b px-3 py-2 text-xs font-medium text-muted-foreground">
              Select LLM for Answer Generation
            </div>
            <div className="max-h-64 overflow-y-auto py-1">
              {models.map((model) => (
                <button
                  key={model.id}
                  type="button"
                  onClick={() => handleSelectModel(model)}
                  className="flex w-full items-start gap-2 px-3 py-2 text-left text-sm hover:bg-accent"
                >
                  {getModelIcon(model.provider)}
                  <span className="flex flex-col">
                    <span>{model.name}</span>
                    {model.description && (
                      <span className="text-xs text-muted-foreground">
                        {model.description}
                      </span>
                    )}
                  </span>
                </button>
              ))}
            </div>
          </div>
        </>
      )}
    </div>
  )
}