config.go 24 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855
  1. // Package config manages application configuration from various sources.
  2. package config
import (
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"os"
	"os/user"
	"path/filepath"
	"strings"

	"github.com/spf13/viper"
	"github.com/sst/opencode/internal/llm/models"
)
// MCPType defines the type of MCP (Model Control Protocol) server.
type MCPType string

// Supported MCP types. applyDefaultValues falls back to MCPStdio when a
// server entry leaves the type empty.
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)
// MCPServer defines the configuration for a Model Control Protocol server.
type MCPServer struct {
	// Command, Env and Args describe the server process — presumably used by
	// the stdio transport (TODO confirm against the MCP client code).
	Command string   `json:"command"`
	Env     []string `json:"env"`
	Args    []string `json:"args"`
	// Type selects the transport; defaults to "stdio" (see applyDefaultValues).
	Type MCPType `json:"type"`
	// URL and Headers describe an HTTP endpoint — presumably used by the sse
	// transport.
	URL     string            `json:"url"`
	Headers map[string]string `json:"headers"`
}
// AgentName identifies one of the application's built-in agent roles.
type AgentName string

// Built-in agent names used as keys of Config.Agents.
const (
	AgentPrimary AgentName = "primary"
	AgentTask    AgentName = "task"
	// AgentTitle generates short titles; Load forces its MaxTokens to 80.
	AgentTitle AgentName = "title"
)
// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	// Model must be a key of models.SupportedModels (enforced by validateAgent).
	Model models.ModelID `json:"model"`
	// MaxTokens caps generation; validateAgent clamps it to at most half the
	// model's context window and replaces non-positive values with defaults.
	MaxTokens int64 `json:"maxTokens"`
	// ReasoningEffort applies to OpenAI reasoning models: low, medium or high.
	ReasoningEffort string `json:"reasoningEffort"`
}
// Provider defines configuration for an LLM provider.
type Provider struct {
	APIKey string `json:"apiKey"`
	// Disabled is forced to true by Validate when APIKey is empty.
	Disabled bool `json:"disabled"`
}
// Data defines storage configuration.
type Data struct {
	// Directory defaults to ".opencode" (see setDefaults).
	Directory string `json:"directory,omitempty"`
}
// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	// NOTE(review): the JSON key is "enabled" but the field means "disabled",
	// so `"enabled": true` in a config file would set Disabled=true — confirm
	// whether the tag should read "disabled".
	Disabled bool   `json:"enabled"`
	Command  string `json:"command"`
	Args     []string `json:"args"`
	// Options is passed through opaquely; its shape depends on the server.
	Options any `json:"options"`
}
// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	// Theme defaults to "opencode" (see setDefaults).
	Theme string `json:"theme,omitempty"`
	// CustomTheme holds user-defined theme values; keys/shape are interpreted
	// by the TUI layer, not here.
	CustomTheme map[string]any `json:"customTheme,omitempty"`
}
// ShellConfig defines the configuration for the shell used by the bash tool.
type ShellConfig struct {
	Path string   `json:"path,omitempty"`
	Args []string `json:"args,omitempty"`
}
// Config is the main configuration structure for the application.
// It is populated by Load from viper defaults, the global config file,
// a project-local config file, and environment variables.
type Config struct {
	Data Data `json:"data"`
	// WorkingDir is serialized under the short key "wd".
	WorkingDir string                            `json:"wd,omitempty"`
	MCPServers map[string]MCPServer              `json:"mcpServers,omitempty"`
	Providers  map[models.ModelProvider]Provider `json:"providers,omitempty"`
	LSP        map[string]LSPConfig              `json:"lsp,omitempty"`
	Agents     map[AgentName]Agent               `json:"agents,omitempty"`
	// Debug also raises the slog level to Debug (see Load).
	Debug        bool     `json:"debug,omitempty"`
	DebugLSP     bool     `json:"debugLSP,omitempty"`
	ContextPaths []string `json:"contextPaths,omitempty"`
	TUI          TUIConfig   `json:"tui"`
	Shell        ShellConfig `json:"shell,omitempty"`
}
// Application constants
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"

	// MaxTokensFallbackDefault is used by validateAgent when a model declares
	// no DefaultMaxTokens of its own.
	MaxTokensFallbackDefault = 4096
)
// defaultContextPaths is the default set of candidate context files and
// directories (seeded into "contextPaths" by setDefaults). Several spelling
// variants of the opencode files are listed explicitly.
var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"CONTEXT.md",
	"CONTEXT.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}
// Global configuration instance, set once by Load and read by Get and the
// other package helpers. Access is not synchronized — presumably initialized
// once at startup before concurrent use (TODO confirm).
var cfg *Config
// Load initializes the configuration from environment variables and config files.
// If debug is true, debug mode is enabled and log level is set to debug.
// It returns an error if configuration loading fails.
//
// Load acts as a singleton initializer: the first call builds and caches the
// package-level cfg; subsequent calls return the cached value unchanged (the
// workingDir/debug arguments are then ignored).
func Load(workingDir string, debug bool) (*Config, error) {
	// Already loaded — return the cached instance.
	if cfg != nil {
		return cfg, nil
	}
	cfg = &Config{
		WorkingDir: workingDir,
		MCPServers: make(map[string]MCPServer),
		Providers:  make(map[models.ModelProvider]Provider),
		LSP:        make(map[string]LSPConfig),
	}
	// Order matters below: search paths and env wiring first, then static
	// defaults, then the global file, then the project-local overlay, then
	// provider-derived model defaults — each layer can override the previous.
	configureViper()
	setDefaults(debug)
	// Read global config
	if err := readConfig(viper.ReadInConfig()); err != nil {
		return cfg, err
	}
	// Load and merge local config
	mergeLocalConfig(workingDir)
	setProviderDefaults()
	// Apply configuration to the struct
	if err := viper.Unmarshal(cfg); err != nil {
		return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
	}
	applyDefaultValues()
	defaultLevel := slog.LevelInfo
	if cfg.Debug {
		defaultLevel = slog.LevelDebug
	}
	slog.SetLogLoggerLevel(defaultLevel)
	// Validate configuration
	if err := Validate(); err != nil {
		return cfg, fmt.Errorf("config validation failed: %w", err)
	}
	if cfg.Agents == nil {
		cfg.Agents = make(map[AgentName]Agent)
	}
	// Override the max tokens for title agent — titles should stay short.
	// Note: the struct literal omits ReasoningEffort, so any configured
	// reasoning effort for the title agent is reset here.
	cfg.Agents[AgentTitle] = Agent{
		Model:     cfg.Agents[AgentTitle].Model,
		MaxTokens: 80,
	}
	return cfg, nil
}
  152. // configureViper sets up viper's configuration paths and environment variables.
  153. func configureViper() {
  154. viper.SetConfigName(fmt.Sprintf(".%s", appName))
  155. viper.SetConfigType("json")
  156. viper.AddConfigPath("$HOME")
  157. viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
  158. viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
  159. viper.SetEnvPrefix(strings.ToUpper(appName))
  160. viper.AutomaticEnv()
  161. }
  162. // setDefaults configures default values for configuration options.
  163. func setDefaults(debug bool) {
  164. viper.SetDefault("data.directory", defaultDataDirectory)
  165. viper.SetDefault("contextPaths", defaultContextPaths)
  166. viper.SetDefault("tui.theme", "opencode")
  167. if debug {
  168. viper.SetDefault("debug", true)
  169. viper.Set("log.level", "debug")
  170. } else {
  171. viper.SetDefault("debug", false)
  172. viper.SetDefault("log.level", defaultLogLevel)
  173. }
  174. }
  175. // setProviderDefaults configures LLM provider defaults based on provider provided by
  176. // environment variables and configuration file.
  177. func setProviderDefaults() {
  178. // Set all API keys we can find in the environment
  179. if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
  180. viper.SetDefault("providers.anthropic.apiKey", apiKey)
  181. }
  182. if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
  183. viper.SetDefault("providers.openai.apiKey", apiKey)
  184. }
  185. if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
  186. viper.SetDefault("providers.gemini.apiKey", apiKey)
  187. }
  188. if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
  189. viper.SetDefault("providers.groq.apiKey", apiKey)
  190. }
  191. if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
  192. viper.SetDefault("providers.openrouter.apiKey", apiKey)
  193. }
  194. if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
  195. viper.SetDefault("providers.xai.apiKey", apiKey)
  196. }
  197. if apiKey := os.Getenv("AZURE_OPENAI_ENDPOINT"); apiKey != "" {
  198. // api-key may be empty when using Entra ID credentials – that's okay
  199. viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
  200. }
  201. // Use this order to set the default models
  202. // 1. Anthropic
  203. // 2. OpenAI
  204. // 3. Google Gemini
  205. // 4. Groq
  206. // 5. OpenRouter
  207. // 6. AWS Bedrock
  208. // 7. Azure
  209. // 8. Google Cloud VertexAI
  210. // Anthropic configuration
  211. if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
  212. viper.SetDefault("agents.primary.model", models.Claude4Sonnet)
  213. viper.SetDefault("agents.task.model", models.Claude4Sonnet)
  214. viper.SetDefault("agents.title.model", models.Claude4Sonnet)
  215. return
  216. }
  217. // OpenAI configuration
  218. if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
  219. viper.SetDefault("agents.primary.model", models.GPT41)
  220. viper.SetDefault("agents.task.model", models.GPT41Mini)
  221. viper.SetDefault("agents.title.model", models.GPT41Mini)
  222. return
  223. }
  224. // Google Gemini configuration
  225. if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
  226. viper.SetDefault("agents.primary.model", models.Gemini25)
  227. viper.SetDefault("agents.task.model", models.Gemini25Flash)
  228. viper.SetDefault("agents.title.model", models.Gemini25Flash)
  229. return
  230. }
  231. // Groq configuration
  232. if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
  233. viper.SetDefault("agents.primary.model", models.QWENQwq)
  234. viper.SetDefault("agents.task.model", models.QWENQwq)
  235. viper.SetDefault("agents.title.model", models.QWENQwq)
  236. return
  237. }
  238. // OpenRouter configuration
  239. if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
  240. viper.SetDefault("agents.primary.model", models.OpenRouterClaude37Sonnet)
  241. viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
  242. viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
  243. return
  244. }
  245. // XAI configuration
  246. if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
  247. viper.SetDefault("agents.primary.model", models.XAIGrok3Beta)
  248. viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
  249. viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
  250. return
  251. }
  252. // AWS Bedrock configuration
  253. if hasAWSCredentials() {
  254. viper.SetDefault("agents.primary.model", models.BedrockClaude37Sonnet)
  255. viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
  256. viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
  257. return
  258. }
  259. // Azure OpenAI configuration
  260. if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
  261. viper.SetDefault("agents.primary.model", models.AzureGPT41)
  262. viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
  263. viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
  264. return
  265. }
  266. // Google Cloud VertexAI configuration
  267. if hasVertexAICredentials() {
  268. viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
  269. viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
  270. viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
  271. viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
  272. return
  273. }
  274. }
  275. // hasAWSCredentials checks if AWS credentials are available in the environment.
  276. func hasAWSCredentials() bool {
  277. // Check for explicit AWS credentials
  278. if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
  279. return true
  280. }
  281. // Check for AWS profile
  282. if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
  283. return true
  284. }
  285. // Check for AWS region
  286. if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
  287. return true
  288. }
  289. // Check if running on EC2 with instance profile
  290. if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
  291. os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
  292. return true
  293. }
  294. return false
  295. }
  296. // hasVertexAICredentials checks if VertexAI credentials are available in the environment.
  297. func hasVertexAICredentials() bool {
  298. // Check for explicit VertexAI parameters
  299. if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
  300. return true
  301. }
  302. // Check for Google Cloud project and location
  303. if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
  304. return true
  305. }
  306. return false
  307. }
  308. // readConfig handles the result of reading a configuration file.
  309. func readConfig(err error) error {
  310. if err == nil {
  311. return nil
  312. }
  313. // It's okay if the config file doesn't exist
  314. if _, ok := err.(viper.ConfigFileNotFoundError); ok {
  315. return nil
  316. }
  317. return fmt.Errorf("failed to read config: %w", err)
  318. }
  319. // mergeLocalConfig loads and merges configuration from the local directory.
  320. func mergeLocalConfig(workingDir string) {
  321. local := viper.New()
  322. local.SetConfigName(fmt.Sprintf(".%s", appName))
  323. local.SetConfigType("json")
  324. local.AddConfigPath(workingDir)
  325. // Merge local config if it exists
  326. if err := local.ReadInConfig(); err == nil {
  327. viper.MergeConfigMap(local.AllSettings())
  328. }
  329. }
  330. // applyDefaultValues sets default values for configuration fields that need processing.
  331. func applyDefaultValues() {
  332. // Set default MCP type if not specified
  333. for k, v := range cfg.MCPServers {
  334. if v.Type == "" {
  335. v.Type = MCPStdio
  336. cfg.MCPServers[k] = v
  337. }
  338. }
  339. }
  340. // It validates model IDs and providers, ensuring they are supported.
  341. func validateAgent(cfg *Config, name AgentName, agent Agent) error {
  342. // Check if model exists
  343. model, modelExists := models.SupportedModels[agent.Model]
  344. if !modelExists {
  345. slog.Warn("unsupported model configured, reverting to default",
  346. "agent", name,
  347. "configured_model", agent.Model)
  348. // Set default model based on available providers
  349. if setDefaultModelForAgent(name) {
  350. slog.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
  351. } else {
  352. return fmt.Errorf("no valid provider available for agent %s", name)
  353. }
  354. return nil
  355. }
  356. // Check if provider for the model is configured
  357. provider := model.Provider
  358. providerCfg, providerExists := cfg.Providers[provider]
  359. if !providerExists {
  360. // Provider not configured, check if we have environment variables
  361. apiKey := getProviderAPIKey(provider)
  362. if apiKey == "" {
  363. slog.Warn("provider not configured for model, reverting to default",
  364. "agent", name,
  365. "model", agent.Model,
  366. "provider", provider)
  367. // Set default model based on available providers
  368. if setDefaultModelForAgent(name) {
  369. slog.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
  370. } else {
  371. return fmt.Errorf("no valid provider available for agent %s", name)
  372. }
  373. } else {
  374. // Add provider with API key from environment
  375. cfg.Providers[provider] = Provider{
  376. APIKey: apiKey,
  377. }
  378. slog.Info("added provider from environment", "provider", provider)
  379. }
  380. } else if providerCfg.Disabled || providerCfg.APIKey == "" {
  381. // Provider is disabled or has no API key
  382. slog.Warn("provider is disabled or has no API key, reverting to default",
  383. "agent", name,
  384. "model", agent.Model,
  385. "provider", provider)
  386. // Set default model based on available providers
  387. if setDefaultModelForAgent(name) {
  388. slog.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
  389. } else {
  390. return fmt.Errorf("no valid provider available for agent %s", name)
  391. }
  392. }
  393. // Validate max tokens
  394. if agent.MaxTokens <= 0 {
  395. slog.Warn("invalid max tokens, setting to default",
  396. "agent", name,
  397. "model", agent.Model,
  398. "max_tokens", agent.MaxTokens)
  399. // Update the agent with default max tokens
  400. updatedAgent := cfg.Agents[name]
  401. if model.DefaultMaxTokens > 0 {
  402. updatedAgent.MaxTokens = model.DefaultMaxTokens
  403. } else {
  404. updatedAgent.MaxTokens = MaxTokensFallbackDefault
  405. }
  406. cfg.Agents[name] = updatedAgent
  407. } else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
  408. // Ensure max tokens doesn't exceed half the context window (reasonable limit)
  409. slog.Warn("max tokens exceeds half the context window, adjusting",
  410. "agent", name,
  411. "model", agent.Model,
  412. "max_tokens", agent.MaxTokens,
  413. "context_window", model.ContextWindow)
  414. // Update the agent with adjusted max tokens
  415. updatedAgent := cfg.Agents[name]
  416. updatedAgent.MaxTokens = model.ContextWindow / 2
  417. cfg.Agents[name] = updatedAgent
  418. }
  419. // Validate reasoning effort for models that support reasoning
  420. if model.CanReason && provider == models.ProviderOpenAI {
  421. if agent.ReasoningEffort == "" {
  422. // Set default reasoning effort for models that support it
  423. slog.Info("setting default reasoning effort for model that supports reasoning",
  424. "agent", name,
  425. "model", agent.Model)
  426. // Update the agent with default reasoning effort
  427. updatedAgent := cfg.Agents[name]
  428. updatedAgent.ReasoningEffort = "medium"
  429. cfg.Agents[name] = updatedAgent
  430. } else {
  431. // Check if reasoning effort is valid (low, medium, high)
  432. effort := strings.ToLower(agent.ReasoningEffort)
  433. if effort != "low" && effort != "medium" && effort != "high" {
  434. slog.Warn("invalid reasoning effort, setting to medium",
  435. "agent", name,
  436. "model", agent.Model,
  437. "reasoning_effort", agent.ReasoningEffort)
  438. // Update the agent with valid reasoning effort
  439. updatedAgent := cfg.Agents[name]
  440. updatedAgent.ReasoningEffort = "medium"
  441. cfg.Agents[name] = updatedAgent
  442. }
  443. }
  444. } else if !model.CanReason && agent.ReasoningEffort != "" {
  445. // Model doesn't support reasoning but reasoning effort is set
  446. slog.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
  447. "agent", name,
  448. "model", agent.Model,
  449. "reasoning_effort", agent.ReasoningEffort)
  450. // Update the agent to remove reasoning effort
  451. updatedAgent := cfg.Agents[name]
  452. updatedAgent.ReasoningEffort = ""
  453. cfg.Agents[name] = updatedAgent
  454. }
  455. return nil
  456. }
  457. // Validate checks if the configuration is valid and applies defaults where needed.
  458. func Validate() error {
  459. if cfg == nil {
  460. return fmt.Errorf("config not loaded")
  461. }
  462. // Validate agent models
  463. for name, agent := range cfg.Agents {
  464. if err := validateAgent(cfg, name, agent); err != nil {
  465. return err
  466. }
  467. }
  468. // Validate providers
  469. for provider, providerCfg := range cfg.Providers {
  470. if providerCfg.APIKey == "" && !providerCfg.Disabled {
  471. slog.Warn("provider has no API key, marking as disabled", "provider", provider)
  472. providerCfg.Disabled = true
  473. cfg.Providers[provider] = providerCfg
  474. }
  475. }
  476. // Validate LSP configurations
  477. for language, lspConfig := range cfg.LSP {
  478. if lspConfig.Command == "" && !lspConfig.Disabled {
  479. slog.Warn("LSP configuration has no command, marking as disabled", "language", language)
  480. lspConfig.Disabled = true
  481. cfg.LSP[language] = lspConfig
  482. }
  483. }
  484. return nil
  485. }
  486. // getProviderAPIKey gets the API key for a provider from environment variables
  487. func getProviderAPIKey(provider models.ModelProvider) string {
  488. switch provider {
  489. case models.ProviderAnthropic:
  490. return os.Getenv("ANTHROPIC_API_KEY")
  491. case models.ProviderOpenAI:
  492. return os.Getenv("OPENAI_API_KEY")
  493. case models.ProviderGemini:
  494. return os.Getenv("GEMINI_API_KEY")
  495. case models.ProviderGROQ:
  496. return os.Getenv("GROQ_API_KEY")
  497. case models.ProviderAzure:
  498. return os.Getenv("AZURE_OPENAI_API_KEY")
  499. case models.ProviderOpenRouter:
  500. return os.Getenv("OPENROUTER_API_KEY")
  501. case models.ProviderBedrock:
  502. if hasAWSCredentials() {
  503. return "aws-credentials-available"
  504. }
  505. case models.ProviderVertexAI:
  506. if hasVertexAICredentials() {
  507. return "vertex-ai-credentials-available"
  508. }
  509. }
  510. return ""
  511. }
// setDefaultModelForAgent sets a default model for an agent based on available
// providers, checked in a fixed preference order: Anthropic, OpenAI,
// OpenRouter, Gemini, Groq, AWS Bedrock, VertexAI. It returns false when no
// provider credentials can be found.
//
// NOTE(review): this order differs from setProviderDefaults (there, Gemini
// and Groq are preferred over OpenRouter, and XAI/Azure are also considered)
// — confirm whether the two orders are meant to match.
func setDefaultModelForAgent(agent AgentName) bool {
	// Check providers in order of preference
	if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			// Titles are kept short (matches the 80-token override in Load).
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.Claude4Sonnet,
			MaxTokens: maxTokens,
		}
		return true
	}
	if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""
		// Cheaper Mini model for the auxiliary agents, full model for primary.
		switch agent {
		case AgentTitle:
			model = models.GPT41Mini
			maxTokens = 80
		case AgentTask:
			model = models.GPT41Mini
		default:
			model = models.GPT41
		}
		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}
		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}
	if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		reasoningEffort := ""
		// Haiku for short titles, Sonnet for everything else.
		switch agent {
		case AgentTitle:
			model = models.OpenRouterClaude35Haiku
			maxTokens = 80
		case AgentTask:
			model = models.OpenRouterClaude37Sonnet
		default:
			model = models.OpenRouterClaude37Sonnet
		}
		// Check if model supports reasoning
		if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
			reasoningEffort = "medium"
		}
		cfg.Agents[agent] = Agent{
			Model:           model,
			MaxTokens:       maxTokens,
			ReasoningEffort: reasoningEffort,
		}
		return true
	}
	if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
		var model models.ModelID
		maxTokens := int64(5000)
		if agent == AgentTitle {
			model = models.Gemini25Flash
			maxTokens = 80
		} else {
			model = models.Gemini25
		}
		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}
	if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:     models.QWENQwq,
			MaxTokens: maxTokens,
		}
		return true
	}
	if hasAWSCredentials() {
		maxTokens := int64(5000)
		if agent == AgentTitle {
			maxTokens = 80
		}
		cfg.Agents[agent] = Agent{
			Model:           models.BedrockClaude37Sonnet,
			MaxTokens:       maxTokens,
			ReasoningEffort: "medium", // Claude models support reasoning
		}
		return true
	}
	if hasVertexAICredentials() {
		var model models.ModelID
		maxTokens := int64(5000)
		if agent == AgentTitle {
			model = models.VertexAIGemini25Flash
			maxTokens = 80
		} else {
			model = models.VertexAIGemini25
		}
		cfg.Agents[agent] = Agent{
			Model:     model,
			MaxTokens: maxTokens,
		}
		return true
	}
	// No usable provider found in the environment.
	return false
}
// Get returns the current configuration.
// It's safe to call this function multiple times.
// It returns nil until Load has been called successfully.
func Get() *Config {
	return cfg
}
// WorkingDirectory returns the current working directory from the
// configuration. It panics when the config has not been loaded via Load.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}
  641. // GetHostname returns the system hostname or "User" if it can't be determined
  642. func GetHostname() (string, error) {
  643. hostname, err := os.Hostname()
  644. if err != nil {
  645. return "User", err
  646. }
  647. return hostname, nil
  648. }
  649. // GetUsername returns the current user's username
  650. func GetUsername() (string, error) {
  651. currentUser, err := user.Current()
  652. if err != nil {
  653. return "User", err
  654. }
  655. return currentUser.Username, nil
  656. }
  657. func updateCfgFile(updateCfg func(config *Config)) error {
  658. if cfg == nil {
  659. return fmt.Errorf("config not loaded")
  660. }
  661. // Get the config file path
  662. configFile := viper.ConfigFileUsed()
  663. var configData []byte
  664. if configFile == "" {
  665. homeDir, err := os.UserHomeDir()
  666. if err != nil {
  667. return fmt.Errorf("failed to get home directory: %w", err)
  668. }
  669. configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
  670. slog.Info("config file not found, creating new one", "path", configFile)
  671. configData = []byte(`{}`)
  672. } else {
  673. // Read the existing config file
  674. data, err := os.ReadFile(configFile)
  675. if err != nil {
  676. return fmt.Errorf("failed to read config file: %w", err)
  677. }
  678. configData = data
  679. }
  680. // Parse the JSON
  681. var userCfg *Config
  682. if err := json.Unmarshal(configData, &userCfg); err != nil {
  683. return fmt.Errorf("failed to parse config file: %w", err)
  684. }
  685. updateCfg(userCfg)
  686. // Write the updated config back to file
  687. updatedData, err := json.MarshalIndent(userCfg, "", " ")
  688. if err != nil {
  689. return fmt.Errorf("failed to marshal config: %w", err)
  690. }
  691. if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
  692. return fmt.Errorf("failed to write config file: %w", err)
  693. }
  694. return nil
  695. }
  696. func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
  697. if cfg == nil {
  698. panic("config not loaded")
  699. }
  700. existingAgentCfg := cfg.Agents[agentName]
  701. model, ok := models.SupportedModels[modelID]
  702. if !ok {
  703. return fmt.Errorf("model %s not supported", modelID)
  704. }
  705. maxTokens := existingAgentCfg.MaxTokens
  706. if model.DefaultMaxTokens > 0 {
  707. maxTokens = model.DefaultMaxTokens
  708. }
  709. newAgentCfg := Agent{
  710. Model: modelID,
  711. MaxTokens: maxTokens,
  712. ReasoningEffort: existingAgentCfg.ReasoningEffort,
  713. }
  714. cfg.Agents[agentName] = newAgentCfg
  715. if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
  716. // revert config update on failure
  717. cfg.Agents[agentName] = existingAgentCfg
  718. return fmt.Errorf("failed to update agent model: %w", err)
  719. }
  720. return updateCfgFile(func(config *Config) {
  721. if config.Agents == nil {
  722. config.Agents = make(map[AgentName]Agent)
  723. }
  724. config.Agents[agentName] = newAgentCfg
  725. })
  726. }
// UpdateTheme updates the theme in the configuration and writes it to the
// config file, keeping the in-memory config and the on-disk file in sync.
func UpdateTheme(themeName string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}
	// Update the in-memory config
	cfg.TUI.Theme = themeName
	// Update the file config
	return updateCfgFile(func(config *Config) {
		config.TUI.Theme = themeName
	})
}