// Package config manages application configuration from various sources.
package config
import (
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"os"
	"os/user"
	"path/filepath"
	"strings"

	"github.com/spf13/viper"
	"github.com/sst/opencode/internal/llm/models"
)
// MCPType defines the type of MCP (Model Control Protocol) server.
type MCPType string

// Supported MCP types. applyDefaultValues falls back to MCPStdio when a
// server entry omits the type.
const (
	MCPStdio MCPType = "stdio"
	MCPSse   MCPType = "sse"
)
// MCPServer defines the configuration for a Model Control Protocol server.
// Command/Env/Args and URL/Headers are alternative connection settings;
// which set applies presumably depends on Type — confirm against the MCP
// client code that consumes this struct.
type MCPServer struct {
	// Command is the executable used to start the server.
	Command string `json:"command"`
	// Env holds additional environment entries ("KEY=VALUE") for the server.
	Env []string `json:"env"`
	// Args are command-line arguments passed to Command.
	Args []string `json:"args"`
	// Type selects the transport (defaults to MCPStdio when empty).
	Type MCPType `json:"type"`
	// URL is the endpoint for remote servers.
	URL string `json:"url"`
	// Headers are extra HTTP headers sent to remote servers.
	Headers map[string]string `json:"headers"`
}
// AgentName identifies one of the application's built-in agents.
type AgentName string

// Built-in agent names.
const (
	AgentPrimary AgentName = "primary"
	AgentTask    AgentName = "task"
	AgentTitle   AgentName = "title"
)
// Agent defines configuration for different LLM models and their token limits.
type Agent struct {
	// Model selects the LLM this agent uses.
	Model models.ModelID `json:"model"`
	// MaxTokens caps the number of tokens generated per response.
	MaxTokens int64 `json:"maxTokens"`
	// ReasoningEffort applies to OpenAI reasoning models: "low", "medium" or "high".
	ReasoningEffort string `json:"reasoningEffort"`
}
// Provider defines configuration for an LLM provider.
type Provider struct {
	// APIKey authenticates requests to the provider.
	APIKey string `json:"apiKey"`
	// Disabled excludes the provider from use; Validate sets it when no
	// API key is configured.
	Disabled bool `json:"disabled"`
}
// Data defines storage configuration.
type Data struct {
	// Directory is where application data is stored (default ".opencode").
	Directory string `json:"directory,omitempty"`
}
// LSPConfig defines configuration for Language Server Protocol integration.
type LSPConfig struct {
	// Disabled turns this language server off.
	// NOTE(review): the JSON tag is "enabled" while the field means the
	// opposite, so `"enabled": true` in a config file DISABLES the server.
	// Renaming the tag would break existing config files — confirm intent
	// before changing.
	Disabled bool `json:"enabled"`
	// Command launches the language server.
	Command string `json:"command"`
	// Args are command-line arguments for Command.
	Args []string `json:"args"`
	// Options holds server-specific settings, opaque to this package.
	Options any `json:"options"`
}
// TUIConfig defines the configuration for the Terminal User Interface.
type TUIConfig struct {
	// Theme names the color theme (default "opencode").
	Theme string `json:"theme,omitempty"`
	// CustomTheme holds user-defined theme values.
	CustomTheme map[string]any `json:"customTheme,omitempty"`
}
// ShellConfig defines the configuration for the shell used by the bash tool.
type ShellConfig struct {
	// Path is the shell executable.
	Path string `json:"path,omitempty"`
	// Args are arguments passed to the shell.
	Args []string `json:"args,omitempty"`
}
// Config is the main configuration structure for the application.
type Config struct {
	// Data configures application storage.
	Data Data `json:"data"`
	// WorkingDir is the directory the application operates in.
	WorkingDir string `json:"wd,omitempty"`
	// MCPServers maps a server name to its MCP configuration.
	MCPServers map[string]MCPServer `json:"mcpServers,omitempty"`
	// Providers maps each LLM provider to its credentials/state.
	Providers map[models.ModelProvider]Provider `json:"providers,omitempty"`
	// LSP maps a language name to its language-server configuration.
	LSP map[string]LSPConfig `json:"lsp,omitempty"`
	// Agents maps each built-in agent to its model configuration.
	Agents map[AgentName]Agent `json:"agents,omitempty"`
	// Debug enables debug-level logging.
	Debug bool `json:"debug,omitempty"`
	// DebugLSP enables extra logging for LSP integration.
	DebugLSP bool `json:"debugLSP,omitempty"`
	// ContextPaths lists files to load as project context.
	ContextPaths []string `json:"contextPaths,omitempty"`
	// TUI configures the terminal user interface.
	TUI TUIConfig `json:"tui"`
	// Shell configures the shell used by the bash tool.
	Shell ShellConfig `json:"shell,omitempty"`
}
// Application constants.
const (
	defaultDataDirectory = ".opencode"
	defaultLogLevel      = "info"
	appName              = "opencode"
	// MaxTokensFallbackDefault is used when a model declares no
	// DefaultMaxTokens of its own.
	MaxTokensFallbackDefault = 4096
)
// defaultContextPaths is the default value for the "contextPaths" setting:
// well-known instruction/context files looked for in the project.
var defaultContextPaths = []string{
	".github/copilot-instructions.md",
	".cursorrules",
	".cursor/rules/",
	"CLAUDE.md",
	"CLAUDE.local.md",
	"CONTEXT.md",
	"CONTEXT.local.md",
	"opencode.md",
	"opencode.local.md",
	"OpenCode.md",
	"OpenCode.local.md",
	"OPENCODE.md",
	"OPENCODE.local.md",
}
// cfg is the process-wide configuration singleton, populated by Load and
// read by Get and the other package-level accessors.
var cfg *Config
  106. // Load initializes the configuration from environment variables and config files.
  107. // If debug is true, debug mode is enabled and log level is set to debug.
  108. // It returns an error if configuration loading fails.
  109. func Load(workingDir string, debug bool, lvl *slog.LevelVar) (*Config, error) {
  110. if cfg != nil {
  111. return cfg, nil
  112. }
  113. cfg = &Config{
  114. WorkingDir: workingDir,
  115. MCPServers: make(map[string]MCPServer),
  116. Providers: make(map[models.ModelProvider]Provider),
  117. LSP: make(map[string]LSPConfig),
  118. }
  119. configureViper()
  120. setDefaults(debug)
  121. // Read global config
  122. if err := readConfig(viper.ReadInConfig()); err != nil {
  123. return cfg, err
  124. }
  125. // Load and merge local config
  126. mergeLocalConfig(workingDir)
  127. setProviderDefaults()
  128. // Apply configuration to the struct
  129. if err := viper.Unmarshal(cfg); err != nil {
  130. return cfg, fmt.Errorf("failed to unmarshal config: %w", err)
  131. }
  132. applyDefaultValues()
  133. defaultLevel := slog.LevelInfo
  134. if cfg.Debug {
  135. defaultLevel = slog.LevelDebug
  136. }
  137. lvl.Set(defaultLevel)
  138. slog.SetLogLoggerLevel(defaultLevel)
  139. // Validate configuration
  140. if err := Validate(); err != nil {
  141. return cfg, fmt.Errorf("config validation failed: %w", err)
  142. }
  143. if cfg.Agents == nil {
  144. cfg.Agents = make(map[AgentName]Agent)
  145. }
  146. // Override the max tokens for title agent
  147. cfg.Agents[AgentTitle] = Agent{
  148. Model: cfg.Agents[AgentTitle].Model,
  149. MaxTokens: 80,
  150. }
  151. return cfg, nil
  152. }
  153. // configureViper sets up viper's configuration paths and environment variables.
  154. func configureViper() {
  155. viper.SetConfigName(fmt.Sprintf(".%s", appName))
  156. viper.SetConfigType("json")
  157. viper.AddConfigPath("$HOME")
  158. viper.AddConfigPath(fmt.Sprintf("$XDG_CONFIG_HOME/%s", appName))
  159. viper.AddConfigPath(fmt.Sprintf("$HOME/.config/%s", appName))
  160. viper.SetEnvPrefix(strings.ToUpper(appName))
  161. viper.AutomaticEnv()
  162. }
  163. // setDefaults configures default values for configuration options.
  164. func setDefaults(debug bool) {
  165. viper.SetDefault("data.directory", defaultDataDirectory)
  166. viper.SetDefault("contextPaths", defaultContextPaths)
  167. viper.SetDefault("tui.theme", "opencode")
  168. if debug {
  169. viper.SetDefault("debug", true)
  170. viper.Set("log.level", "debug")
  171. } else {
  172. viper.SetDefault("debug", false)
  173. viper.SetDefault("log.level", defaultLogLevel)
  174. }
  175. }
  176. // setProviderDefaults configures LLM provider defaults based on provider provided by
  177. // environment variables and configuration file.
  178. func setProviderDefaults() {
  179. // Set all API keys we can find in the environment
  180. if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
  181. viper.SetDefault("providers.anthropic.apiKey", apiKey)
  182. }
  183. if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
  184. viper.SetDefault("providers.openai.apiKey", apiKey)
  185. }
  186. if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
  187. viper.SetDefault("providers.gemini.apiKey", apiKey)
  188. }
  189. if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
  190. viper.SetDefault("providers.groq.apiKey", apiKey)
  191. }
  192. if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
  193. viper.SetDefault("providers.openrouter.apiKey", apiKey)
  194. }
  195. if apiKey := os.Getenv("XAI_API_KEY"); apiKey != "" {
  196. viper.SetDefault("providers.xai.apiKey", apiKey)
  197. }
  198. if apiKey := os.Getenv("AZURE_OPENAI_ENDPOINT"); apiKey != "" {
  199. // api-key may be empty when using Entra ID credentials – that's okay
  200. viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
  201. }
  202. // Use this order to set the default models
  203. // 1. Anthropic
  204. // 2. OpenAI
  205. // 3. Google Gemini
  206. // 4. Groq
  207. // 5. OpenRouter
  208. // 6. AWS Bedrock
  209. // 7. Azure
  210. // 8. Google Cloud VertexAI
  211. // Anthropic configuration
  212. if key := viper.GetString("providers.anthropic.apiKey"); strings.TrimSpace(key) != "" {
  213. viper.SetDefault("agents.primary.model", models.Claude37Sonnet)
  214. viper.SetDefault("agents.task.model", models.Claude37Sonnet)
  215. viper.SetDefault("agents.title.model", models.Claude37Sonnet)
  216. return
  217. }
  218. // OpenAI configuration
  219. if key := viper.GetString("providers.openai.apiKey"); strings.TrimSpace(key) != "" {
  220. viper.SetDefault("agents.primary.model", models.GPT41)
  221. viper.SetDefault("agents.task.model", models.GPT41Mini)
  222. viper.SetDefault("agents.title.model", models.GPT41Mini)
  223. return
  224. }
  225. // Google Gemini configuration
  226. if key := viper.GetString("providers.gemini.apiKey"); strings.TrimSpace(key) != "" {
  227. viper.SetDefault("agents.primary.model", models.Gemini25)
  228. viper.SetDefault("agents.task.model", models.Gemini25Flash)
  229. viper.SetDefault("agents.title.model", models.Gemini25Flash)
  230. return
  231. }
  232. // Groq configuration
  233. if key := viper.GetString("providers.groq.apiKey"); strings.TrimSpace(key) != "" {
  234. viper.SetDefault("agents.primary.model", models.QWENQwq)
  235. viper.SetDefault("agents.task.model", models.QWENQwq)
  236. viper.SetDefault("agents.title.model", models.QWENQwq)
  237. return
  238. }
  239. // OpenRouter configuration
  240. if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
  241. viper.SetDefault("agents.primary.model", models.OpenRouterClaude37Sonnet)
  242. viper.SetDefault("agents.task.model", models.OpenRouterClaude37Sonnet)
  243. viper.SetDefault("agents.title.model", models.OpenRouterClaude35Haiku)
  244. return
  245. }
  246. // XAI configuration
  247. if key := viper.GetString("providers.xai.apiKey"); strings.TrimSpace(key) != "" {
  248. viper.SetDefault("agents.primary.model", models.XAIGrok3Beta)
  249. viper.SetDefault("agents.task.model", models.XAIGrok3Beta)
  250. viper.SetDefault("agents.title.model", models.XAiGrok3MiniFastBeta)
  251. return
  252. }
  253. // AWS Bedrock configuration
  254. if hasAWSCredentials() {
  255. viper.SetDefault("agents.primary.model", models.BedrockClaude37Sonnet)
  256. viper.SetDefault("agents.task.model", models.BedrockClaude37Sonnet)
  257. viper.SetDefault("agents.title.model", models.BedrockClaude37Sonnet)
  258. return
  259. }
  260. // Azure OpenAI configuration
  261. if os.Getenv("AZURE_OPENAI_ENDPOINT") != "" {
  262. viper.SetDefault("agents.primary.model", models.AzureGPT41)
  263. viper.SetDefault("agents.task.model", models.AzureGPT41Mini)
  264. viper.SetDefault("agents.title.model", models.AzureGPT41Mini)
  265. return
  266. }
  267. // Google Cloud VertexAI configuration
  268. if hasVertexAICredentials() {
  269. viper.SetDefault("agents.coder.model", models.VertexAIGemini25)
  270. viper.SetDefault("agents.summarizer.model", models.VertexAIGemini25)
  271. viper.SetDefault("agents.task.model", models.VertexAIGemini25Flash)
  272. viper.SetDefault("agents.title.model", models.VertexAIGemini25Flash)
  273. return
  274. }
  275. }
  276. // hasAWSCredentials checks if AWS credentials are available in the environment.
  277. func hasAWSCredentials() bool {
  278. // Check for explicit AWS credentials
  279. if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
  280. return true
  281. }
  282. // Check for AWS profile
  283. if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
  284. return true
  285. }
  286. // Check for AWS region
  287. if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
  288. return true
  289. }
  290. // Check if running on EC2 with instance profile
  291. if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
  292. os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
  293. return true
  294. }
  295. return false
  296. }
  297. // hasVertexAICredentials checks if VertexAI credentials are available in the environment.
  298. func hasVertexAICredentials() bool {
  299. // Check for explicit VertexAI parameters
  300. if os.Getenv("VERTEXAI_PROJECT") != "" && os.Getenv("VERTEXAI_LOCATION") != "" {
  301. return true
  302. }
  303. // Check for Google Cloud project and location
  304. if os.Getenv("GOOGLE_CLOUD_PROJECT") != "" && (os.Getenv("GOOGLE_CLOUD_REGION") != "" || os.Getenv("GOOGLE_CLOUD_LOCATION") != "") {
  305. return true
  306. }
  307. return false
  308. }
  309. // readConfig handles the result of reading a configuration file.
  310. func readConfig(err error) error {
  311. if err == nil {
  312. return nil
  313. }
  314. // It's okay if the config file doesn't exist
  315. if _, ok := err.(viper.ConfigFileNotFoundError); ok {
  316. return nil
  317. }
  318. return fmt.Errorf("failed to read config: %w", err)
  319. }
  320. // mergeLocalConfig loads and merges configuration from the local directory.
  321. func mergeLocalConfig(workingDir string) {
  322. local := viper.New()
  323. local.SetConfigName(fmt.Sprintf(".%s", appName))
  324. local.SetConfigType("json")
  325. local.AddConfigPath(workingDir)
  326. // Merge local config if it exists
  327. if err := local.ReadInConfig(); err == nil {
  328. viper.MergeConfigMap(local.AllSettings())
  329. }
  330. }
  331. // applyDefaultValues sets default values for configuration fields that need processing.
  332. func applyDefaultValues() {
  333. // Set default MCP type if not specified
  334. for k, v := range cfg.MCPServers {
  335. if v.Type == "" {
  336. v.Type = MCPStdio
  337. cfg.MCPServers[k] = v
  338. }
  339. }
  340. }
// validateAgent checks one agent's configuration against the supported-model
// catalog and repairs it in place where possible: an unknown model or an
// unusable provider is replaced with a provider-appropriate default, the
// token budget is clamped, and reasoning effort is normalized. It returns an
// error only when no usable provider can be found for the agent.
func validateAgent(cfg *Config, name AgentName, agent Agent) error {
	// Check if model exists
	model, modelExists := models.SupportedModels[agent.Model]
	if !modelExists {
		slog.Warn("unsupported model configured, reverting to default",
			"agent", name,
			"configured_model", agent.Model)
		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			slog.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
		// NOTE(review): returns here, so the max-token and reasoning checks
		// below never run on the newly assigned default — confirm intended.
		return nil
	}
	// Check if provider for the model is configured
	provider := model.Provider
	providerCfg, providerExists := cfg.Providers[provider]
	if !providerExists {
		// Provider not configured, check if we have environment variables
		apiKey := getProviderAPIKey(provider)
		if apiKey == "" {
			slog.Warn("provider not configured for model, reverting to default",
				"agent", name,
				"model", agent.Model,
				"provider", provider)
			// Set default model based on available providers
			if setDefaultModelForAgent(name) {
				slog.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
			} else {
				return fmt.Errorf("no valid provider available for agent %s", name)
			}
		} else {
			// Add provider with API key from environment
			cfg.Providers[provider] = Provider{
				APIKey: apiKey,
			}
			slog.Info("added provider from environment", "provider", provider)
		}
	} else if providerCfg.Disabled || providerCfg.APIKey == "" {
		// Provider is disabled or has no API key
		slog.Warn("provider is disabled or has no API key, reverting to default",
			"agent", name,
			"model", agent.Model,
			"provider", provider)
		// Set default model based on available providers
		if setDefaultModelForAgent(name) {
			slog.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
		} else {
			return fmt.Errorf("no valid provider available for agent %s", name)
		}
	}
	// Validate max tokens. Note these checks use the ORIGINAL model's limits
	// even if the agent was just reverted to a default model above.
	if agent.MaxTokens <= 0 {
		slog.Warn("invalid max tokens, setting to default",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens)
		// Update the agent with default max tokens
		updatedAgent := cfg.Agents[name]
		if model.DefaultMaxTokens > 0 {
			updatedAgent.MaxTokens = model.DefaultMaxTokens
		} else {
			updatedAgent.MaxTokens = MaxTokensFallbackDefault
		}
		cfg.Agents[name] = updatedAgent
	} else if model.ContextWindow > 0 && agent.MaxTokens > model.ContextWindow/2 {
		// Ensure max tokens doesn't exceed half the context window (reasonable limit)
		slog.Warn("max tokens exceeds half the context window, adjusting",
			"agent", name,
			"model", agent.Model,
			"max_tokens", agent.MaxTokens,
			"context_window", model.ContextWindow)
		// Update the agent with adjusted max tokens
		updatedAgent := cfg.Agents[name]
		updatedAgent.MaxTokens = model.ContextWindow / 2
		cfg.Agents[name] = updatedAgent
	}
	// Validate reasoning effort for models that support reasoning
	if model.CanReason && provider == models.ProviderOpenAI {
		if agent.ReasoningEffort == "" {
			// Set default reasoning effort for models that support it
			slog.Info("setting default reasoning effort for model that supports reasoning",
				"agent", name,
				"model", agent.Model)
			// Update the agent with default reasoning effort
			updatedAgent := cfg.Agents[name]
			updatedAgent.ReasoningEffort = "medium"
			cfg.Agents[name] = updatedAgent
		} else {
			// Check if reasoning effort is valid (low, medium, high)
			effort := strings.ToLower(agent.ReasoningEffort)
			if effort != "low" && effort != "medium" && effort != "high" {
				slog.Warn("invalid reasoning effort, setting to medium",
					"agent", name,
					"model", agent.Model,
					"reasoning_effort", agent.ReasoningEffort)
				// Update the agent with valid reasoning effort
				updatedAgent := cfg.Agents[name]
				updatedAgent.ReasoningEffort = "medium"
				cfg.Agents[name] = updatedAgent
			}
		}
	} else if !model.CanReason && agent.ReasoningEffort != "" {
		// Model doesn't support reasoning but reasoning effort is set
		slog.Warn("model doesn't support reasoning but reasoning effort is set, ignoring",
			"agent", name,
			"model", agent.Model,
			"reasoning_effort", agent.ReasoningEffort)
		// Update the agent to remove reasoning effort
		updatedAgent := cfg.Agents[name]
		updatedAgent.ReasoningEffort = ""
		cfg.Agents[name] = updatedAgent
	}
	return nil
}
  458. // Validate checks if the configuration is valid and applies defaults where needed.
  459. func Validate() error {
  460. if cfg == nil {
  461. return fmt.Errorf("config not loaded")
  462. }
  463. // Validate agent models
  464. for name, agent := range cfg.Agents {
  465. if err := validateAgent(cfg, name, agent); err != nil {
  466. return err
  467. }
  468. }
  469. // Validate providers
  470. for provider, providerCfg := range cfg.Providers {
  471. if providerCfg.APIKey == "" && !providerCfg.Disabled {
  472. slog.Warn("provider has no API key, marking as disabled", "provider", provider)
  473. providerCfg.Disabled = true
  474. cfg.Providers[provider] = providerCfg
  475. }
  476. }
  477. // Validate LSP configurations
  478. for language, lspConfig := range cfg.LSP {
  479. if lspConfig.Command == "" && !lspConfig.Disabled {
  480. slog.Warn("LSP configuration has no command, marking as disabled", "language", language)
  481. lspConfig.Disabled = true
  482. cfg.LSP[language] = lspConfig
  483. }
  484. }
  485. return nil
  486. }
  487. // getProviderAPIKey gets the API key for a provider from environment variables
  488. func getProviderAPIKey(provider models.ModelProvider) string {
  489. switch provider {
  490. case models.ProviderAnthropic:
  491. return os.Getenv("ANTHROPIC_API_KEY")
  492. case models.ProviderOpenAI:
  493. return os.Getenv("OPENAI_API_KEY")
  494. case models.ProviderGemini:
  495. return os.Getenv("GEMINI_API_KEY")
  496. case models.ProviderGROQ:
  497. return os.Getenv("GROQ_API_KEY")
  498. case models.ProviderAzure:
  499. return os.Getenv("AZURE_OPENAI_API_KEY")
  500. case models.ProviderOpenRouter:
  501. return os.Getenv("OPENROUTER_API_KEY")
  502. case models.ProviderBedrock:
  503. if hasAWSCredentials() {
  504. return "aws-credentials-available"
  505. }
  506. case models.ProviderVertexAI:
  507. if hasVertexAICredentials() {
  508. return "vertex-ai-credentials-available"
  509. }
  510. }
  511. return ""
  512. }
  513. // setDefaultModelForAgent sets a default model for an agent based on available providers
  514. func setDefaultModelForAgent(agent AgentName) bool {
  515. // Check providers in order of preference
  516. if apiKey := os.Getenv("ANTHROPIC_API_KEY"); apiKey != "" {
  517. maxTokens := int64(5000)
  518. if agent == AgentTitle {
  519. maxTokens = 80
  520. }
  521. cfg.Agents[agent] = Agent{
  522. Model: models.Claude37Sonnet,
  523. MaxTokens: maxTokens,
  524. }
  525. return true
  526. }
  527. if apiKey := os.Getenv("OPENAI_API_KEY"); apiKey != "" {
  528. var model models.ModelID
  529. maxTokens := int64(5000)
  530. reasoningEffort := ""
  531. switch agent {
  532. case AgentTitle:
  533. model = models.GPT41Mini
  534. maxTokens = 80
  535. case AgentTask:
  536. model = models.GPT41Mini
  537. default:
  538. model = models.GPT41
  539. }
  540. // Check if model supports reasoning
  541. if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
  542. reasoningEffort = "medium"
  543. }
  544. cfg.Agents[agent] = Agent{
  545. Model: model,
  546. MaxTokens: maxTokens,
  547. ReasoningEffort: reasoningEffort,
  548. }
  549. return true
  550. }
  551. if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
  552. var model models.ModelID
  553. maxTokens := int64(5000)
  554. reasoningEffort := ""
  555. switch agent {
  556. case AgentTitle:
  557. model = models.OpenRouterClaude35Haiku
  558. maxTokens = 80
  559. case AgentTask:
  560. model = models.OpenRouterClaude37Sonnet
  561. default:
  562. model = models.OpenRouterClaude37Sonnet
  563. }
  564. // Check if model supports reasoning
  565. if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
  566. reasoningEffort = "medium"
  567. }
  568. cfg.Agents[agent] = Agent{
  569. Model: model,
  570. MaxTokens: maxTokens,
  571. ReasoningEffort: reasoningEffort,
  572. }
  573. return true
  574. }
  575. if apiKey := os.Getenv("GEMINI_API_KEY"); apiKey != "" {
  576. var model models.ModelID
  577. maxTokens := int64(5000)
  578. if agent == AgentTitle {
  579. model = models.Gemini25Flash
  580. maxTokens = 80
  581. } else {
  582. model = models.Gemini25
  583. }
  584. cfg.Agents[agent] = Agent{
  585. Model: model,
  586. MaxTokens: maxTokens,
  587. }
  588. return true
  589. }
  590. if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
  591. maxTokens := int64(5000)
  592. if agent == AgentTitle {
  593. maxTokens = 80
  594. }
  595. cfg.Agents[agent] = Agent{
  596. Model: models.QWENQwq,
  597. MaxTokens: maxTokens,
  598. }
  599. return true
  600. }
  601. if hasAWSCredentials() {
  602. maxTokens := int64(5000)
  603. if agent == AgentTitle {
  604. maxTokens = 80
  605. }
  606. cfg.Agents[agent] = Agent{
  607. Model: models.BedrockClaude37Sonnet,
  608. MaxTokens: maxTokens,
  609. ReasoningEffort: "medium", // Claude models support reasoning
  610. }
  611. return true
  612. }
  613. if hasVertexAICredentials() {
  614. var model models.ModelID
  615. maxTokens := int64(5000)
  616. if agent == AgentTitle {
  617. model = models.VertexAIGemini25Flash
  618. maxTokens = 80
  619. } else {
  620. model = models.VertexAIGemini25
  621. }
  622. cfg.Agents[agent] = Agent{
  623. Model: model,
  624. MaxTokens: maxTokens,
  625. }
  626. return true
  627. }
  628. return false
  629. }
// Get returns the current configuration, or nil if Load has not been called.
// It's safe to call this function multiple times.
func Get() *Config {
	return cfg
}
// WorkingDirectory returns the current working directory from the
// configuration. It panics if Load has not been called yet.
func WorkingDirectory() string {
	if cfg == nil {
		panic("config not loaded")
	}
	return cfg.WorkingDir
}
  642. // GetHostname returns the system hostname or "User" if it can't be determined
  643. func GetHostname() (string, error) {
  644. hostname, err := os.Hostname()
  645. if err != nil {
  646. return "User", err
  647. }
  648. return hostname, nil
  649. }
  650. // GetUsername returns the current user's username
  651. func GetUsername() (string, error) {
  652. currentUser, err := user.Current()
  653. if err != nil {
  654. return "User", err
  655. }
  656. return currentUser.Username, nil
  657. }
  658. func updateCfgFile(updateCfg func(config *Config)) error {
  659. if cfg == nil {
  660. return fmt.Errorf("config not loaded")
  661. }
  662. // Get the config file path
  663. configFile := viper.ConfigFileUsed()
  664. var configData []byte
  665. if configFile == "" {
  666. homeDir, err := os.UserHomeDir()
  667. if err != nil {
  668. return fmt.Errorf("failed to get home directory: %w", err)
  669. }
  670. configFile = filepath.Join(homeDir, fmt.Sprintf(".%s.json", appName))
  671. slog.Info("config file not found, creating new one", "path", configFile)
  672. configData = []byte(`{}`)
  673. } else {
  674. // Read the existing config file
  675. data, err := os.ReadFile(configFile)
  676. if err != nil {
  677. return fmt.Errorf("failed to read config file: %w", err)
  678. }
  679. configData = data
  680. }
  681. // Parse the JSON
  682. var userCfg *Config
  683. if err := json.Unmarshal(configData, &userCfg); err != nil {
  684. return fmt.Errorf("failed to parse config file: %w", err)
  685. }
  686. updateCfg(userCfg)
  687. // Write the updated config back to file
  688. updatedData, err := json.MarshalIndent(userCfg, "", " ")
  689. if err != nil {
  690. return fmt.Errorf("failed to marshal config: %w", err)
  691. }
  692. if err := os.WriteFile(configFile, updatedData, 0o644); err != nil {
  693. return fmt.Errorf("failed to write config file: %w", err)
  694. }
  695. return nil
  696. }
  697. func UpdateAgentModel(agentName AgentName, modelID models.ModelID) error {
  698. if cfg == nil {
  699. panic("config not loaded")
  700. }
  701. existingAgentCfg := cfg.Agents[agentName]
  702. model, ok := models.SupportedModels[modelID]
  703. if !ok {
  704. return fmt.Errorf("model %s not supported", modelID)
  705. }
  706. maxTokens := existingAgentCfg.MaxTokens
  707. if model.DefaultMaxTokens > 0 {
  708. maxTokens = model.DefaultMaxTokens
  709. }
  710. newAgentCfg := Agent{
  711. Model: modelID,
  712. MaxTokens: maxTokens,
  713. ReasoningEffort: existingAgentCfg.ReasoningEffort,
  714. }
  715. cfg.Agents[agentName] = newAgentCfg
  716. if err := validateAgent(cfg, agentName, newAgentCfg); err != nil {
  717. // revert config update on failure
  718. cfg.Agents[agentName] = existingAgentCfg
  719. return fmt.Errorf("failed to update agent model: %w", err)
  720. }
  721. return updateCfgFile(func(config *Config) {
  722. if config.Agents == nil {
  723. config.Agents = make(map[AgentName]Agent)
  724. }
  725. config.Agents[agentName] = newAgentCfg
  726. })
  727. }
  728. // UpdateTheme updates the theme in the configuration and writes it to the config file.
  729. func UpdateTheme(themeName string) error {
  730. if cfg == nil {
  731. return fmt.Errorf("config not loaded")
  732. }
  733. // Update the in-memory config
  734. cfg.TUI.Theme = themeName
  735. // Update the file config
  736. return updateCfgFile(func(config *Config) {
  737. config.TUI.Theme = themeName
  738. })
  739. }