diff --git a/README.md b/README.md index 1f5905c..21f574a 100644 --- a/README.md +++ b/README.md @@ -125,8 +125,9 @@ preferences: models: - name: gpt-4-1106-preview endpoint: https://api.openai.com/v1/chat/completions - auth_env_var: OPENAI_API_KEY - org_env_var: OPENAI_ORG_ID + api_key: ${OPENAI_API_KEY} + org_id: ${OPENAI_ORG_ID} + project_id: ${OPENAI_PROJECT_ID} prompt: [ { @@ -141,7 +142,7 @@ models: config_format_version: "1" ```` -**Note:** The `auth_env_var` is set to `OPENAI_API_KEY` verbatim, not the key itself, so as to not keep sensitive information in the config file. +**Note:** The `api_key` references an environment variable by default, not the key itself. ### Setting Up a Local Model @@ -163,8 +164,9 @@ Here's what I did: models: - name: stablelm-zephyr-3b.Q8_0 endpoint: http://127.0.0.1:8080/v1/chat/completions - auth_env_var: OPENAI_API_KEY - org_env_var: OPENAI_ORG_ID + api_key: ${OPENAI_API_KEY} + org_id: ${OPENAI_ORG_ID} + project_id: ${OPENAI_PROJECT_ID} prompt: - role: system content: @@ -212,7 +214,7 @@ Define `AZURE_OPENAI_API_KEY` environment variable and make few changes to the c models: - name: azure-gpt-4 endpoint: https://.openai.azure.com/openai/deployments//chat/completions?api-version= - auth_env_var: AZURE_OPENAI_API_KEY + api_key: ${AZURE_OPENAI_API_KEY} ``` ### I Fucked Up The Config File diff --git a/cli/cli.go b/cli/cli.go index ea4a744..15cf526 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -8,6 +8,7 @@ import ( . 
"q/types" "q/util" + "regexp" "runtime" "strings" @@ -295,24 +296,25 @@ func initialModel(prompt string, client *llm.LLMClient) model { // === Main === // +var envVarNameRegex = regexp.MustCompile(`\$\{?([a-zA-Z_][a-zA-Z0-9_]*)\}?`) + func printAPIKeyNotSetMessage(modelConfig ModelConfig) { - auth := modelConfig.Auth + auth := modelConfig.ApiKey.Raw() r, _ := glamour.NewTermRenderer( glamour.WithAutoStyle(), ) - - profileScriptName := ".zshrc or.bashrc" - shellSyntax := "\n```bash\nexport OPENAI_API_KEY=[your key]\n```" - if runtime.GOOS == "windows" { - profileScriptName = "$profile" - shellSyntax = "\n```powershell\n$env:OPENAI_API_KEY = \"[your key]\"\n```" - } - styleRed := lipgloss.NewStyle().Foreground(lipgloss.Color("9")) - switch auth { - case "OPENAI_API_KEY": - msg1 := styleRed.Render("OPENAI_API_KEY environment variable not set.") + if envVarNameRegex.MatchString(auth) { + varName := envVarNameRegex.ReplaceAllString(auth, "$1") + profileScriptName := ".zshrc or.bashrc" + shellSyntax := fmt.Sprintf("\n```bash\nexport %s=[your key]\n```", varName) + if runtime.GOOS == "windows" { + profileScriptName = "$profile" + shellSyntax = fmt.Sprintf("\n```powershell\n$env:%s = \"[your key]\"\n```", varName) + } + + msg1 := styleRed.Render(fmt.Sprintf("%s environment variable not set.", varName)) // make it platform agnostic message_string := fmt.Sprintf(` @@ -324,8 +326,8 @@ func printAPIKeyNotSetMessage(modelConfig ModelConfig) { msg2, _ := r.Render(message_string) fmt.Printf("\n %v%v\n", msg1, msg2) - default: - msg := styleRed.Render(auth + " environment variable not set.") + } else { + msg := styleRed.Render("api_key value not set in config.") fmt.Printf("\n %v", msg) } } @@ -357,12 +359,12 @@ func runQProgram(prompt string) { } modelConfig, err := getModelConfig(appConfig) + if err != nil { config.PrintConfigErrorMessage(err) os.Exit(1) } - auth := os.Getenv(modelConfig.Auth) - if auth == "" || os.Getenv(modelConfig.Auth) == "" { + if 
modelConfig.ApiKey.Resolve() == "" { printAPIKeyNotSetMessage(modelConfig) os.Exit(1) } @@ -370,10 +372,6 @@ func runQProgram(prompt string) { // TODO: maybe add a validating function config.SaveAppConfig(appConfig) - orgID := os.Getenv(modelConfig.OrgID) - modelConfig.Auth = auth - modelConfig.OrgID = orgID - c := llm.NewLLMClient(modelConfig) p := tea.NewProgram(initialModel(prompt, c)) c.StreamCallback = streamHandler(p) diff --git a/config/cli.go b/config/cli.go index cf9fd99..1340ce1 100644 --- a/config/cli.go +++ b/config/cli.go @@ -399,10 +399,7 @@ func modelDetailsForModelMenu(appConfig AppConfig, modelConfig types.ModelConfig title: "Endpoint: " + modelConfig.Endpoint, }, { - title: "Auth: " + modelConfig.Auth, - }, - { - title: "Auth: " + modelConfig.Auth, + title: "Auth: " + modelConfig.ApiKey.Raw(), }, { title: "Prompt", diff --git a/config/config.go b/config/config.go index e2d692a..4b10227 100644 --- a/config/config.go +++ b/config/config.go @@ -96,6 +96,12 @@ func loadExistingConfig(filePath string) (AppConfig, error) { if err != nil { return config, fmt.Errorf("error unmarshalling config file: %s", err) } + if config.Version == "1" { + for i, modelCfg := range config.Models { + config.Models[i] = modelCfg.Migrate() + } + config.Version = "2" + } return config, nil } diff --git a/config/config.yaml b/config/config.yaml index 2f747b7..44e41e7 100644 --- a/config/config.yaml +++ b/config/config.yaml @@ -4,8 +4,9 @@ preferences: models: - name: gpt-4 endpoint: https://api.openai.com/v1/chat/completions - auth_env_var: OPENAI_API_KEY - org_env_var: OPENAI_ORG_ID + api_key: ${OPENAI_API_KEY} + org_id: ${OPENAI_ORG_ID} + project_id: ${OPENAI_PROJECT_ID} prompt: [ { @@ -18,8 +19,9 @@ models: - name: gpt-4-1106-preview endpoint: https://api.openai.com/v1/chat/completions - auth_env_var: OPENAI_API_KEY - org_env_var: OPENAI_ORG_ID + api_key: ${OPENAI_API_KEY} + org_id: ${OPENAI_ORG_ID} + project_id: ${OPENAI_PROJECT_ID} prompt: [ { @@ -32,8 +34,9 @@ models: - 
name: gpt-3.5-turbo endpoint: https://api.openai.com/v1/chat/completions - auth_env_var: OPENAI_API_KEY - org_env_var: OPENAI_ORG_ID + api_key: ${OPENAI_API_KEY} + org_id: ${OPENAI_ORG_ID} + project_id: ${OPENAI_PROJECT_ID} prompt: [ { @@ -49,4 +52,4 @@ models: { role: "assistant", content: "```bash\necho \"hi\"\n```" }, ] -config_format_version: "1" +config_format_version: "2" diff --git a/llm/llm.go b/llm/llm.go index 35e43e5..233331d 100644 --- a/llm/llm.go +++ b/llm/llm.go @@ -40,13 +40,19 @@ func (c *LLMClient) createRequest(payload Payload) (*http.Request, error) { if err != nil { return nil, fmt.Errorf("failed to create request: %w", err) } + apiKey := c.config.ApiKey.Resolve() if strings.Contains(c.config.Endpoint, "openai.azure.com") { - req.Header.Set("Api-Key", c.config.Auth) + req.Header.Set("Api-Key", apiKey) } else { - req.Header.Set("Authorization", "Bearer "+c.config.Auth) + req.Header.Set("Authorization", "Bearer "+apiKey) } - if c.config.OrgID != "" { - req.Header.Set("OpenAI-Organization", c.config.OrgID) + orgID := c.config.OrgID.Resolve() + if orgID != "" { + req.Header.Set("OpenAI-Organization", orgID) + } + projectID := c.config.ProjectID.Resolve() + if projectID != "" { + req.Header.Set("OpenAI-Project", projectID) } req.Header.Set("Content-Type", "application/json") return req, nil diff --git a/types/types.go b/types/types.go index 525fb94..c7b973d 100644 --- a/types/types.go +++ b/types/types.go @@ -1,11 +1,43 @@ package types +import ( + "fmt" + "os" +) + +type ValueOrVar string + +func (v ValueOrVar) Raw() string { + return string(v) +} + +func (v ValueOrVar) Resolve() string { + return os.ExpandEnv(string(v)) +} + type ModelConfig struct { - ModelName string `yaml:"name"` - Endpoint string `yaml:"endpoint"` - Auth string `yaml:"auth_env_var"` - OrgID string `yaml:"org_env_var,omitempty"` - Prompt []Message `yaml:"prompt"` + ModelName string `yaml:"name"` + Endpoint string `yaml:"endpoint"` + ApiKey ValueOrVar `yaml:"api_key,omitempty"` 
+	OrgID     ValueOrVar `yaml:"org_id,omitempty"`
+	ProjectID ValueOrVar `yaml:"project_id,omitempty"`
+	Prompt    []Message  `yaml:"prompt"`
+	// deprecated var-only keys
+	V1_Auth      string `yaml:"auth_env_var,omitempty"`
+	V1_OrgID     string `yaml:"org_env_var,omitempty"`
+	V1_ProjectID string `yaml:"project_env_var,omitempty"`
+}
+
+// Migrate converts the deprecated v1 *_env_var fields into v2 ${VAR}
+// reference syntax. Fields absent from the v1 config are left empty
+// rather than becoming the malformed literal "${}", which would
+// otherwise be persisted back to the config file on save.
+func (c ModelConfig) Migrate() ModelConfig {
+	if c.V1_Auth != "" {
+		c.ApiKey = ValueOrVar(fmt.Sprintf("${%s}", c.V1_Auth))
+		c.V1_Auth = ""
+	}
+	if c.V1_OrgID != "" {
+		c.OrgID = ValueOrVar(fmt.Sprintf("${%s}", c.V1_OrgID))
+		c.V1_OrgID = ""
+	}
+	switch {
+	case c.V1_ProjectID != "":
+		// honor an explicitly configured v1 project var (was previously ignored)
+		c.ProjectID = ValueOrVar(fmt.Sprintf("${%s}", c.V1_ProjectID))
+		c.V1_ProjectID = ""
+	case c.ProjectID == "":
+		// preserve the previous default for configs with no project setting;
+		// an unset env var resolves to "" and the header is simply skipped
+		c.ProjectID = ValueOrVar("${OPENAI_PROJECT_ID}")
+	}
+	return c
+}
 
 type Message struct {