Mirror of https://github.com/ddworken/hishtory.git, synced 2024-12-27 09:18:55 +01:00
ai: add some new env variables to control OpenAI requests (#231)
Co-authored-by: David Dworken <david@daviddworken.com>
This commit is contained in:
parent cc65735b0e
commit 5056fb5527
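
The diff below does two things: it clarifies a log message in GetAiSuggestionsViaHishtoryApi, and it moves the OpenAI request construction in GetAiSuggestionsViaOpenAiApi into a new createOpenAiRequest helper whose defaults can be overridden with three new environment variables: OPENAI_API_MODEL, OPENAI_API_NUMBER_COMPLETIONS, and OPENAI_API_SYSTEM_PROMPT. As a minimal, self-contained sketch of the same override pattern (the envOr/envOrInt helpers here are hypothetical and not part of the repository):

package main

import (
    "fmt"
    "os"
    "strconv"
)

// envOr returns the value of the environment variable key if it is set and
// non-empty, and def otherwise. Hypothetical helper, for illustration only.
func envOr(key, def string) string {
    if v := os.Getenv(key); v != "" {
        return v
    }
    return def
}

// envOrInt does the same for integer-valued variables; unparseable values
// fall back to def, mirroring the lenient strconv.Atoi handling in the diff.
func envOrInt(key string, def int) int {
    if v := os.Getenv(key); v != "" {
        if n, err := strconv.Atoi(v); err == nil {
            return n
        }
    }
    return def
}

func main() {
    model := envOr("OPENAI_API_MODEL", "gpt-4o-mini")
    completions := envOrInt("OPENAI_API_NUMBER_COMPLETIONS", 1)
    fmt.Printf("model=%s completions=%d\n", model, completions)
}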
@@ -56,7 +56,7 @@ func getOsName() string {
 }
 
 func GetAiSuggestionsViaHishtoryApi(ctx context.Context, shellName, query string, numberCompletions int) ([]string, error) {
-    hctx.GetLogger().Infof("Running OpenAI query for %#v", query)
+    hctx.GetLogger().Infof("Running OpenAI query for %#v via hishtory server", query)
     req := ai.AiSuggestionRequest{
         DeviceId: hctx.GetConf(ctx).DeviceId,
         UserId:   data.UserId(hctx.GetConf(ctx).UserSecret),
@@ -7,6 +7,7 @@ import (
     "io"
     "net/http"
     "os"
+    "strconv"
 
     "github.com/ddworken/hishtory/client/hctx"
     "golang.org/x/exp/slices"
@@ -58,32 +59,15 @@ func GetAiSuggestionsViaOpenAiApi(apiEndpoint, query, shellName, osName string,
         return results, OpenAiUsage{}, nil
     }
     hctx.GetLogger().Infof("Running OpenAI query for %#v", query)
-    if osName == "" {
-        osName = "Linux"
-    }
-    if shellName == "" {
-        shellName = "bash"
-    }
     apiKey := os.Getenv("OPENAI_API_KEY")
     if apiKey == "" && apiEndpoint == DefaultOpenAiEndpoint {
         return nil, OpenAiUsage{}, fmt.Errorf("OPENAI_API_KEY environment variable is not set")
     }
-    apiReq := openAiRequest{
-        Model:             "gpt-3.5-turbo",
-        NumberCompletions: numberCompletions,
-        Messages: []openAiMessage{
-            {Role: "system", Content: "You are an expert programmer that loves to help people with writing shell commands. " +
-                "You always reply with just a shell command and no additional context, information, or formatting. " +
-                "Your replies will be directly executed in " + shellName + " on " + osName +
-                ", so ensure that they are correct and do not contain anything other than a shell command."},
-            {Role: "user", Content: query},
-        },
-    }
-    apiReqStr, err := json.Marshal(apiReq)
+    apiReqStr, err := json.Marshal(createOpenAiRequest(query, shellName, osName, numberCompletions))
     if err != nil {
         return nil, OpenAiUsage{}, fmt.Errorf("failed to serialize JSON for OpenAI API: %w", err)
     }
-    req, err := http.NewRequest("POST", apiEndpoint, bytes.NewBuffer(apiReqStr))
+    req, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewBuffer(apiReqStr))
     if err != nil {
         return nil, OpenAiUsage{}, fmt.Errorf("failed to create OpenAI API request: %w", err)
     }
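
In the hunk above, the inlined openAiRequest literal (hard-coded to gpt-3.5-turbo) is replaced by a call to the new createOpenAiRequest helper, and the request method switches from the string literal "POST" to the http.MethodPost constant. The struct definitions themselves are not part of this diff; assuming their JSON tags follow the OpenAI chat completions wire format ("model", "n", "messages", "role", "content"), the marshaled payload would look roughly like this sketch (chatRequest/chatMessage are stand-ins, not hishtory's types):

package main

import (
    "encoding/json"
    "fmt"
)

// Hypothetical mirror of the request/message shapes used in the diff; the
// actual field tags of hishtory's openAiRequest are not shown in this commit.
type chatMessage struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

type chatRequest struct {
    Model             string        `json:"model"`
    NumberCompletions int           `json:"n"`
    Messages          []chatMessage `json:"messages"`
}

func main() {
    payload, _ := json.Marshal(chatRequest{
        Model:             "gpt-4o-mini",
        NumberCompletions: 1,
        Messages: []chatMessage{
            {Role: "system", Content: "You are an expert programmer..."},
            {Role: "user", Content: "list all files over 1GB"},
        },
    })
    // Prints a JSON body in the shape the OpenAI chat completions endpoint expects.
    fmt.Println(string(payload))
}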
@@ -106,7 +90,7 @@ func GetAiSuggestionsViaOpenAiApi(apiEndpoint, query, shellName, osName string,
     var apiResp openAiResponse
     err = json.Unmarshal(bodyText, &apiResp)
     if err != nil {
-        return nil, OpenAiUsage{}, fmt.Errorf("failed to parse OpenAI API response=%#v: %w", bodyText, err)
+        return nil, OpenAiUsage{}, fmt.Errorf("failed to parse OpenAI API response=%#v: %w", string(bodyText), err)
     }
     if len(apiResp.Choices) == 0 {
         return nil, OpenAiUsage{}, fmt.Errorf("OpenAI API returned zero choices, parsed resp=%#v, resp body=%#v, resp.StatusCode=%d", apiResp, bodyText, resp.StatusCode)
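
The one-line change above only affects error readability: bodyText is a []byte, so formatting it with %#v prints a Go byte-slice literal rather than the response text. Converting to string first makes the wrapped error legible, as this small sketch shows:

package main

import "fmt"

func main() {
    bodyText := []byte(`{"error":"invalid_api_key"}`)

    // %#v on a byte slice prints Go syntax, e.g. []byte{0x7b, 0x22, ...}
    fmt.Printf("as bytes:  %#v\n", bodyText)

    // Converting to string first yields the readable response body.
    fmt.Printf("as string: %#v\n", string(bodyText))
}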
@@ -133,3 +117,44 @@ type AiSuggestionRequest struct {
 type AiSuggestionResponse struct {
     Suggestions []string `json:"suggestions"`
 }
+
+func createOpenAiRequest(query, shellName, osName string, numberCompletions int) openAiRequest {
+    if osName == "" {
+        osName = "Linux"
+    }
+    if shellName == "" {
+        shellName = "bash"
+    }
+
+    model := os.Getenv("OPENAI_API_MODEL")
+    if model == "" {
+        // According to https://platform.openai.com/docs/models gpt-4o-mini is the best model
+        // by performance/price ratio.
+        model = "gpt-4o-mini"
+    }
+
+    if envNumberCompletions := os.Getenv("OPENAI_API_NUMBER_COMPLETIONS"); envNumberCompletions != "" {
+        n, err := strconv.Atoi(envNumberCompletions)
+        if err == nil {
+            numberCompletions = n
+        }
+    }
+
+    defaultSystemPrompt := "You are an expert programmer that loves to help people with writing shell commands. " +
+        "You always reply with just a shell command and no additional context, information, or formatting. " +
+        "Your replies will be directly executed in " + shellName + " on " + osName +
+        ", so ensure that they are correct and do not contain anything other than a shell command."
+
+    if systemPrompt := os.Getenv("OPENAI_API_SYSTEM_PROMPT"); systemPrompt != "" {
+        defaultSystemPrompt = systemPrompt
+    }
+
+    return openAiRequest{
+        Model:             model,
+        NumberCompletions: numberCompletions,
+        Messages: []openAiMessage{
+            {Role: "system", Content: defaultSystemPrompt},
+            {Role: "user", Content: query},
+        },
+    }
+}
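
Taken together, createOpenAiRequest keeps the previous defaults (the Linux/bash fallbacks and the stock system prompt) while switching the default model from gpt-3.5-turbo to gpt-4o-mini, and lets the three new environment variables override the model, the number of completions, and the system prompt. A hypothetical test sketch, not from the repository, assuming it sits in the same package as the unexported helper (package name assumed):

package ai // assumed package name; must match the file defining createOpenAiRequest

import "testing"

// Sketch of how the new overrides could be exercised.
func TestCreateOpenAiRequestEnvOverrides(t *testing.T) {
    t.Setenv("OPENAI_API_MODEL", "gpt-4o")
    t.Setenv("OPENAI_API_NUMBER_COMPLETIONS", "5")
    t.Setenv("OPENAI_API_SYSTEM_PROMPT", "Reply with exactly one fish command.")

    req := createOpenAiRequest("list all files over 1GB", "fish", "Linux", 3)

    if req.Model != "gpt-4o" {
        t.Errorf("expected OPENAI_API_MODEL to override the model, got %q", req.Model)
    }
    if req.NumberCompletions != 5 {
        t.Errorf("expected OPENAI_API_NUMBER_COMPLETIONS to override the count, got %d", req.NumberCompletions)
    }
    if got := req.Messages[0].Content; got != "Reply with exactly one fish command." {
        t.Errorf("expected OPENAI_API_SYSTEM_PROMPT to override the system prompt, got %q", got)
    }
}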