package ai

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"strconv"

	"github.com/ddworken/hishtory/client/hctx"

	"golang.org/x/exp/slices"
)

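// DefaultOpenAiEndpoint is the default OpenAI chat completions endpoint, used when no custom API endpoint is configured.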
const DefaultOpenAiEndpoint = "https://api.openai.com/v1/chat/completions"

type openAiRequest struct {
	Model             string          `json:"model"`
	Messages          []openAiMessage `json:"messages"`
	NumberCompletions int             `json:"n"`
}

type openAiMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type openAiResponse struct {
	Id      string         `json:"id"`
	Object  string         `json:"object"`
	Created int            `json:"created"`
	Model   string         `json:"model"`
	Usage   OpenAiUsage    `json:"usage"`
	Choices []openAiChoice `json:"choices"`
}

type openAiChoice struct {
	Index        int           `json:"index"`
	Message      openAiMessage `json:"message"`
	FinishReason string        `json:"finish_reason"`
}

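// OpenAiUsage reports the token usage that the OpenAI API returned for a single request.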
type OpenAiUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

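// TestOnlyOverrideAiSuggestionRequest pairs a query with the canned suggestions that tests want returned for it.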
type TestOnlyOverrideAiSuggestionRequest struct {
	Query       string   `json:"query"`
	Suggestions []string `json:"suggestions"`
}

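// TestOnlyOverrideAiSuggestions maps queries to canned suggestions so that tests can skip calling the OpenAI API.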
var TestOnlyOverrideAiSuggestions map[string][]string = make(map[string][]string)

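// GetAiSuggestionsViaOpenAiApi asks the given OpenAI-compatible API endpoint for shell command suggestions
// matching query and returns the deduplicated suggestions along with the token usage reported by the API.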
func GetAiSuggestionsViaOpenAiApi(apiEndpoint, query, shellName, osName, overriddenOpenAiModel string, numberCompletions int) ([]string, OpenAiUsage, error) {
	if results := TestOnlyOverrideAiSuggestions[query]; len(results) > 0 {
		return results, OpenAiUsage{}, nil
	}
	hctx.GetLogger().Infof("Running OpenAI query for %#v", query)
	apiKey := os.Getenv("OPENAI_API_KEY")
	if apiKey == "" && apiEndpoint == DefaultOpenAiEndpoint {
		return nil, OpenAiUsage{}, fmt.Errorf("OPENAI_API_KEY environment variable is not set")
	}
	apiReqStr, err := json.Marshal(createOpenAiRequest(query, shellName, osName, overriddenOpenAiModel, numberCompletions))
	if err != nil {
		return nil, OpenAiUsage{}, fmt.Errorf("failed to serialize JSON for OpenAI API: %w", err)
	}
	req, err := http.NewRequest(http.MethodPost, apiEndpoint, bytes.NewBuffer(apiReqStr))
	if err != nil {
		return nil, OpenAiUsage{}, fmt.Errorf("failed to create OpenAI API request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	if apiKey != "" {
		req.Header.Set("Authorization", "Bearer "+apiKey)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, OpenAiUsage{}, fmt.Errorf("failed to query OpenAI API: %w", err)
	}
	defer resp.Body.Close()
	bodyText, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, OpenAiUsage{}, fmt.Errorf("failed to read OpenAI API response: %w", err)
	}
	if resp.StatusCode == 429 {
		return nil, OpenAiUsage{}, fmt.Errorf("received 429 error code from OpenAI (is your API key valid?)")
	}
	var apiResp openAiResponse
	err = json.Unmarshal(bodyText, &apiResp)
	if err != nil {
		return nil, OpenAiUsage{}, fmt.Errorf("failed to parse OpenAI API response=%#v: %w", string(bodyText), err)
	}
	if len(apiResp.Choices) == 0 {
		return nil, OpenAiUsage{}, fmt.Errorf("OpenAI API returned zero choices, parsed resp=%#v, resp body=%#v, resp.StatusCode=%d", apiResp, bodyText, resp.StatusCode)
	}
	ret := make([]string, 0)
	for _, item := range apiResp.Choices {
		if !slices.Contains(ret, item.Message.Content) {
			ret = append(ret, item.Message.Content)
		}
	}
	hctx.GetLogger().Infof("For OpenAI query=%#v ==> %#v", query, ret)
	return ret, apiResp.Usage, nil
}

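// AiSuggestionRequest is the JSON request body for requesting AI suggestions on behalf of a given device and user.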
type AiSuggestionRequest struct {
	DeviceId          string `json:"device_id"`
	UserId            string `json:"user_id"`
	Query             string `json:"query"`
	NumberCompletions int    `json:"number_completions"`
	ShellName         string `json:"shell_name"`
	OsName            string `json:"os_name"`
	Model             string `json:"model"`
}

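// AiSuggestionResponse is the corresponding JSON response body containing the generated suggestions.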
type AiSuggestionResponse struct {
	Suggestions []string `json:"suggestions"`
}

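// createOpenAiRequest builds the chat completion request for query, filling in defaults for the OS, shell,
// model, and number of completions, and honoring the related OPENAI_API_* environment variable overrides.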
func createOpenAiRequest(query, shellName, osName, overriddenOpenAiModel string, numberCompletions int) openAiRequest {
	if osName == "" {
		osName = "Linux"
	}
	if shellName == "" {
		shellName = "bash"
	}
	// According to https://platform.openai.com/docs/models gpt-4o-mini is the best model
	// by performance/price ratio.
	model := "gpt-4o-mini"
	if envModel := os.Getenv("OPENAI_API_MODEL"); envModel != "" {
		model = envModel
	}
	if overriddenOpenAiModel != "" {
		model = overriddenOpenAiModel
	}
	if envNumberCompletions := os.Getenv("OPENAI_API_NUMBER_COMPLETIONS"); envNumberCompletions != "" {
		n, err := strconv.Atoi(envNumberCompletions)
		if err == nil {
			numberCompletions = n
		}
	}
	defaultSystemPrompt := "You are an expert programmer that loves to help people with writing shell commands. " +
		"You always reply with just a shell command and no additional context, information, or formatting. " +
		"Your replies will be directly executed in " + shellName + " on " + osName +
		", so ensure that they are correct and do not contain anything other than a shell command."
	if systemPrompt := os.Getenv("OPENAI_API_SYSTEM_PROMPT"); systemPrompt != "" {
		defaultSystemPrompt = systemPrompt
	}
	return openAiRequest{
		Model:             model,
		NumberCompletions: numberCompletions,
		Messages: []openAiMessage{
			{Role: "system", Content: defaultSystemPrompt},
			{Role: "user", Content: query},
		},
	}
}