package plugins
import (
"encoding/json"
"errors"
"fmt"
"strings"
log "github.com/sirupsen/logrus"
"github.com/go-resty/resty/v2"
)
const (
	// promptVirtualizeLinuxTerminal seeds the model so it role-plays a Linux
	// terminal: each "A:" line is a typed command, each "Q:" the terminal output.
	// Reference: https://www.engraved.blog/building-a-virtual-machine-inside/
	promptVirtualizeLinuxTerminal = "I want you to act as a Linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. Do not write explanations. Do not type commands unless I instruct you to do so.\n\nA:pwd\n\nQ:/home/user\n\n"

	// ChatGPTPluginName is the identifier under which this plugin is registered.
	ChatGPTPluginName = "OpenAIGPTLinuxTerminal"
	// openAIGPTEndpoint is the OpenAI completions API URL.
	openAIGPTEndpoint = "https://api.openai.com/v1/completions"
)
// History records one past exchange with the virtual terminal: the command
// the client typed (Input) and the output the terminal produced (Output).
type History struct {
	Input  string
	Output string
}
type openAIGPTVirtualTerminal struct {
Histories [ ] History
openAIKey string
client * resty . Client
2022-12-16 23:02:16 +01:00
}
// Choice is a single completion candidate returned by the OpenAI API.
type Choice struct {
	Text         string      `json:"text"`
	Index        int         `json:"index"`
	Logprobs     interface{} `json:"logprobs"`
	FinishReason string      `json:"finish_reason"`
}
// gptResponse mirrors the JSON body of an OpenAI completions API response.
type gptResponse struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int      `json:"created"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
	// Usage reports the token accounting for the request/response pair.
	Usage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}
// gptRequest is the JSON payload sent to the OpenAI completions endpoint.
// NOTE(review): the API accepts fractional temperature/top_p; the int fields
// here limit callers to whole values, which suffices for this plugin's
// fixed settings — confirm before reusing elsewhere.
type gptRequest struct {
	Model            string   `json:"model"`
	Prompt           string   `json:"prompt"`
	Temperature      int      `json:"temperature"`
	MaxTokens        int      `json:"max_tokens"`
	TopP             int      `json:"top_p"`
	FrequencyPenalty int      `json:"frequency_penalty"`
	PresencePenalty  int      `json:"presence_penalty"`
	Stop             []string `json:"stop"`
}
// Init builds a virtual terminal backed by the OpenAI API, seeded with the
// given conversation history and authenticated with openAIKey.
func Init(history []History, openAIKey string) *openAIGPTVirtualTerminal {
	terminal := &openAIGPTVirtualTerminal{
		Histories: history,
		openAIKey: openAIKey,
		client:    resty.New(),
	}
	return terminal
}
func buildPrompt ( histories [ ] History , command string ) string {
var sb strings . Builder
2023-01-22 13:00:52 +01:00
sb . WriteString ( promptVirtualizeLinuxTerminal )
2022-12-16 23:02:16 +01:00
for _ , history := range histories {
sb . WriteString ( fmt . Sprintf ( "A:%s\n\nQ:%s\n\n" , history . Input , history . Output ) )
}
// Append command to evaluate
sb . WriteString ( fmt . Sprintf ( "A:%s\n\nQ:" , command ) )
return sb . String ( )
}
func ( openAIGPTVirtualTerminal * openAIGPTVirtualTerminal ) GetCompletions ( command string ) ( string , error ) {
2022-12-16 23:02:16 +01:00
requestJson , err := json . Marshal ( gptRequest {
Model : "text-davinci-003" ,
Prompt : buildPrompt ( openAIGPTVirtualTerminal . Histories , command ) ,
Temperature : 0 ,
MaxTokens : 100 ,
TopP : 1 ,
FrequencyPenalty : 0 ,
PresencePenalty : 0 ,
Stop : [ ] string { "\n" } ,
} )
if err != nil {
return "" , err
}
2023-10-09 01:16:53 +02:00
if openAIGPTVirtualTerminal . openAIKey == "" {
return "" , errors . New ( "openAIKey is empty" )
2022-12-16 23:02:16 +01:00
}
response , err := openAIGPTVirtualTerminal . client . R ( ) .
SetHeader ( "Content-Type" , "application/json" ) .
SetBody ( requestJson ) .
2023-10-09 01:16:53 +02:00
SetAuthToken ( openAIGPTVirtualTerminal . openAIKey ) .
2022-12-16 23:02:16 +01:00
SetResult ( & gptResponse { } ) .
Post ( openAIGPTEndpoint )
if err != nil {
return "" , err
}
2023-06-01 00:15:21 +02:00
log . Debug ( response )
2022-12-16 23:02:16 +01:00
if len ( response . Result ( ) . ( * gptResponse ) . Choices ) == 0 {
return "" , errors . New ( "no choices" )
}
return response . Result ( ) . ( * gptResponse ) . Choices [ 0 ] . Text , nil
}