ai.go
package main

import (
    "context"
    "encoding/json"
    "errors"

    "github.com/sashabaranov/go-openai"
)
// AIClient wraps the go-openai client together with the model name used for
// all chat completion requests.
type AIClient struct {
    client *openai.Client
    model  string
}

// NewAIClient builds an AIClient for the given API key and model.
func NewAIClient(apiKey, model string) *AIClient {
    return &AIClient{
        client: openai.NewClient(apiKey),
        model:  model,
    }
}
// ChatCompletion sends the conversation to the OpenAI chat completions API
// and returns the content of the first choice.
func (ai *AIClient) ChatCompletion(messages []Message) (string, error) {
    // Convert the package's Message type into go-openai's message struct.
    var openaiMessages []openai.ChatCompletionMessage
    for _, msg := range messages {
        openaiMessages = append(openaiMessages, openai.ChatCompletionMessage{
            Role:    msg.Role,
            Content: msg.Content,
        })
    }

    resp, err := ai.client.CreateChatCompletion(
        context.Background(),
        openai.ChatCompletionRequest{
            Model:    ai.model,
            Messages: openaiMessages,
        },
    )
    if err != nil {
        return "", err
    }
    if len(resp.Choices) == 0 {
        return "", errors.New("chat completion returned no choices")
    }
    return resp.Choices[0].Message.Content, nil
}
// ChatCompletionStream starts a streaming chat completion that forces the
// model to call the return_command function, so the reply arrives as
// structured function-call arguments rather than free text.
func (ai *AIClient) ChatCompletionStream(messages []Message) (*openai.ChatCompletionStream, error) {
    var oaiMessages []openai.ChatCompletionMessage
    for _, msg := range messages {
        oaiMessages = append(oaiMessages, openai.ChatCompletionMessage{
            Role:    msg.Role,
            Content: msg.Content,
        })
    }

    // JSON Schema for the function the model must call: a shell command plus
    // the binaries it depends on.
    returnCommandFunction := openai.FunctionDefinition{
        Name: "return_command",
        Parameters: json.RawMessage(`{
            "type": "object",
            "properties": {
                "command": {
                    "type": "string",
                    "description": "The full command to be executed"
                },
                "binaries": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    },
                    "description": "List of required binaries for the command"
                }
            },
            "required": ["command"]
        }`),
        Description: "Return a command to be executed along with any required binaries",
    }

    ctx := context.Background()
    req := openai.ChatCompletionRequest{
        Model:        ai.model,
        Messages:     oaiMessages,
        Stream:       true,
        Functions:    []openai.FunctionDefinition{returnCommandFunction},
        FunctionCall: openai.FunctionCall{Name: "return_command"},
    }
    return ai.client.CreateChatCompletionStream(ctx, req)
}
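
// A minimal sketch of how a caller might drain the stream returned above.
// Illustrative only: it assumes the caller accumulates the streamed
// function-call arguments into a JSON string and parses it afterwards; the
// variable names and error handling are placeholders, not part of this file.
//
//     stream, err := ai.ChatCompletionStream(messages)
//     if err != nil { /* handle error */ }
//     defer stream.Close()
//     var args strings.Builder
//     for {
//         resp, err := stream.Recv()
//         if errors.Is(err, io.EOF) {
//             break
//         }
//         if err != nil { /* handle error */ }
//         if fc := resp.Choices[0].Delta.FunctionCall; fc != nil {
//             args.WriteString(fc.Arguments)
//         }
//     }
//     // args.String() now holds the JSON arguments for return_command.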
// GetAvailableModels lists the model IDs available to the configured API key.
func (ai *AIClient) GetAvailableModels() ([]string, error) {
    modelList, err := ai.client.ListModels(context.Background())
    if err != nil {
        return nil, err
    }
    var models []string
    for _, model := range modelList.Models {
        models = append(models, model.ID)
    }
    return models, nil
}
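
// Illustrative usage (not part of the original file). It assumes the Message
// type defined elsewhere in this package has string Role and Content fields,
// and that the API key comes from the environment; the model and prompt are
// placeholders.
//
//     ai := NewAIClient(os.Getenv("OPENAI_API_KEY"), openai.GPT3Dot5Turbo)
//     reply, err := ai.ChatCompletion([]Message{
//         {Role: openai.ChatMessageRoleUser, Content: "How do I list open ports?"},
//     })
//     if err != nil {
//         log.Fatal(err)
//     }
//     fmt.Println(reply)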