This repository has been archived by the owner on Jan 21, 2024. It is now read-only.

Merge pull request #26 from dabumana/v.0.2.2
v.0.2.2
dabumana committed Apr 22, 2023
2 parents 3b1d1c6 + 2fed89a commit 78b7214
Showing 831 changed files with 360,616 additions and 144 deletions.
3 changes: 0 additions & 3 deletions .github/workflows/go.yml
@@ -26,6 +26,3 @@ jobs:

- name: Test
run: make test

- name: Coverage
run: make coverage
2 changes: 1 addition & 1 deletion src/go.mod
@@ -46,7 +46,7 @@ require (
github.com/subosito/gotenv v1.4.2 // indirect
go.starlark.net v0.0.0-20220816155156-cfacd8902214 // indirect
golang.org/x/arch v0.0.0-20190927153633-4e8777c89be4 // indirect
golang.org/x/net v0.4.0 // indirect
golang.org/x/net v0.7.0 // indirect
golang.org/x/sys v0.3.0 // indirect
golang.org/x/term v0.3.0 // indirect
golang.org/x/text v0.5.0 // indirect
5 changes: 1 addition & 4 deletions src/makefile
@@ -13,14 +13,11 @@ clean:
rm -rf 'export'
rm -rf 'training'

coverage:
go test --cover ./...

run: build
./bin/${APP}-${VERSION}

test:
go test ./...
go test -cover ./...

update:
go mod tidy
61 changes: 40 additions & 21 deletions src/service/agent.go
@@ -3,8 +3,8 @@ package service

import (
"context"
"encoding/csv"
"fmt"
"io/ioutil"
"net/http"
"os"
"time"
@@ -27,22 +27,25 @@ type Agent struct {
// Assistant context
templateID []string
templateCtx []string
// Client context
ctx context.Context
client gpt3.Client
exClient *http.Client
// Context
ctx context.Context
// Client
client *gpt3.Client
exClient *http.Client
// Properties
engineProperties model.EngineProperties
promptProperties model.PromptProperties
predictProperties model.PredictProperties
preferences parameters.GlobalPreferences
// Preferences
preferences parameters.GlobalPreferences
// Temporal cache
cachedPrompt string
}

// Initialize - Creates context background to be used along with the client
func (c *Agent) Initialize() Agent {
// ID
c.version = "v.0.2.0"
c.version = "v.0.2.2"
c.id = "anon"
// Key
c.key = getKeys()
@@ -54,6 +57,7 @@ func (c *Agent) Initialize() Agent {
// Role
c.preferences.Role = model.Assistant
// Global preferences
c.preferences.TemplateIDs = len(c.templateID)
c.preferences.Template = 0
c.preferences.Engine = "text-davinci-003"
c.preferences.Frequency = util.ParseFloat32("\u0030\u002e\u0035")
@@ -80,7 +84,7 @@ func (c *Agent) Initialize() Agent {
}

// Connect - Contextualize the API to create a new client
func (c *Agent) Connect() (gpt3.Client, *http.Client) {
func (c *Agent) Connect() (*gpt3.Client, *http.Client) {
godotenv.Load()

externalClient := http.Client{
@@ -91,17 +95,19 @@ func (c *Agent) Connect() (gpt3.Client, *http.Client) {
option := gpt3.WithHTTPClient(&externalClient)
client := gpt3.NewClient(c.key[0], option)

c.client = client
c.client = &client
c.exClient = &externalClient

return c.client, c.exClient
}
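
Side note on the Connect change above: the agent now keeps a pointer to the gpt3 client interface value instead of the value itself, and the prompt methods later copy it back out before each call. A minimal sketch of the new wiring, assuming the gpt3 bindings are the PullRequestInc/go-gpt3 package (the import path is not visible in this hunk) and using an illustrative timeout and placeholder key:

package main

import (
	"net/http"
	"time"

	gpt3 "github.com/PullRequestInc/go-gpt3" // assumed import path, not shown in the diff
)

// demoAgent abridges the Agent fields touched by this hunk.
type demoAgent struct {
	client   *gpt3.Client // pointer to the client interface value
	exClient *http.Client
}

// connect mirrors the new wiring: build the client once, store a pointer to it.
func (a *demoAgent) connect(apiKey string) {
	externalClient := http.Client{Timeout: 30 * time.Second} // illustrative timeout
	option := gpt3.WithHTTPClient(&externalClient)
	client := gpt3.NewClient(apiKey, option)

	a.client = &client // call sites later dereference it: c := *a.client
	a.exClient = &externalClient
}

func main() {
	a := &demoAgent{}
	a.connect("sk-placeholder") // placeholder key, illustration only
	_ = a.client
}
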

// SaveKeys - Set API keys
func (c *Agent) SaveKeys() {
var event EventManager
event := &EventManager{}

dir, _ := os.Getwd()
path := fmt.Sprint(dir, ".env")

_, err := os.Open(path)
if err != nil {
file, err := os.Create(".env")
@@ -114,13 +120,21 @@ func (c *Agent) SaveKeys() {
}
}

// GetStatus - Current agent information
func (c *Agent) GetStatus() parameters.GlobalPreferences {
return c.preferences
}

// getKeys - Grab API keys
func getKeys() []string {
file, _ := os.Open(".env")
dir, _ := os.Getwd()
path := fmt.Sprintf("%v/.env", dir)

file, _ := os.Stat(path)
if file != nil {
return getKeyFromEnv()
return getKeyFromLocal()
}
return getKeyFromLocal()
return getKeyFromEnv()
}

// getKeyFromEnv - Get environment keys
@@ -173,14 +187,19 @@ func getTemplateFromLocal() ([]string, []string) {
var context []string

dir, _ := os.Getwd()
path := dir + "/template/"
reader, _ := ioutil.ReadDir(path)

for _, file := range reader {
index = append(index, file.Name())
out, _ := ioutil.ReadFile(path + file.Name())
if out != nil {
context = append(context, string(out))
path := fmt.Sprintf("%v/template/role.csv", dir)

file, _ := os.Open(path)
reader := csv.NewReader(file)
data, _ := reader.ReadAll()

for _, j := range data {
for k, l := range j {
if k == 0 {
index = append(index, l)
} else if k == 1 {
context = append(context, l)
}
}
}

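For context on the template change above: getTemplateFromLocal no longer walks a template directory with ioutil; it now parses a single template/role.csv with encoding/csv, where the loop treats column 0 as the template ID and column 1 as the template context. A compact standalone sketch of that loader, with the helper name, error handling, and output being illustrative only:

package main

import (
	"encoding/csv"
	"fmt"
	"os"
)

// loadRoleTemplates mirrors the new getTemplateFromLocal logic: read role.csv
// and split each record into an ID column and a context column.
func loadRoleTemplates(path string) (index, context []string, err error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer file.Close()

	records, err := csv.NewReader(file).ReadAll()
	if err != nil {
		return nil, nil, err
	}

	for _, record := range records {
		if len(record) > 0 {
			index = append(index, record[0]) // column 0: template ID
		}
		if len(record) > 1 {
			context = append(context, record[1]) // column 1: template context
		}
	}
	return index, context, nil
}

func main() {
	dir, _ := os.Getwd()
	ids, contexts, err := loadRoleTemplates(fmt.Sprintf("%v/template/role.csv", dir))
	if err != nil {
		fmt.Println("could not load templates:", err)
		return
	}
	fmt.Println(len(ids), "template IDs,", len(contexts), "contexts")
}
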
4 changes: 3 additions & 1 deletion src/service/node.go
@@ -7,7 +7,9 @@ import (
)

// node - Global node service
var node Node
var (
node Node
)

// Node - Node manager
type Node struct {
1 change: 1 addition & 0 deletions src/service/parameters/global.go
@@ -8,6 +8,7 @@ import (
// GlobalPreferences - General
type GlobalPreferences struct {
// Engine properties
TemplateIDs int
Template int
Engine string
Mode string
21 changes: 14 additions & 7 deletions src/service/prompt.go
@@ -76,7 +76,8 @@ func (c *Prompt) SendStreamingChatCompletion(service Agent) *gpt3.ChatCompletion
buffer = append(buffer, "\n")

fmt.Print("\033[H\033[2J")
err := service.client.ChatCompletionStream(
client := *service.client
err := client.ChatCompletionStream(
service.ctx,
req, func(out *gpt3.ChatCompletionStreamResponse) {
resp.ID = out.ID
@@ -145,7 +146,8 @@ func (c *Prompt) SendChatCompletion(service Agent) *gpt3.ChatCompletionResponse
N: *gpt3.IntPtr(service.promptProperties.Results),
}

resp, err := service.client.ChatCompletion(
client := *service.client
resp, err := client.ChatCompletion(
service.ctx,
req)

@@ -174,7 +176,8 @@ func (c *Prompt) SendCompletion(service Agent) *gpt3.CompletionResponse
LogProbs: gpt3.IntPtr(service.promptProperties.Probabilities),
Echo: true}

resp, err := service.client.CompletionWithEngine(
client := *service.client
resp, err := client.CompletionWithEngine(
service.ctx,
service.engineProperties.Model,
req)
@@ -217,7 +220,8 @@ func (c *Prompt) SendStreamingCompletion(service Agent) *gpt3.CompletionResponse

fmt.Print("\033[H\033[2J")
isOnce := false
err := service.client.CompletionStreamWithEngine(
client := *service.client
err := client.CompletionStreamWithEngine(
service.ctx,
service.engineProperties.Model,
req, func(out *gpt3.CompletionResponse) {
@@ -284,7 +288,8 @@ func (c *Prompt) SendEditPrompt(service Agent) *gpt3.EditsResponse
TopP: gpt3.Float32Ptr(service.engineProperties.TopP),
N: gpt3.IntPtr(service.promptProperties.Results)}

resp, err := service.client.Edits(
client := *service.client
resp, err := client.Edits(
service.ctx,
req)

@@ -306,7 +311,8 @@ func (c *Prompt) SendEmbeddingPrompt(service Agent) *gpt3.EmbeddingsResponse {
Input: service.promptProperties.PromptContext,
}

resp, err := service.client.Embeddings(
client := *service.client
resp, err := client.Embeddings(
service.ctx,
req)

@@ -378,7 +384,8 @@ func (c *Prompt) SendPredictablePrompt(service Agent) *model.PredictResponse {

// GetListModels - Get actual list of available models
func (c *Prompt) GetListModels(service Agent) *gpt3.EnginesResponse {
resp, err := service.client.Engines(service.ctx)
client := *service.client
resp, err := client.Engines(service.ctx)

var event EventManager
event.Errata(err)
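The recurring edit across this file follows from Agent.client now being *gpt3.Client: each prompt method copies the interface value out of the pointer before issuing the request. A minimal sketch of that call-site shape, using a hypothetical listModels helper and an abridged service struct in place of the real Agent:

package sketch

import (
	"context"

	gpt3 "github.com/PullRequestInc/go-gpt3" // assumed import path, not shown in the diff
)

// demoService abridges the Agent fields that the prompt methods rely on.
type demoService struct {
	ctx    context.Context
	client *gpt3.Client
}

// listModels is a hypothetical helper mirroring the GetListModels call site.
func listModels(service demoService) (*gpt3.EnginesResponse, error) {
	client := *service.client          // copy the client interface value out of the pointer
	return client.Engines(service.ctx) // then call the API exactly as before
}
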
18 changes: 0 additions & 18 deletions src/template/assistant-mode

This file was deleted.

14 changes: 0 additions & 14 deletions src/template/developer-mode

This file was deleted.

9 changes: 0 additions & 9 deletions src/template/prompter-mode

This file was deleted.
