openai: use gpt-4o by default
faster, better and cheaper than gpt-4-turbo
brainexe committed Jun 10, 2024
1 parent 757d506 commit 465f345
Showing 5 changed files with 31 additions and 30 deletions.
1 change: 1 addition & 0 deletions command/export/export.go
@@ -15,6 +15,7 @@ import (
"github.com/slack-go/slack"
)

+// limit the number of exported messages (incl thread messages)
const limit = 3000

// NewExportCommand is a constructor to create a new export command
2 changes: 1 addition & 1 deletion command/openai/config.go
@@ -41,7 +41,7 @@ func (c *Config) IsEnabled() bool {

var defaultConfig = Config{
APIHost: apiHost,
-Model: "gpt-3.5-turbo", // aka model behind ChatGPT
+Model: "gpt-4o", // aka model behind ChatGPT
UpdateInterval: time.Second * 1,
HistorySize: 25,
InitialSystemMessage: "You are a helpful Slack bot. By default, keep your answer short and truthful",
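Note: presumably only deployments that rely on the built-in defaults pick up gpt-4o; an explicitly configured model is unaffected. A hypothetical sketch (not part of this commit) of pinning the previous default from code, assuming callers start from defaultConfig when no model is configured:

// exampleModelOverride is an illustrative sketch only, not repository code:
// it keeps the previous default for setups that must not change model or cost.
func exampleModelOverride() Config {
    cfg := defaultConfig        // Model now defaults to "gpt-4o"
    cfg.Model = "gpt-3.5-turbo" // pin the old default if needed
    return cfg
}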
36 changes: 18 additions & 18 deletions command/openai/openai_test.go
@@ -77,25 +77,25 @@ func TestOpenai(t *testing.T) {
apiCompletionURL,
[]testRequest{
{
-`{"model":"gpt-3.5-turbo","messages":[{"role":"system","content":"You are a helpful Slack bot. By default, keep your answer short and truthful"},{"role":"user","content":"whats 1+1?"}],"stream":true}`,
-`data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
+`{"model":"gpt-4o","messages":[{"role":"system","content":"You are a helpful Slack bot. By default, keep your answer short and truthful"},{"role":"user","content":"whats 1+1?"}],"stream":true}`,
+`data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"The answer "},"index":0,"finish_reason":null}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"content":"The answer "},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"is 2"},"index":0,"finish_reason":null}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"content":"is 2"},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
data: [DONE]`,
http.StatusOK,
},
{
-`{"model":"gpt-3.5-turbo","messages":[{"role":"system","content":"You are a helpful Slack bot. By default, keep your answer short and truthful"},{"role":"user","content":"whats 1+1?"},{"role":"assistant","content":"The answer is 2"},{"role":"user","content":"whats 2+1?"}],"stream":true}`,
+`{"model":"gpt-4o","messages":[{"role":"system","content":"You are a helpful Slack bot. By default, keep your answer short and truthful"},{"role":"user","content":"whats 1+1?"},{"role":"assistant","content":"The answer is 2"},{"role":"user","content":"whats 2+1?"}],"stream":true}`,
`{
"id": "chatcmpl-6p9XYPYSTTRi0xEviKjjilqrWU2Ve",
"object": "chat.completion",
"created": 1677649420,
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o",
"usage": {"prompt_tokens": 56, "completion_tokens": 31, "total_tokens": 87},
"choices": [
{
@@ -171,7 +171,7 @@ data: [DONE]`,
apiCompletionURL,
[]testRequest{
{
-`{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"whats 1+1?"}],"stream":true}`,
+`{"model":"gpt-4o","messages":[{"role":"user","content":"whats 1+1?"}],"stream":true}`,
`{
"error": {
"code": "invalid_api_key",
@@ -212,7 +212,7 @@ data: [DONE]`,

[]testRequest{
{
-`{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"whats 1+1?"}],"stream":true}`,
+`{"model":"gpt-4o","messages":[{"role":"user","content":"whats 1+1?"}],"stream":true}`,
`{
"error": {
"code": "invalid_api_key",
@@ -252,12 +252,12 @@ data: [DONE]`,
apiCompletionURL,
[]testRequest{
{
-`{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"whats 1+1?"}]}`,
+`{"model":"gpt-4o","messages":[{"role":"user","content":"whats 1+1?"}]}`,
`{
"id": "chatcmpl-6p9XYPYSTTRi0xEviKjjilqrWU2Ve",
"object": "chat.completion",
"created": 1677649420,
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o",
"usage": {"prompt_tokens": 56, "completion_tokens": 31, "total_tokens": 87},
"choices": [
{
@@ -293,12 +293,12 @@ data: [DONE]`,
apiCompletionURL,
[]testRequest{
{
-`{"model":"gpt-3.5-turbo","messages":[{"role":"system","content":"You are a helpful Slack bot. By default, keep your answer short and truthful"},{"role":"system","content":"This is a Slack bot receiving a slack thread s context, using slack user ids as identifiers. Please use user mentions in the format \u003c@U123456\u003e"},{"role":"user","content":"User \u003c@U1234\u003e wrote: thread message 1"},{"role":"user","content":"whats 1+1?"}],"stream":true}`,
-`data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
+`{"model":"gpt-4o","messages":[{"role":"system","content":"You are a helpful Slack bot. By default, keep your answer short and truthful"},{"role":"system","content":"This is a Slack bot receiving a slack thread s context, using slack user ids as identifiers. Please use user mentions in the format \u003c@U123456\u003e"},{"role":"user","content":"User \u003c@U1234\u003e wrote: thread message 1"},{"role":"user","content":"whats 1+1?"}],"stream":true}`,
+`data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Jolo!"},"index":0,"finish_reason":null}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"content":"Jolo!"},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
data: [DONE]`,
http.StatusOK,
@@ -352,11 +352,11 @@ data: [DONE]`,
[]testRequest{
{
`{"model":"dummy-test","messages":[{"role":"user","content":"User \u003c@U1234\u003e wrote: i had a great weekend"},{"role":"user","content":"summarize this thread "}],"stream":true}`,
-`data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
+`data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"role":"assistant"},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Jolo!"},"index":0,"finish_reason":null}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{"content":"Jolo!"},"index":0,"finish_reason":null}]}
-data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-3.5-turbo-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
+data: {"id":"chatcmpl-6tuxebSPdmd2IJpb8GrZXHiYXON6r","object":"chat.completion.chunk","created":1678785018,"model":"gpt-4o-0301","choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}
data: [DONE]`,
http.StatusOK,
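The streamed fixtures above use OpenAI's server-sent-events format: each "data:" line carries a chat.completion.chunk whose delta.content fragments are concatenated into the final answer, and "data: [DONE]" terminates the stream. A minimal, self-contained sketch (illustration only, not code from this repository) of reassembling such a stream:

package main

import (
    "bufio"
    "encoding/json"
    "fmt"
    "strings"
)

// chunk mirrors only the fields of a chat.completion.chunk needed to rebuild the text.
type chunk struct {
    Choices []struct {
        Delta struct {
            Content string `json:"content"`
        } `json:"delta"`
    } `json:"choices"`
}

// assemble concatenates the delta contents of an SSE stream until "data: [DONE]".
func assemble(stream string) (string, error) {
    var out strings.Builder
    scanner := bufio.NewScanner(strings.NewReader(stream))
    for scanner.Scan() {
        line := strings.TrimSpace(scanner.Text())
        if !strings.HasPrefix(line, "data: ") {
            continue // skip the blank separator lines between events
        }
        payload := strings.TrimPrefix(line, "data: ")
        if payload == "[DONE]" {
            break
        }
        var c chunk
        if err := json.Unmarshal([]byte(payload), &c); err != nil {
            return "", err
        }
        for _, choice := range c.Choices {
            out.WriteString(choice.Delta.Content)
        }
    }
    return out.String(), scanner.Err()
}

func main() {
    stream := "data: {\"choices\":[{\"delta\":{\"content\":\"The answer \"}}]}\n\ndata: {\"choices\":[{\"delta\":{\"content\":\"is 2\"}}]}\n\ndata: [DONE]\n"
    text, err := assemble(stream)
    if err != nil {
        panic(err)
    }
    fmt.Println(text) // prints "The answer is 2"
}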
14 changes: 7 additions & 7 deletions command/openai/tokens.go
@@ -7,13 +7,13 @@ import (
// https://platform.openai.com/docs/models/gpt-3-5
// https://platform.openai.com/docs/models/gpt-4
var maxTokens = map[string]int{
-"gpt-4": 8192,
-"gpt-4-32k": 32768,
-"gpt-4-1106-preview": 128000,
-"gpt-4-vision-preview": 128000,
-"gpt-3.5-turbo-16k": 16385,
-"gpt-3.5-turbo": 4096,
-"dummy-test": 100, // just for testing
+"gpt-4": 8192,
+"gpt-4-32k": 32768,
+"gpt-4-1106-preview": 128000,
+"gpt-4-turbo": 128000,
+"gpt-4o": 128000,
+"gpt-3.5-turbo": 16385,
+"dummy-test": 100, // just for testing
}

var modelDateRe = regexp.MustCompile(`-\d{4}`)
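The lookup that consumes this map presumably strips dated model suffixes (for example "gpt-4-0613") with the modelDateRe regexp above and falls back to a large context window for unknown models; that matches the expectations in the updated tests below. A hypothetical sketch, assuming the package-level maxTokens and modelDateRe shown in this diff (the real getMaxTokensForModel may differ):

// getMaxTokensForModelSketch is an illustrative stand-in, not the actual implementation:
// strip a trailing date suffix such as "-0613", look the base name up in maxTokens,
// and assume a 128k-token context window for unknown or future models.
func getMaxTokensForModelSketch(model string) int {
    base := modelDateRe.ReplaceAllString(model, "") // "gpt-4-0613" -> "gpt-4"
    if tokens, ok := maxTokens[base]; ok {
        return tokens
    }
    return 128000 // unknown model: assume a large context window
}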
8 changes: 4 additions & 4 deletions command/openai/tokens_test.go
@@ -1,6 +1,7 @@
package openai

import (
+"fmt"
"testing"

"github.com/stretchr/testify/assert"
@@ -14,15 +15,14 @@ func TestModels(t *testing.T) {
{"", 128000},
{"jolo", 128000},
{"gpt-4", 8192},
+{"gpt-4o", 128000},
{"gpt-4-0613", 8192},
{"gpt-4-32k-0613", 32768},
-{"gpt-3.5-turbo", 4096},
-{"gpt-3.5-turbo-16k-0613", 16385},
+{"gpt-3.5-turbo", 16385},
}

for _, testCase := range modelsTestCases {
actual := getMaxTokensForModel(testCase.input)
-assert.Equal(t, testCase.expected, actual)
+assert.Equal(t, testCase.expected, actual, fmt.Sprintf("Model %s", testCase.input))
}
}

