package llm

import (
	"context"
	"errors"

	"github.com/sashabaranov/go-openai"
	"go.uber.org/zap"
)
// OpenAIClient wraps the go-openai chat completion client with a logger and
// a fallback model.
type OpenAIClient struct {
	log          *zap.Logger
	client       *openai.Client
	defaultModel string
}

// NewOpenAIClient builds a client that authenticates with the given API token
// and falls back to defaultModel when a caller does not specify one.
func NewOpenAIClient(log *zap.Logger, defaultModel, token string) *OpenAIClient {
	return &OpenAIClient{
		log:          log,
		client:       openai.NewClient(token),
		defaultModel: defaultModel,
	}
}
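
// Construction in a minimal sketch (the env var name and model constant below
// are illustrative assumptions, not part of this package):
//
//	logger, _ := zap.NewProduction()
//	oc := NewOpenAIClient(logger, openai.GPT3Dot5Turbo, os.Getenv("OPENAI_API_KEY"))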

// EvaluateCCR sends a CodeChangeRequest to the chat completion API as a
// single user message and parses the reply into a CodeChangeResponse. An
// empty model falls back to the client's default.
func (oc *OpenAIClient) EvaluateCCR(ctx context.Context, model string, req CodeChangeRequest) (res CodeChangeResponse, err error) {
	if model == "" {
		model = oc.defaultModel
	}
	resp, err := oc.client.CreateChatCompletion(
		ctx,
		openai.ChatCompletionRequest{
			Model: model,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: req.String(),
				},
			},
		},
	)
	if err != nil {
		oc.log.Error("chat completion error", zap.Error(err))
		return res, err
	}
	// Guard against an empty choice list before indexing into it.
	if len(resp.Choices) == 0 {
		return res, errors.New("chat completion returned no choices")
	}

	choice := resp.Choices[0].Message.Content

	// TODO: demote to a debug log once log-level configuration is in place.
	oc.log.Info("got response from llm", zap.String("output", choice))

	return ParseCodeChangeResponse(choice), nil
}
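
// A call sketch: passing an empty model string uses the default set at
// construction (the request value req is assumed to be built elsewhere):
//
//	res, err := oc.EvaluateCCR(context.Background(), "", req)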

// EvaluateDiffComment sends a DiffCommentRequest to the chat completion API
// as a single user message and parses the reply into a DiffCommentResponse.
// An empty model falls back to the client's default.
func (oc *OpenAIClient) EvaluateDiffComment(ctx context.Context, model string, req DiffCommentRequest) (res DiffCommentResponse, err error) {
	if model == "" {
		model = oc.defaultModel
	}
	resp, err := oc.client.CreateChatCompletion(
		ctx,
		openai.ChatCompletionRequest{
			Model: model,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: req.String(),
				},
			},
		},
	)
	if err != nil {
		oc.log.Error("chat completion error", zap.Error(err))
		return res, err
	}
	// Guard against an empty choice list before indexing into it.
	if len(resp.Choices) == 0 {
		return res, errors.New("chat completion returned no choices")
	}

	choice := resp.Choices[0].Message.Content

	// TODO: demote to a debug log once log-level configuration is in place.
	oc.log.Info("got response from llm", zap.String("output", choice))

	return ParseDiffCommentResponse(choice), nil
}
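
// The two Evaluate methods share an identical round-trip; a sketch of a
// shared helper that could factor it out (an assumption about structure, not
// part of the original API):
//
//	func (oc *OpenAIClient) chatCompletion(ctx context.Context, model, prompt string) (string, error) {
//		if model == "" {
//			model = oc.defaultModel
//		}
//		resp, err := oc.client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
//			Model: model,
//			Messages: []openai.ChatCompletionMessage{
//				{Role: openai.ChatMessageRoleUser, Content: prompt},
//			},
//		})
//		if err != nil {
//			return "", err
//		}
//		if len(resp.Choices) == 0 {
//			return "", errors.New("chat completion returned no choices")
//		}
//		return resp.Choices[0].Message.Content, nil
//	}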