
Commit c759844

feat: add streaming support to OpenAI provider and related commands
- Add `isStream` flag to `commit` and `review` commands
- Introduce `stream` field in the `Client` struct in the OpenAI provider
- Implement `CreateChatCompletionStream` method in the OpenAI provider
- Add `WithStream` option for configuring the `stream` field in the OpenAI provider
- Update `config` struct to include `stream` field and set default to `false`

Signed-off-by: Bo-Yi Wu <[email protected]>
1 parent 4e27d4c commit c759844

File tree

- cmd/commit.go
- cmd/review.go
- provider/openai/openai.go
- provider/openai/options.go

4 files changed: +49 −0 lines changed


cmd/commit.go

Lines changed: 3 additions & 0 deletions
@@ -40,6 +40,7 @@ var (
 
 	defaultTimeout = 30 * time.Second
 	noConfirm      = false
+	isStream       = false
 )
 
 func init() {
@@ -64,6 +65,8 @@ func init() {
 		"show prompt only, don't send request to openai")
 	commitCmd.PersistentFlags().BoolVar(&noConfirm, "no_confirm", false,
 		"skip confirmation prompt")
+	commitCmd.PersistentFlags().BoolVar(&isStream, "stream", false,
+		"streaming completion")
 	_ = viper.BindPFlag("output.file", commitCmd.PersistentFlags().Lookup("file"))
 }
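The change above is standard cobra wiring: a package-level bool bound to a persistent --stream flag in init() and read when the command runs. A minimal, self-contained sketch of the same pattern (demoCmd and the printed messages are illustrative only, not part of CodeGPT):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// isStream mirrors the package-level flag variable used in cmd/commit.go.
var isStream = false

var demoCmd = &cobra.Command{
	Use: "demo",
	RunE: func(cmd *cobra.Command, args []string) error {
		if isStream {
			fmt.Println("streaming completion enabled")
			return nil
		}
		fmt.Println("blocking completion")
		return nil
	},
}

func init() {
	// Same registration style as commitCmd above: a persistent bool flag
	// defaulting to false, bound directly to the package-level variable.
	demoCmd.PersistentFlags().BoolVar(&isStream, "stream", false, "streaming completion")
}

func main() {
	if err := demoCmd.Execute(); err != nil {
		fmt.Println(err)
	}
}

Invoking the commit command with --stream therefore sets isStream before its run function executes.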

cmd/review.go

Lines changed: 1 addition & 0 deletions
@@ -29,6 +29,7 @@ func init() {
 		"replace the tip of the current branch by creating a new commit.")
 	reviewCmd.PersistentFlags().BoolVar(&promptOnly, "prompt_only", false,
 		"show prompt only, don't send request to openai")
+	reviewCmd.PersistentFlags().BoolVar(&isStream, "stream", false, "streaming mode")
 }
 
 var reviewCmd = &cobra.Command{

provider/openai/openai.go

Lines changed: 36 additions & 0 deletions
@@ -36,6 +36,9 @@ type Client struct {
 	// Positive values penalize new tokens based on their existing frequency in the text so far,
 	// decreasing the model's likelihood to repeat the same line verbatim.
 	frequencyPenalty float32
+
+	// stream is a flag that indicates whether the client is using a stream or not.
+	stream bool
 }
 
 type Response struct {
@@ -145,6 +148,38 @@ func (c *Client) CreateFunctionCall(
 	return c.client.CreateChatCompletion(ctx, req)
 }
 
+// CreateChatCompletionStream is an API call to create a stream for a chat message.
+func (c *Client) CreateChatCompletionStream(
+	ctx context.Context,
+	content string,
+) (stream *openai.ChatCompletionStream, err error) {
+	req := openai.ChatCompletionRequest{
+		Model:            c.model,
+		MaxTokens:        c.maxTokens,
+		Temperature:      c.temperature,
+		TopP:             c.topP,
+		FrequencyPenalty: c.frequencyPenalty,
+		PresencePenalty:  c.presencePenalty,
+		Messages: []openai.ChatCompletionMessage{
+			{
+				Role:    openai.ChatMessageRoleAssistant,
+				Content: "You are a helpful assistant.",
+			},
+			{
+				Role:    openai.ChatMessageRoleUser,
+				Content: content,
+			},
+		},
+	}
+
+	if checkOSeriesModels.MatchString(c.model) {
+		req.MaxTokens = 0
+		req.MaxCompletionTokens = c.maxTokens
+	}
+
+	return c.client.CreateChatCompletionStream(ctx, req)
+}
+
 // CreateChatCompletion is an API call to create a completion for a chat message.
 func (c *Client) CreateChatCompletion(
 	ctx context.Context,
@@ -208,6 +243,7 @@ func New(opts ...Option) (*Client, error) {
 		model:       cfg.model,
 		maxTokens:   cfg.maxTokens,
 		temperature: cfg.temperature,
+		stream:      cfg.stream,
 	}
 
 	// Create a new OpenAI config object with the given API token and other optional fields.
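CreateChatCompletionStream returns go-openai's *openai.ChatCompletionStream rather than a finished response, so the caller reads chunks as they arrive. A hedged sketch of such a consumer (printStream is hypothetical and not part of this commit; it assumes imports of errors, fmt, io, and github.com/sashabaranov/go-openai):

// printStream is a hypothetical consumer of the stream returned by
// CreateChatCompletionStream. It uses only the go-openai stream API:
// Recv() yields incremental deltas, io.EOF marks the end, Close() frees the connection.
func printStream(stream *openai.ChatCompletionStream) error {
	defer stream.Close()
	for {
		resp, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			// End of the streamed completion.
			fmt.Println()
			return nil
		}
		if err != nil {
			return err
		}
		// Each chunk carries only the delta for the next piece of the message.
		if len(resp.Choices) > 0 {
			fmt.Print(resp.Choices[0].Delta.Content)
		}
	}
}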

provider/openai/options.go

Lines changed: 9 additions & 0 deletions
@@ -165,6 +165,13 @@ func WithFrequencyPenalty(val float32) Option {
 	})
 }
 
+// WithStream returns a new Option that sets the stream for the client configuration.
+func WithStream(val bool) Option {
+	return optionFunc(func(c *config) {
+		c.stream = val
+	})
+}
+
 // config is a struct that stores configuration options for the instrumentation.
 type config struct {
 	baseURL string
@@ -183,6 +190,7 @@ type config struct {
 
 	provider   core.Platform
 	skipVerify bool
+	stream     bool
 	headers    []string
 	apiVersion string
 }
@@ -211,6 +219,7 @@ func newConfig(opts ...Option) *config {
 		temperature: defaultTemperature,
 		provider:    core.OpenAI,
 		topP:        defaultTopP,
+		stream:      false,
 	}
 
 	// Apply each of the given options to the config object.
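The new option slots into the existing functional-options constructor, so enabling streaming at construction time is a single extra argument to New. A minimal sketch, assuming the provider's import path is github.com/appleboy/CodeGPT/provider/openai and leaving every other setting (token, model, and so on) to the package defaults; only the option wiring is shown, and New may still require further configuration at runtime:

package example

import openai "github.com/appleboy/CodeGPT/provider/openai"

// newStreamingClient constructs the provider client with the stream flag set.
// WithStream(true) writes config.stream, which New copies into Client.stream.
func newStreamingClient() (*openai.Client, error) {
	return openai.New(
		openai.WithStream(true),
	)
}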
