@@ -20,7 +20,6 @@ import (
 	dto "github.com/prometheus/client_model/go"
 	"github.com/openai/openai-go"
 	"github.com/openai/openai-go/option"
-	"go.opentelemetry.io/otel/attribute"
 )
 
 type Message struct {
@@ -425,6 +424,12 @@ func handleChat(client *openai.Client, model string) http.HandlerFunc {
 		// Track metrics for input tokens
 		chatTokensCounter.WithLabelValues("input", model).Add(float64(inputTokens))
 
+		// Start model timing
+		start := time.Now()
+		modelStartTime := time.Now()
+		var firstTokenTime time.Time
+		outputTokens := 0
+
 		var messages []openai.ChatCompletionMessageParamUnion
 		for _, msg := range req.Messages {
 			var message openai.ChatCompletionMessageParamUnion
@@ -438,11 +443,6 @@ func handleChat(client *openai.Client, model string) http.HandlerFunc {
 			messages = append(messages, message)
 		}
 
-		// Start model timing
-		modelStartTime := time.Now()
-		var firstTokenTime time.Time
-		outputTokens := 0
-
 		// Add the user message to the conversation
 		messages = append(messages, openai.UserMessage(req.Message))
 
@@ -492,4 +492,4 @@ func handleChat(client *openai.Client, model string) http.HandlerFunc {
 			return
 		}
 	}
-}
+}
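
The hunks above drop an unused OpenTelemetry import and move the model-timing state (`start`, `modelStartTime`, `firstTokenTime`, `outputTokens`) ahead of the message-assembly loop, so the clock starts as soon as the input tokens have been counted rather than after the conversation history is rebuilt. The part of the handler that consumes these variables is not shown in the diff; the sketch below is only an assumption of how such state is commonly turned into Prometheus observations, using made-up metric names (`chat_time_to_first_token_seconds`, `chat_model_latency_seconds`) and a hypothetical `recordStreamTimings` helper rather than anything confirmed by this commit.

```go
// A minimal sketch, not part of the diff: the metric names, registration, and
// helper below are assumptions illustrating one way the timing state added in
// the commit (modelStartTime, firstTokenTime, outputTokens) could be recorded.
package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

// Hypothetical histograms; the real handler may define different metrics.
var (
	timeToFirstTokenSeconds = prometheus.NewHistogramVec(
		prometheus.HistogramOpts{Name: "chat_time_to_first_token_seconds"},
		[]string{"model"},
	)
	modelLatencySeconds = prometheus.NewHistogramVec(
		prometheus.HistogramOpts{Name: "chat_model_latency_seconds"},
		[]string{"model"},
	)
)

// recordStreamTimings converts the timing variables into metric observations
// once the model response has been fully streamed.
func recordStreamTimings(model string, modelStartTime, firstTokenTime time.Time, outputTokens int) {
	if !firstTokenTime.IsZero() {
		// Time from starting the model call until the first streamed token arrived.
		timeToFirstTokenSeconds.WithLabelValues(model).Observe(firstTokenTime.Sub(modelStartTime).Seconds())
	}
	// Total time spent waiting on the model.
	modelLatencySeconds.WithLabelValues(model).Observe(time.Since(modelStartTime).Seconds())
	// outputTokens would typically feed the same counter used for input tokens,
	// e.g. chatTokensCounter.WithLabelValues("output", model).Add(float64(outputTokens)).
	_ = outputTokens
}

func main() {
	prometheus.MustRegister(timeToFirstTokenSeconds, modelLatencySeconds)
	// Example usage with made-up values.
	start := time.Now()
	recordStreamTimings("gpt-4o", start, start.Add(150*time.Millisecond), 42)
}
```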