package openai

import (
	"io"
	"net/http"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)

// OaiResponsesCompactionHandler consumes a non-streaming OpenAI Responses
// compaction response: it reads the upstream body, surfaces any embedded
// OpenAI error, relays the raw bytes to the client via the service layer,
// and extracts token usage from the parsed payload.
//
// On failure it returns a NewAPIError (body read failure, unmarshal failure,
// or an OpenAI error object in the payload); on success it returns the
// usage, which is zero-valued when the upstream response carried no usage.
func OaiResponsesCompactionHandler(c *gin.Context, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	defer service.CloseResponseBodyGracefully(resp)

	body, readErr := io.ReadAll(resp.Body)
	if readErr != nil {
		return nil, types.NewOpenAIError(readErr, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
	}

	var compaction dto.OpenAIResponsesCompactionResponse
	if unmarshalErr := common.Unmarshal(body, &compaction); unmarshalErr != nil {
		return nil, types.NewOpenAIError(unmarshalErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}

	// A syntactically valid response may still carry an error object; treat
	// a populated error type as a failure using the upstream status code.
	if apiErr := compaction.GetOpenAIError(); apiErr != nil && apiErr.Type != "" {
		return nil, types.WithOpenAIError(*apiErr, resp.StatusCode)
	}

	// Pass the untouched upstream bytes through to the downstream client.
	service.IOCopyBytesGracefully(c, resp, body)

	var usage dto.Usage
	if u := compaction.Usage; u != nil {
		usage.PromptTokens = u.InputTokens
		usage.CompletionTokens = u.OutputTokens
		usage.TotalTokens = u.TotalTokens
		if details := u.InputTokensDetails; details != nil {
			usage.PromptTokensDetails.CachedTokens = details.CachedTokens
		}
	}
	return &usage, nil
}