package middleware

import (
	"fmt"
	"net/http"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/constant"
	"github.com/QuantumNous/new-api/model"
	"github.com/QuantumNous/new-api/service"

	"github.com/gin-gonic/gin"
)

// SetupGeminiFileChannel selects a Gemini channel for File API operations.
//
// It is used instead of Distribute() for File API endpoints, since those
// endpoints don't require model-based channel selection. Because the channel
// cache is keyed by model name, the middleware probes a list of common Gemini
// model names until one yields an available channel, then sets up the request
// context for that channel. If no candidate model resolves to a channel, the
// request is aborted with 503 and the last channel-lookup error (if any).
func SetupGeminiFileChannel() func(c *gin.Context) {
	return func(c *gin.Context) {
		// Resolve the group to select channels for: the token-scoped group
		// takes precedence, falling back to the user's own group.
		usingGroup := common.GetContextKeyString(c, constant.ContextKeyUsingGroup)
		if usingGroup == "" {
			usingGroup = common.GetContextKeyString(c, constant.ContextKeyUserGroup)
		}

		// Candidate model names used purely as channel-selection keys.
		// The File API itself does not need a model, but channel lookup does.
		geminiModels := []string{
			"gemini-2.0-flash",
			"gemini-1.5-flash",
			"gemini-1.5-pro",
			"gemini-2.0-flash-exp",
			"gemini-pro",
			"gemini-1.0-pro",
		}

		var channel *model.Channel
		var err error
		var lastError error

		// Try each model until one resolves to an available channel.
		for _, modelName := range geminiModels {
			channel, _, err = service.CacheGetRandomSatisfiedChannel(&service.RetryParam{
				Ctx:        c,
				ModelName:  modelName,
				TokenGroup: usingGroup,
				Retry:      common.GetPointer(0),
			})
			if err == nil && channel != nil {
				// Found a channel: bind it to the request context and continue.
				newAPIError := SetupContextForSelectedChannel(c, channel, modelName)
				if newAPIError != nil {
					abortWithOpenAiMessage(c, http.StatusServiceUnavailable,
						fmt.Sprintf("设置 Gemini 渠道失败: %s", newAPIError.Error()))
					return
				}
				c.Next()
				return
			}
			// Only record actual errors; a (nil, nil) "no channel" result must
			// not clobber an earlier, more informative error.
			if err != nil {
				lastError = err
			}
		}

		// No channel found with any of the candidate models.
		errorMsg := "没有可用的 Gemini 文件 API 渠道"
		if lastError != nil {
			errorMsg = fmt.Sprintf("获取 Gemini 文件 API 渠道失败: %s", lastError.Error())
		}
		abortWithOpenAiMessage(c, http.StatusServiceUnavailable, errorMsg)
	}
}