| 12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273 |
- package ollama
- import (
- "errors"
- "github.com/gin-gonic/gin"
- "io"
- "net/http"
- "one-api/dto"
- "one-api/relay/channel"
- "one-api/relay/channel/openai"
- relaycommon "one-api/relay/common"
- relayconstant "one-api/relay/constant"
- "one-api/service"
- )
// Adaptor implements the relay channel adaptor interface for the Ollama
// upstream, translating OpenAI-style requests and responses to Ollama's API.
// It is stateless; all per-request data is carried in relaycommon.RelayInfo.
type Adaptor struct {
}
// Init is a no-op for the Ollama adaptor: no per-request adaptor state
// needs to be prepared before relaying.
func (a *Adaptor) Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIRequest) {
}
- func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
- switch info.RelayMode {
- case relayconstant.RelayModeEmbeddings:
- return info.BaseUrl + "/api/embeddings", nil
- default:
- return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil
- }
- }
// SetupRequestHeader populates the outbound request headers (auth,
// content type, etc.) via the shared channel helper. It always succeeds;
// the error return exists only to satisfy the adaptor interface.
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
	channel.SetupApiRequestHeader(info, c, req)
	return nil
}
- func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error) {
- if request == nil {
- return nil, errors.New("request is nil")
- }
- switch relayMode {
- case relayconstant.RelayModeEmbeddings:
- return requestOpenAI2Embeddings(*request), nil
- default:
- return requestOpenAI2Ollama(*request), nil
- }
- }
- func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
- return channel.DoApiRequest(a, c, info, requestBody)
- }
- func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
- if info.IsStream {
- var responseText string
- err, responseText, _ = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
- usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
- } else {
- if info.RelayMode == relayconstant.RelayModeEmbeddings {
- err, usage = ollamaEmbeddingHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
- } else {
- err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
- }
- }
- return
- }
// GetModelList returns the package-level list of model names this
// channel advertises as supported.
func (a *Adaptor) GetModelList() []string {
	return ModelList
}
// GetChannelName returns the package-level display name of this channel.
func (a *Adaptor) GetChannelName() string {
	return ChannelName
}
|