// adaptor.go
  1. package ollama
  2. import (
  3. "errors"
  4. "github.com/gin-gonic/gin"
  5. "io"
  6. "net/http"
  7. "one-api/dto"
  8. "one-api/relay/channel"
  9. "one-api/relay/channel/openai"
  10. relaycommon "one-api/relay/common"
  11. relayconstant "one-api/relay/constant"
  12. "one-api/service"
  13. )
// Adaptor implements the relay channel adaptor interface for the
// Ollama provider. It is stateless; all per-request data arrives
// through the method parameters.
type Adaptor struct {
}
// Init satisfies the adaptor interface. Ollama needs no per-request
// initialization, so this is intentionally a no-op.
func (a *Adaptor) Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIRequest) {
}
  18. func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
  19. switch info.RelayMode {
  20. case relayconstant.RelayModeEmbeddings:
  21. return info.BaseUrl + "/api/embeddings", nil
  22. default:
  23. return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil
  24. }
  25. }
// SetupRequestHeader applies the common API request headers (auth,
// content type, etc.) via the shared channel helper. It never fails;
// the error return exists only to satisfy the adaptor interface.
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
	channel.SetupApiRequestHeader(info, c, req)
	return nil
}
  30. func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error) {
  31. if request == nil {
  32. return nil, errors.New("request is nil")
  33. }
  34. switch relayMode {
  35. case relayconstant.RelayModeEmbeddings:
  36. return requestOpenAI2Embeddings(*request), nil
  37. default:
  38. return requestOpenAI2Ollama(*request), nil
  39. }
  40. }
// DoRequest sends the converted request body upstream using the shared
// channel HTTP helper and returns the raw response for DoResponse to
// process.
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
	return channel.DoApiRequest(a, c, info, requestBody)
}
  44. func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
  45. if info.IsStream {
  46. var responseText string
  47. err, responseText, _ = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
  48. usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
  49. } else {
  50. if info.RelayMode == relayconstant.RelayModeEmbeddings {
  51. err, usage = ollamaEmbeddingHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
  52. } else {
  53. err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
  54. }
  55. }
  56. return
  57. }
// GetModelList returns the package-level list of model names this
// channel supports.
func (a *Adaptor) GetModelList() []string {
	return ModelList
}
// GetChannelName returns the package-level display name for this
// channel.
func (a *Adaptor) GetChannelName() string {
	return ChannelName
}