func.go

package compapi

import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "net/http"

    "wechat-api/internal/types"
    "wechat-api/internal/utils/contextkey"

    openai "github.com/openai/openai-go"
    "github.com/openai/openai-go/option"
    "github.com/openai/openai-go/packages/ssestream"
)

// NewAiClient creates an OpenAI-compatible client for the given API key and base URL.
func NewAiClient(apiKey string, apiBase string) *openai.Client {
    return openai.NewClient(option.WithAPIKey(apiKey),
        option.WithBaseURL(apiBase))
}

// NewFastgptClient creates a client bound to the FastGPT endpoint.
func NewFastgptClient(apiKey string) *openai.Client {
    return openai.NewClient(option.WithAPIKey(apiKey),
        option.WithBaseURL("http://fastgpt.ascrm.cn/api/v1/"))
}

// NewDeepSeekClient creates a client bound to the DeepSeek endpoint.
func NewDeepSeekClient(apiKey string) *openai.Client {
    return openai.NewClient(option.WithAPIKey(apiKey),
        option.WithBaseURL("https://api.deepseek.com"))
}

// DoChatCompletions sends a non-streaming chat completion request. The request body is
// the JSON-marshaled chatInfo (overriding the SDK's empty params), and the response body
// is decoded into types.CompOpenApiResp instead of the SDK's own response type.
func DoChatCompletions(ctx context.Context, client *openai.Client, chatInfo *types.CompApiReq) (*types.CompOpenApiResp, error) {
    var (
        jsonBytes []byte
        err       error
    )
    emptyParams := openai.ChatCompletionNewParams{}
    if jsonBytes, err = json.Marshal(chatInfo); err != nil {
        return nil, err
    }
    customResp := types.CompOpenApiResp{}
    reqBodyOps := option.WithRequestBody("application/json", jsonBytes)
    respBodyOps := option.WithResponseBodyInto(&customResp)
    if _, err = client.Chat.Completions.New(ctx, emptyParams, reqBodyOps, respBodyOps); err != nil {
        return nil, err
    }
    return &customResp, nil
}
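
// Example (sketch, not part of the original file): calling DoChatCompletions directly.
// The concrete fields of types.CompApiReq are defined elsewhere in this repo, so the
// request below is built from an assumed OpenAI-style JSON payload; adjust it to the
// real schema. The API key and base URL are placeholders.
func exampleDoChatCompletions(ctx context.Context) {
    client := NewAiClient("sk-placeholder", "https://api.openai.com/v1/")
    var req types.CompApiReq
    // Assumed payload shape mirroring the OpenAI chat/completions body.
    _ = json.Unmarshal([]byte(`{"model":"gpt-4o","messages":[{"role":"user","content":"hello"}]}`), &req)
    resp, err := DoChatCompletions(ctx, client, &req)
    if err != nil {
        fmt.Printf("chat completion failed: %v\n", err)
        return
    }
    fmt.Printf("chat completion response: %+v\n", resp)
}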

// DoChatCompletionsStream sends a streaming chat completion request and relays the SSE
// chunks to the http.ResponseWriter stored on the context. It returns (nil, nil) once the
// stream has been fully written to the client.
func DoChatCompletionsStream(ctx context.Context, client *openai.Client, chatInfo *types.CompApiReq) (res *types.CompOpenApiResp, err error) {
    var (
        jsonBytes []byte
        raw       *http.Response
        ok        bool
        hw        http.ResponseWriter
    )
    // Get the http.ResponseWriter from the context.
    hw, ok = contextkey.HttpResponseWriterKey.GetValue(ctx)
    if !ok {
        return nil, errors.New("failed to get http.ResponseWriter from context")
    }
    flusher, ok := hw.(http.Flusher)
    if !ok {
        http.Error(hw, "Streaming unsupported!", http.StatusInternalServerError)
        return nil, errors.New("http.ResponseWriter does not support flushing")
    }
    emptyParams := openai.ChatCompletionNewParams{}
    if jsonBytes, err = json.Marshal(chatInfo); err != nil {
        return nil, err
    }
    reqBodyOps := option.WithRequestBody("application/json", jsonBytes)
    respBodyOps := option.WithResponseBodyInto(&raw)
    if _, err = client.Chat.Completions.New(ctx, emptyParams, reqBodyOps, respBodyOps, option.WithJSONSet("stream", true)); err != nil {
        return nil, err
    }
    // Set SSE response headers (HTTP/1.1).
    hw.Header().Set("Content-Type", "text/event-stream;charset=utf-8")
    hw.Header().Set("Connection", "keep-alive")
    hw.Header().Set("Cache-Control", "no-cache")
    chatStream := ssestream.NewStream[ApiRespStreamChunk](ApiRespStreamDecoder(raw), err)
    defer chatStream.Close()
    // Relay each upstream chunk to the client as it arrives.
    for chatStream.Next() {
        chunk := chatStream.Current()
        fmt.Fprintf(hw, "event:%s\ndata:%s\n\n", chunk.Event, chunk.Data.RAW)
        flusher.Flush()
    }
    fmt.Fprintf(hw, "event:%s\ndata:%s\n\n", "answer", "[DONE]")
    flusher.Flush()
    return nil, nil
}
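
// Example (sketch, not part of the original file): wiring the streaming path from an
// HTTP handler. DoChatCompletionsStream expects the http.ResponseWriter to already be
// stored on the request context under contextkey.HttpResponseWriterKey; the SetValue
// call below is an assumption about that internal package's API, so substitute whatever
// setter it actually exposes. Key, base URL, and payload are placeholders; the function
// itself forces stream=true on the outgoing request via WithJSONSet.
func exampleChatStreamHandler(w http.ResponseWriter, r *http.Request) {
    ctx := contextkey.HttpResponseWriterKey.SetValue(r.Context(), w) // assumed setter
    client := NewAiClient("sk-placeholder", "https://api.openai.com/v1/")
    var req types.CompApiReq
    // Assumed payload shape mirroring the OpenAI chat/completions body.
    _ = json.Unmarshal([]byte(`{"model":"gpt-4o","messages":[{"role":"user","content":"hello"}]}`), &req)
    if _, err := DoChatCompletionsStream(ctx, client, &req); err != nil {
        fmt.Printf("stream request failed: %v\n", err)
    }
}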

// NewChatCompletions dispatches to the streaming or non-streaming path based on chatInfo.Stream.
func NewChatCompletions(ctx context.Context, client *openai.Client, chatInfo *types.CompApiReq) (*types.CompOpenApiResp, error) {
    if chatInfo.Stream {
        return DoChatCompletionsStream(ctx, client, chatInfo)
    }
    return DoChatCompletions(ctx, client, chatInfo)
}

// NewFastgptChatCompletions runs a chat completion against a FastGPT-compatible endpoint.
func NewFastgptChatCompletions(ctx context.Context, apiKey string, apiBase string, chatInfo *types.CompApiReq) (*types.CompOpenApiResp, error) {
    client := NewAiClient(apiKey, apiBase)
    return NewChatCompletions(ctx, client, chatInfo)
}

// NewDeepSeekChatCompletions runs a chat completion against DeepSeek, defaulting the model
// to ChatModelDeepSeekR1 unless ChatModelDeepSeekV3 is explicitly requested.
func NewDeepSeekChatCompletions(ctx context.Context, apiKey string, chatInfo *types.CompApiReq, chatModel openai.ChatModel) (res *types.CompOpenApiResp, err error) {
    client := NewDeepSeekClient(apiKey)
    if chatModel != ChatModelDeepSeekV3 {
        chatModel = ChatModelDeepSeekR1
    }
    chatInfo.Model = chatModel
    return NewChatCompletions(ctx, client, chatInfo)
}
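
// Example (sketch, not part of the original file): the DeepSeek helper. Any model other
// than ChatModelDeepSeekV3 is coerced to ChatModelDeepSeekR1 before the call, so callers
// only choose between those two package constants. The API key is a placeholder and the
// payload shape is an assumption.
func exampleDeepSeekChat(ctx context.Context) {
    var req types.CompApiReq
    _ = json.Unmarshal([]byte(`{"messages":[{"role":"user","content":"hello"}]}`), &req)
    resp, err := NewDeepSeekChatCompletions(ctx, "sk-placeholder", &req, ChatModelDeepSeekV3)
    if err != nil {
        fmt.Printf("deepseek chat failed: %v\n", err)
        return
    }
    fmt.Printf("deepseek chat response: %+v\n", resp)
}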

// DoChatCompletionsStreamOld is the earlier streaming implementation, kept for reference.
// It uses the SDK's NewStreaming helper and reads the http.ResponseWriter from a raw
// string context key instead of the typed contextkey helper used above.
func DoChatCompletionsStreamOld(ctx context.Context, client *openai.Client, chatInfo *types.CompApiReq) (res *types.CompOpenApiResp, err error) {
    var (
        jsonBytes []byte
    )
    emptyParams := openai.ChatCompletionNewParams{}
    if jsonBytes, err = json.Marshal(chatInfo); err != nil {
        return nil, err
    }
    reqBodyOps := option.WithRequestBody("application/json", jsonBytes)
    chatStream := client.Chat.Completions.NewStreaming(ctx, emptyParams, reqBodyOps)
    // Optionally, an accumulator helper can be used.
    acc := openai.ChatCompletionAccumulator{}
    httpWriter, ok := ctx.Value("HttpResp-Writer").(http.ResponseWriter)
    if !ok {
        return nil, errors.New("failed to get http.ResponseWriter from context")
    }
    idx := 0
    for chatStream.Next() {
        chunk := chatStream.Current()
        acc.AddChunk(chunk)
        fmt.Printf("=====>get %d chunk:%v\n", idx, chunk)
        if _, err := fmt.Fprintf(httpWriter, "%v", chunk); err != nil {
            fmt.Printf("Error writing to client:%v \n", err)
            break
        }
        if content, ok := acc.JustFinishedContent(); ok {
            println("Content stream finished:", content)
        }
        // If using tool calls.
        if tool, ok := acc.JustFinishedToolCall(); ok {
            println("Tool call stream finished:", tool.Index, tool.Name, tool.Arguments)
        }
        if refusal, ok := acc.JustFinishedRefusal(); ok {
            println("Refusal stream finished:", refusal)
        }
        // It's best to use chunks after handling JustFinished events.
        if len(chunk.Choices) > 0 {
            idx++
            fmt.Printf("idx:%d get =>'%s'\n", idx, chunk.Choices[0].Delta.Content)
        }
    }
    if err := chatStream.Err(); err != nil {
        return nil, err
    }
    return nil, nil
}