You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

openai.go 2.7 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112
  1. // Copyright 2023 The casbin Authors. All Rights Reserved.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. package model
  15. import (
  16. "context"
  17. "fmt"
  18. "io"
  19. "net/http"
  20. "strings"
  21. "github.com/casbin/casibase/proxy"
  22. "github.com/sashabaranov/go-openai"
  23. )
// OpenAiModelProvider is a model provider backed by the OpenAI API.
type OpenAiModelProvider struct {
	subType   string // OpenAI model identifier (e.g. a completion model name); empty selects a default at query time
	secretKey string // OpenAI API key used to authenticate requests
}
  28. func NewOpenAiModelProvider(subType string, secretKey string) (*OpenAiModelProvider, error) {
  29. return &OpenAiModelProvider{subType: subType, secretKey: secretKey}, nil
  30. }
  31. func getProxyClientFromToken(authToken string) *openai.Client {
  32. config := openai.DefaultConfig(authToken)
  33. config.HTTPClient = proxy.ProxyHttpClient
  34. c := openai.NewClientWithConfig(config)
  35. return c
  36. }
  37. func (p *OpenAiModelProvider) QueryText(question string, writer io.Writer, builder *strings.Builder) error {
  38. client := getProxyClientFromToken(p.secretKey)
  39. ctx := context.Background()
  40. flusher, ok := writer.(http.Flusher)
  41. if !ok {
  42. return fmt.Errorf("writer does not implement http.Flusher")
  43. }
  44. model := p.subType
  45. if model == "" {
  46. model = openai.GPT3TextDavinci003
  47. }
  48. // https://platform.openai.com/tokenizer
  49. // https://github.com/pkoukk/tiktoken-go#available-encodings
  50. promptTokens, err := GetTokenSize(model, question)
  51. if err != nil {
  52. return err
  53. }
  54. // https://platform.openai.com/docs/models/gpt-3-5
  55. maxTokens := 4097 - promptTokens
  56. respStream, err := client.CreateCompletionStream(
  57. ctx,
  58. openai.CompletionRequest{
  59. Model: model,
  60. Prompt: question,
  61. MaxTokens: maxTokens,
  62. Stream: true,
  63. },
  64. )
  65. if err != nil {
  66. return err
  67. }
  68. defer respStream.Close()
  69. isLeadingReturn := true
  70. for {
  71. completion, streamErr := respStream.Recv()
  72. if streamErr != nil {
  73. if streamErr == io.EOF {
  74. break
  75. }
  76. return streamErr
  77. }
  78. data := completion.Choices[0].Text
  79. if isLeadingReturn && len(data) != 0 {
  80. if strings.Count(data, "\n") == len(data) {
  81. continue
  82. } else {
  83. isLeadingReturn = false
  84. }
  85. }
  86. // Write the streamed data as Server-Sent Events
  87. if _, err = fmt.Fprintf(writer, "event: message\ndata: %s\n\n", data); err != nil {
  88. return err
  89. }
  90. flusher.Flush()
  91. // Append the response to the strings.Builder
  92. builder.WriteString(data)
  93. }
  94. return nil
  95. }