// openai.go
package llm_client

import (
	"bufio"
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/mark3labs/mcp-go/mcp"
	"yunion.io/x/pkg/errors"

	api "yunion.io/x/onecloud/pkg/apis/llm"
	"yunion.io/x/onecloud/pkg/llm/models"
)
  17. func init() {
  18. models.RegisterLLMClientDriver(newOpenAI())
  19. }
  20. type openai struct{}
  21. func newOpenAI() models.ILLMClient {
  22. return new(openai)
  23. }
  24. func (o *openai) GetType() api.LLMClientType {
  25. return api.LLM_CLIENT_OPENAI
  26. }
  27. func buildOpenAIModelsURL(endpoint string) (string, error) {
  28. endpoint = strings.TrimSpace(endpoint)
  29. if endpoint == "" {
  30. return "", errors.Error("endpoint is empty")
  31. }
  32. baseURL, err := url.Parse(endpoint)
  33. if err != nil {
  34. return "", errors.Wrapf(err, "invalid endpoint URL %s", endpoint)
  35. }
  36. baseURL.RawQuery = ""
  37. baseURL.Fragment = ""
  38. path := strings.TrimRight(baseURL.Path, "/")
  39. switch {
  40. case path == "":
  41. baseURL.Path = "/v1/models"
  42. case strings.HasSuffix(path, "/v1/models"):
  43. baseURL.Path = path
  44. case strings.HasSuffix(path, "/v1"):
  45. baseURL.Path = path + "/models"
  46. default:
  47. baseURL.Path = path + "/v1/models"
  48. }
  49. return baseURL.String(), nil
  50. }
  51. func listOpenAIModelsWithClient(ctx context.Context, client *http.Client, endpoint string) ([]string, error) {
  52. modelsURL, err := buildOpenAIModelsURL(endpoint)
  53. if err != nil {
  54. return nil, err
  55. }
  56. req, err := http.NewRequestWithContext(ctx, http.MethodGet, modelsURL, nil)
  57. if err != nil {
  58. return nil, errors.Wrap(err, "create request")
  59. }
  60. req.Header.Set("Accept", "application/json")
  61. resp, err := client.Do(req)
  62. if err != nil {
  63. return nil, errors.Wrap(err, "do request")
  64. }
  65. defer resp.Body.Close()
  66. body, err := io.ReadAll(resp.Body)
  67. if err != nil {
  68. return nil, errors.Wrap(err, "read response body")
  69. }
  70. if resp.StatusCode != http.StatusOK {
  71. return nil, errors.Errorf("unexpected status code %d: %s", resp.StatusCode, string(body))
  72. }
  73. var modelResp OpenAIModelsResponse
  74. if err := json.Unmarshal(body, &modelResp); err != nil {
  75. return nil, errors.Wrapf(err, "decode response: %s", string(body))
  76. }
  77. ret := make([]string, 0, len(modelResp.Data))
  78. for _, model := range modelResp.Data {
  79. name := strings.TrimSpace(model.ID)
  80. if name == "" {
  81. continue
  82. }
  83. ret = append(ret, name)
  84. }
  85. return ret, nil
  86. }
  87. func (o *openai) ListModels(ctx context.Context, endpoint string) ([]string, error) {
  88. client := &http.Client{
  89. Timeout: 30 * time.Second,
  90. }
  91. return listOpenAIModelsWithClient(ctx, client, endpoint)
  92. }
  93. func (o *openai) Chat(ctx context.Context, mcpAgent *models.SMCPAgent, messages interface{}, tools interface{}) (models.ILLMChatResponse, error) {
  94. // 转换 messages
  95. var openaiMessages []OpenAIChatMessage
  96. if msgs, ok := messages.([]OpenAIChatMessage); ok {
  97. openaiMessages = msgs
  98. } else if msgs, ok := messages.([]models.ILLMChatMessage); ok {
  99. openaiMessages = make([]OpenAIChatMessage, len(msgs))
  100. for i, msg := range msgs {
  101. // Check if it's an OpenAIChatMessage to preserve ToolCallID and ReasoningContent
  102. if om, ok := msg.(*OpenAIChatMessage); ok {
  103. openaiMessages[i] = *om
  104. } else {
  105. // General conversion
  106. openaiMessages[i] = OpenAIChatMessage{
  107. Role: msg.GetRole(),
  108. Content: msg.GetContent(),
  109. }
  110. // 转换工具调用
  111. if toolCalls := msg.GetToolCalls(); len(toolCalls) > 0 {
  112. openaiMessages[i].ToolCalls = make([]OpenAIToolCall, len(toolCalls))
  113. for j, tc := range toolCalls {
  114. fc := tc.GetFunction()
  115. argsBytes, _ := json.Marshal(fc.GetArguments())
  116. openaiMessages[i].ToolCalls[j] = OpenAIToolCall{
  117. ID: tc.GetId(),
  118. Type: "function",
  119. Function: OpenAIFunctionCall{
  120. Name: fc.GetName(),
  121. Arguments: string(argsBytes),
  122. },
  123. }
  124. }
  125. }
  126. }
  127. }
  128. } else {
  129. return nil, errors.Error("invalid messages type")
  130. }
  131. // 转换 tools
  132. var openaiTools []OpenAITool
  133. if ts, ok := tools.([]OpenAITool); ok {
  134. openaiTools = ts
  135. } else if ts, ok := tools.([]models.ILLMTool); ok {
  136. openaiTools = make([]OpenAITool, len(ts))
  137. for i, t := range ts {
  138. tf := t.GetFunction()
  139. openaiTools[i] = OpenAITool{
  140. Type: t.GetType(),
  141. Function: OpenAIToolFunction{
  142. Name: tf.GetName(),
  143. Description: tf.GetDescription(),
  144. Parameters: tf.GetParameters(),
  145. },
  146. }
  147. }
  148. } else if tools == nil {
  149. openaiTools = nil
  150. }
  151. return o.doChatRequest(ctx, mcpAgent, openaiMessages, openaiTools)
  152. }
  153. type OpenAIModelsResponse struct {
  154. Object string `json:"object,omitempty"`
  155. Data []OpenAIModelEntry `json:"data"`
  156. }
  157. type OpenAIModelEntry struct {
  158. ID string `json:"id"`
  159. Object string `json:"object,omitempty"`
  160. OwnedBy string `json:"owned_by,omitempty"`
  161. }
  162. func (o *openai) ChatStream(ctx context.Context, mcpAgent *models.SMCPAgent, messages interface{}, tools interface{}, onChunk func(models.ILLMChatResponse) error) error {
  163. // 转换 messages
  164. var openaiMessages []OpenAIChatMessage
  165. if msgs, ok := messages.([]OpenAIChatMessage); ok {
  166. openaiMessages = msgs
  167. } else {
  168. var ilMsgs []models.ILLMChatMessage
  169. if msgs, ok := messages.([]models.ILLMChatMessage); ok {
  170. ilMsgs = msgs
  171. } else if msg, ok := messages.(models.ILLMChatMessage); ok {
  172. ilMsgs = []models.ILLMChatMessage{msg}
  173. } else {
  174. return errors.Error("invalid messages type")
  175. }
  176. openaiMessages = make([]OpenAIChatMessage, len(ilMsgs))
  177. for i, msg := range ilMsgs {
  178. // Check if it's an OpenAIChatMessage to preserve ToolCallID and ReasoningContent
  179. if om, ok := msg.(*OpenAIChatMessage); ok {
  180. openaiMessages[i] = *om
  181. } else {
  182. // General conversion
  183. openaiMessages[i] = OpenAIChatMessage{
  184. Role: msg.GetRole(),
  185. Content: msg.GetContent(),
  186. }
  187. // 转换工具调用
  188. if toolCalls := msg.GetToolCalls(); len(toolCalls) > 0 {
  189. openaiMessages[i].ToolCalls = make([]OpenAIToolCall, len(toolCalls))
  190. for j, tc := range toolCalls {
  191. fc := tc.GetFunction()
  192. argsBytes, _ := json.Marshal(fc.GetArguments())
  193. openaiMessages[i].ToolCalls[j] = OpenAIToolCall{
  194. ID: tc.GetId(),
  195. Type: "function",
  196. Function: OpenAIFunctionCall{
  197. Name: fc.GetName(),
  198. Arguments: string(argsBytes),
  199. },
  200. }
  201. }
  202. }
  203. }
  204. }
  205. }
  206. // 转换 tools
  207. var openaiTools []OpenAITool
  208. if ts, ok := tools.([]OpenAITool); ok {
  209. openaiTools = ts
  210. } else if ts, ok := tools.([]models.ILLMTool); ok {
  211. openaiTools = make([]OpenAITool, len(ts))
  212. for i, t := range ts {
  213. tf := t.GetFunction()
  214. openaiTools[i] = OpenAITool{
  215. Type: t.GetType(),
  216. Function: OpenAIToolFunction{
  217. Name: tf.GetName(),
  218. Description: tf.GetDescription(),
  219. Parameters: tf.GetParameters(),
  220. },
  221. }
  222. }
  223. } else if tools == nil {
  224. openaiTools = nil
  225. }
  226. return o.doChatStreamRequest(ctx, mcpAgent, openaiMessages, openaiTools, onChunk)
  227. }
  228. func (o *openai) doChatStreamRequest(ctx context.Context, mcpAgent *models.SMCPAgent, messages []OpenAIChatMessage, tools []OpenAITool, onChunk func(models.ILLMChatResponse) error) error {
  229. req := OpenAIChatRequest{
  230. Model: mcpAgent.Model,
  231. Messages: messages,
  232. Tools: tools,
  233. Stream: true,
  234. }
  235. reqBody, err := json.Marshal(req)
  236. if err != nil {
  237. return errors.Wrap(err, "marshal request")
  238. }
  239. endpoint := strings.TrimSuffix(mcpAgent.LLMUrl, "/")
  240. // Default to /v1/chat/completions if not specified and not a custom path
  241. if !strings.Contains(endpoint, "/chat/completions") {
  242. if strings.HasSuffix(endpoint, "/v1") {
  243. endpoint = endpoint + "/chat/completions"
  244. } else {
  245. endpoint = endpoint + "/v1/chat/completions"
  246. }
  247. }
  248. httpReq, err := http.NewRequestWithContext(ctx, "POST", endpoint, bytes.NewReader(reqBody))
  249. if err != nil {
  250. return errors.Wrap(err, "create request")
  251. }
  252. httpReq.Header.Set("Content-Type", "application/json")
  253. apiKey, err := mcpAgent.GetApiKey()
  254. if err != nil {
  255. return err
  256. }
  257. if apiKey != "" {
  258. httpReq.Header.Set("Authorization", "Bearer "+apiKey)
  259. }
  260. client := &http.Client{
  261. // Stream request no timeout
  262. Timeout: 0,
  263. }
  264. resp, err := client.Do(httpReq)
  265. if err != nil {
  266. return errors.Wrap(err, "do request")
  267. }
  268. defer resp.Body.Close()
  269. if resp.StatusCode != http.StatusOK {
  270. body, _ := io.ReadAll(resp.Body)
  271. return errors.Errorf("unexpected status code %d: %s", resp.StatusCode, string(body))
  272. }
  273. scanner := bufio.NewScanner(resp.Body)
  274. for scanner.Scan() {
  275. line := scanner.Text()
  276. line = strings.TrimSpace(line)
  277. if line == "" {
  278. continue
  279. }
  280. if !strings.HasPrefix(line, "data: ") {
  281. continue
  282. }
  283. data := strings.TrimPrefix(line, "data: ")
  284. if data == "[DONE]" {
  285. break
  286. }
  287. var chunk OpenAIChatStreamResponse
  288. if err := json.Unmarshal([]byte(data), &chunk); err != nil {
  289. return errors.Wrapf(err, "decode stream chunk: %s", data)
  290. }
  291. if onChunk != nil {
  292. if err := onChunk(&chunk); err != nil {
  293. return errors.Wrap(err, "process chunk")
  294. }
  295. }
  296. }
  297. if err := scanner.Err(); err != nil {
  298. return errors.Wrap(err, "read stream")
  299. }
  300. return nil
  301. }
  302. func (o *openai) doChatRequest(ctx context.Context, mcpAgent *models.SMCPAgent, messages []OpenAIChatMessage, tools []OpenAITool) (*OpenAIChatResponse, error) {
  303. req := OpenAIChatRequest{
  304. Model: mcpAgent.Model,
  305. Messages: messages,
  306. Tools: tools,
  307. }
  308. reqBody, err := json.Marshal(req)
  309. if err != nil {
  310. return nil, errors.Wrap(err, "marshal request")
  311. }
  312. endpoint := strings.TrimSuffix(mcpAgent.LLMUrl, "/")
  313. // Default to /v1/chat/completions if not specified and not a custom path
  314. if !strings.Contains(endpoint, "/chat/completions") {
  315. if strings.HasSuffix(endpoint, "/v1") {
  316. endpoint = endpoint + "/chat/completions"
  317. } else {
  318. endpoint = endpoint + "/v1/chat/completions"
  319. }
  320. }
  321. httpReq, err := http.NewRequestWithContext(ctx, "POST", endpoint, bytes.NewReader(reqBody))
  322. if err != nil {
  323. return nil, errors.Wrap(err, "create request")
  324. }
  325. httpReq.Header.Set("Content-Type", "application/json")
  326. apiKey, err := mcpAgent.GetApiKey()
  327. if err != nil {
  328. return nil, errors.Wrap(err, "get apiKey")
  329. }
  330. if apiKey != "" {
  331. httpReq.Header.Set("Authorization", "Bearer "+apiKey)
  332. }
  333. client := &http.Client{
  334. Timeout: 300 * time.Second,
  335. }
  336. resp, err := client.Do(httpReq)
  337. if err != nil {
  338. return nil, errors.Wrap(err, "do request")
  339. }
  340. defer resp.Body.Close()
  341. body, err := io.ReadAll(resp.Body)
  342. if err != nil {
  343. return nil, errors.Wrap(err, "read response body")
  344. }
  345. if resp.StatusCode != http.StatusOK {
  346. return nil, errors.Errorf("unexpected status code %d: %s", resp.StatusCode, string(body))
  347. }
  348. var chatResp OpenAIChatResponse
  349. if err := json.Unmarshal(body, &chatResp); err != nil {
  350. return nil, errors.Wrapf(err, "decode response: %s", string(body))
  351. }
  352. if len(chatResp.Choices) == 0 {
  353. return nil, errors.Error("no choices in response")
  354. }
  355. return &chatResp, nil
  356. }
  357. func (o *openai) NewUserMessage(content string) models.ILLMChatMessage {
  358. return &OpenAIChatMessage{
  359. Role: "user",
  360. Content: content,
  361. }
  362. }
  363. func (o *openai) NewAssistantMessage(content string) models.ILLMChatMessage {
  364. return &OpenAIChatMessage{
  365. Role: "assistant",
  366. Content: content,
  367. }
  368. }
  369. func (o *openai) NewAssistantMessageWithToolCalls(toolCalls []models.ILLMToolCall) models.ILLMChatMessage {
  370. openaiToolCalls := make([]OpenAIToolCall, len(toolCalls))
  371. for i, tc := range toolCalls {
  372. if otc, ok := tc.(*OpenAIToolCall); ok {
  373. openaiToolCalls[i] = *otc
  374. } else {
  375. fc := tc.GetFunction()
  376. argsBytes, _ := json.Marshal(fc.GetArguments())
  377. openaiToolCalls[i] = OpenAIToolCall{
  378. ID: tc.GetId(),
  379. Type: "function",
  380. Function: OpenAIFunctionCall{
  381. Name: fc.GetName(),
  382. Arguments: string(argsBytes),
  383. },
  384. }
  385. }
  386. }
  387. return &OpenAIChatMessage{
  388. Role: "assistant",
  389. ToolCalls: openaiToolCalls,
  390. }
  391. }
  392. func (o *openai) NewAssistantMessageWithToolCallsAndReasoning(reasoningContent, content string, toolCalls []models.ILLMToolCall) models.ILLMChatMessage {
  393. openaiToolCalls := make([]OpenAIToolCall, len(toolCalls))
  394. for i, tc := range toolCalls {
  395. if otc, ok := tc.(*OpenAIToolCall); ok {
  396. openaiToolCalls[i] = *otc
  397. } else {
  398. fc := tc.GetFunction()
  399. argsBytes, _ := json.Marshal(fc.GetArguments())
  400. openaiToolCalls[i] = OpenAIToolCall{
  401. ID: tc.GetId(),
  402. Type: "function",
  403. Function: OpenAIFunctionCall{
  404. Name: fc.GetName(),
  405. Arguments: string(argsBytes),
  406. },
  407. }
  408. }
  409. }
  410. return &OpenAIChatMessage{
  411. Role: "assistant",
  412. Content: content,
  413. ReasoningContent: reasoningContent,
  414. ToolCalls: openaiToolCalls,
  415. }
  416. }
  417. func (o *openai) NewToolMessage(toolId string, toolName string, content string) models.ILLMChatMessage {
  418. return &OpenAIChatMessage{
  419. Role: "tool",
  420. ToolCallID: toolId,
  421. Content: content,
  422. }
  423. }
  424. func (o *openai) NewSystemMessage(content string) models.ILLMChatMessage {
  425. return &OpenAIChatMessage{
  426. Role: "system",
  427. Content: content,
  428. }
  429. }
  430. func (o *openai) ConvertMCPTools(mcpTools []mcp.Tool) []models.ILLMTool {
  431. tools := make([]models.ILLMTool, len(mcpTools))
  432. for i, t := range mcpTools {
  433. var params map[string]interface{}
  434. if t.RawInputSchema != nil {
  435. _ = json.Unmarshal(t.RawInputSchema, &params)
  436. } else {
  437. schemaBytes, _ := json.Marshal(t.InputSchema)
  438. _ = json.Unmarshal(schemaBytes, &params)
  439. }
  440. tools[i] = &OpenAITool{
  441. Type: "function",
  442. Function: OpenAIToolFunction{
  443. Name: t.Name,
  444. Description: t.Description,
  445. Parameters: params,
  446. },
  447. }
  448. }
  449. return tools
  450. }
  451. // Structures
  452. type OpenAIChatMessage struct {
  453. Role string `json:"role"`
  454. Content string `json:"content"`
  455. ReasoningContent string `json:"reasoning_content,omitempty"`
  456. ToolCalls []OpenAIToolCall `json:"tool_calls,omitempty"`
  457. ToolCallID string `json:"tool_call_id,omitempty"`
  458. }
  459. func (m *OpenAIChatMessage) GetRole() string { return m.Role }
  460. func (m *OpenAIChatMessage) GetContent() string { return m.Content }
  461. func (m *OpenAIChatMessage) GetToolCalls() []models.ILLMToolCall {
  462. if len(m.ToolCalls) == 0 {
  463. return nil
  464. }
  465. toolCalls := make([]models.ILLMToolCall, len(m.ToolCalls))
  466. for i := range m.ToolCalls {
  467. tc := m.ToolCalls[i]
  468. toolCalls[i] = &tc
  469. }
  470. return toolCalls
  471. }
  472. type OpenAIToolCall struct {
  473. Index int `json:"index"`
  474. ID string `json:"id"`
  475. Type string `json:"type"`
  476. Function OpenAIFunctionCall `json:"function"`
  477. }
  478. func (tc *OpenAIToolCall) GetFunction() models.ILLMFunctionCall { return &tc.Function }
  479. func (tc *OpenAIToolCall) GetId() string { return tc.ID }
  480. func (tc *OpenAIToolCall) GetIndex() int { return tc.Index }
  481. type OpenAIFunctionCall struct {
  482. Name string `json:"name"`
  483. Arguments string `json:"arguments"`
  484. }
  485. func (fc *OpenAIFunctionCall) GetName() string { return fc.Name }
  486. func (fc *OpenAIFunctionCall) GetRawArguments() string { return fc.Arguments }
  487. func (fc *OpenAIFunctionCall) GetArguments() map[string]interface{} {
  488. var args map[string]interface{}
  489. _ = json.Unmarshal([]byte(fc.Arguments), &args)
  490. return args
  491. }
  492. type OpenAITool struct {
  493. Type string `json:"type"`
  494. Function OpenAIToolFunction `json:"function"`
  495. }
  496. func (t *OpenAITool) GetType() string { return t.Type }
  497. func (t *OpenAITool) GetFunction() models.ILLMToolFunction { return &t.Function }
  498. type OpenAIToolFunction struct {
  499. Name string `json:"name"`
  500. Description string `json:"description"`
  501. Parameters map[string]interface{} `json:"parameters"`
  502. }
  503. func (tf *OpenAIToolFunction) GetName() string { return tf.Name }
  504. func (tf *OpenAIToolFunction) GetDescription() string { return tf.Description }
  505. func (tf *OpenAIToolFunction) GetParameters() map[string]interface{} { return tf.Parameters }
  506. type OpenAIChatRequest struct {
  507. Model string `json:"model"`
  508. Messages []OpenAIChatMessage `json:"messages"`
  509. Tools []OpenAITool `json:"tools,omitempty"`
  510. Stream bool `json:"stream,omitempty"`
  511. }
  512. type OpenAIChatResponse struct {
  513. ID string `json:"id"`
  514. Choices []OpenAIChoice `json:"choices"`
  515. }
  516. type OpenAIChoice struct {
  517. Message OpenAIChatMessage `json:"message"`
  518. FinishReason string `json:"finish_reason"`
  519. }
  520. func (r *OpenAIChatResponse) GetContent() string {
  521. if len(r.Choices) > 0 {
  522. return r.Choices[0].Message.Content
  523. }
  524. return ""
  525. }
  526. func (r *OpenAIChatResponse) GetReasoningContent() string {
  527. if len(r.Choices) > 0 {
  528. return r.Choices[0].Message.ReasoningContent
  529. }
  530. return ""
  531. }
  532. func (r *OpenAIChatResponse) HasToolCalls() bool {
  533. return len(r.Choices) > 0 && len(r.Choices[0].Message.ToolCalls) > 0
  534. }
  535. func (r *OpenAIChatResponse) GetToolCalls() []models.ILLMToolCall {
  536. if len(r.Choices) == 0 {
  537. return nil
  538. }
  539. return r.Choices[0].Message.GetToolCalls()
  540. }
  541. type OpenAIChatStreamResponse struct {
  542. ID string `json:"id"`
  543. Choices []OpenAIChatStreamChoice `json:"choices"`
  544. }
  545. type OpenAIChatStreamChoice struct {
  546. Delta OpenAIChatStreamDelta `json:"delta"`
  547. FinishReason string `json:"finish_reason"`
  548. }
  549. type OpenAIChatStreamDelta struct {
  550. Role string `json:"role,omitempty"`
  551. Content string `json:"content,omitempty"`
  552. ReasoningContent string `json:"reasoning_content,omitempty"`
  553. ToolCalls []OpenAIToolCall `json:"tool_calls,omitempty"`
  554. }
  555. func (r *OpenAIChatStreamResponse) GetContent() string {
  556. if len(r.Choices) > 0 {
  557. return r.Choices[0].Delta.Content
  558. }
  559. return ""
  560. }
  561. func (r *OpenAIChatStreamResponse) GetReasoningContent() string {
  562. if len(r.Choices) > 0 {
  563. return r.Choices[0].Delta.ReasoningContent
  564. }
  565. return ""
  566. }
  567. func (r *OpenAIChatStreamResponse) HasToolCalls() bool {
  568. return len(r.Choices) > 0 && len(r.Choices[0].Delta.ToolCalls) > 0
  569. }
  570. func (r *OpenAIChatStreamResponse) GetToolCalls() []models.ILLMToolCall {
  571. if len(r.Choices) == 0 {
  572. return nil
  573. }
  574. toolCalls := make([]models.ILLMToolCall, len(r.Choices[0].Delta.ToolCalls))
  575. for i := range r.Choices[0].Delta.ToolCalls {
  576. tc := r.Choices[0].Delta.ToolCalls[i]
  577. toolCalls[i] = &tc
  578. }
  579. return toolCalls
  580. }