// llm_sku.go
  1. package llm
  2. import (
  3. "yunion.io/x/jsonutils"
  4. api "yunion.io/x/onecloud/pkg/apis/llm"
  5. "yunion.io/x/onecloud/pkg/mcclient/options"
  6. )
  7. type LLMSkuListOptions struct {
  8. options.BaseListOptions
  9. LLMType string `json:"llm_type" choices:"ollama|comfyui|openclaw"`
  10. }
  11. func (o *LLMSkuListOptions) Params() (jsonutils.JSONObject, error) {
  12. return options.ListStructToParams(o)
  13. }
// LLMSkuShowOptions identifies a single LLM SKU to show.
type LLMSkuShowOptions struct {
	options.BaseShowOptions
}
  17. func (o *LLMSkuShowOptions) Params() (jsonutils.JSONObject, error) {
  18. return options.StructToParams(o)
  19. }
// LLMSkuCreateOptions defines the parameters for creating an LLM SKU.
type LLMSkuCreateOptions struct {
	LLMSkuBaseCreateOptions

	// MountedModels lists model ids to mount, e.g. "qwen2:0.5b-dup".
	MountedModels []string `help:"mounted models, <model_id> e.g. qwen2:0.5b-dup" json:"mounted_models"`
	// NOTE(review): ALL_CAPS underscore names are non-idiomatic Go; kept
	// as-is because Params below references the fields directly and the CLI
	// flag names are presumably derived from them — confirm before renaming.
	LLM_IMAGE_ID string `json:"llm_image_id"`
	LLM_TYPE     string `json:"llm_type" choices:"ollama|vllm|comfyui"`
	// PreferredModel and VllmArg are json:"-": they are not marshaled
	// directly but folded into llm_spec by Params (vllm type only).
	PreferredModel string   `help:"preferred model (vllm only), sets llm_spec.vllm.preferred_model" json:"-"`
	VllmArg        []string `help:"vLLM args in format key=value; use key= for flags without values" json:"-"`
}
  28. func (o *LLMSkuCreateOptions) Params() (jsonutils.JSONObject, error) {
  29. dict := jsonutils.NewDict()
  30. obj := jsonutils.Marshal(o)
  31. obj.Unmarshal(dict)
  32. if err := o.LLMSkuBaseCreateOptions.Params(dict); err != nil {
  33. return nil, err
  34. }
  35. fetchMountedModels(o.MountedModels, dict)
  36. vllmSpec, err := newVLLMSpecFromArgs(o.PreferredModel, o.VllmArg)
  37. if err != nil {
  38. return nil, err
  39. }
  40. if o.LLM_TYPE == string(api.LLM_CONTAINER_VLLM) && vllmSpec != nil {
  41. spec := &api.LLMSpec{
  42. Ollama: nil,
  43. Vllm: vllmSpec,
  44. Dify: nil,
  45. }
  46. dict.Set("llm_spec", jsonutils.Marshal(spec))
  47. }
  48. return dict, nil
  49. }
// LLMSkuDeleteOptions identifies a single LLM SKU to delete.
type LLMSkuDeleteOptions struct {
	options.BaseIdOptions
}
// GetId returns the id of the SKU targeted for deletion.
// NOTE(review): BaseIdOptions may already provide GetId — confirm whether
// this override is needed.
func (o *LLMSkuDeleteOptions) GetId() string {
	return o.ID
}
  56. func (o *LLMSkuDeleteOptions) Params() (jsonutils.JSONObject, error) {
  57. return options.StructToParams(o)
  58. }
// LLMSkuUpdateOptions defines the parameters for updating an LLM SKU.
type LLMSkuUpdateOptions struct {
	LLMSkuBaseUpdateOptions

	// MountedModels lists model ids to mount, e.g. "qwen2:0.5b-dup".
	MountedModels []string `help:"mounted models, <model_id> e.g. qwen2:0.5b-dup" json:"mounted_models"`
	// For ollama/vllm; backend merges into LLMSpec. Use dify-sku update for dify type.
	// NOTE(review): field is LlmImageId here but LLM_IMAGE_ID on create;
	// json tags match ("llm_image_id") so the wire format is consistent,
	// only the Go naming differs.
	LlmImageId string `json:"llm_image_id"`
	// PreferredModel and VllmArg are json:"-": folded into llm_spec by Params.
	PreferredModel string   `help:"preferred model (vllm only), sets llm_spec.vllm.preferred_model" json:"-"`
	VllmArg        []string `help:"vLLM args in format key=value; use key= for flags without values" json:"-"`
}
// GetId returns the id of the SKU targeted for update.
func (o *LLMSkuUpdateOptions) GetId() string {
	return o.ID
}
  70. func (o *LLMSkuUpdateOptions) Params() (jsonutils.JSONObject, error) {
  71. dict := jsonutils.NewDict()
  72. obj := jsonutils.Marshal(o)
  73. obj.Unmarshal(dict)
  74. if err := o.LLMSkuBaseUpdateOptions.Params(dict); err != nil {
  75. return nil, err
  76. }
  77. fetchMountedModels(o.MountedModels, dict)
  78. vllmSpec, err := newVLLMSpecFromArgs(o.PreferredModel, o.VllmArg)
  79. if err != nil {
  80. return nil, err
  81. }
  82. if vllmSpec != nil {
  83. spec := &api.LLMSpec{
  84. Ollama: nil,
  85. Vllm: vllmSpec,
  86. Dify: nil,
  87. }
  88. dict.Set("llm_spec", jsonutils.Marshal(spec))
  89. }
  90. return dict, nil
  91. }