From 39385ea0a0d0c474b3e20354825270d0c6bdeba0 Mon Sep 17 00:00:00 2001 From: ssongliu <73214554+ssongliu@users.noreply.github.com> Date: Tue, 18 Feb 2025 14:09:10 +0800 Subject: [PATCH] fix: Adjust the synchronization method for the Ollama model (#7895) --- backend/app/api/v1/ai.go | 46 +++- backend/app/dto/ai.go | 19 +- backend/app/dto/common_req.go | 5 + backend/app/model/ai.go | 11 + backend/app/repo/ai.go | 69 +++++ backend/app/service/ai.go | 269 ++++++++++++-------- backend/app/service/entry.go | 2 + backend/app/service/file.go | 8 +- backend/constant/status.go | 1 + backend/init/migration/migrate.go | 1 + backend/init/migration/migrations/v_1_10.go | 10 + backend/router/ro_ai.go | 2 + cmd/server/docs/docs.go | 124 ++++++++- cmd/server/docs/swagger.json | 124 ++++++++- cmd/server/docs/swagger.yaml | 80 +++++- frontend/src/api/interface/ai.ts | 11 +- frontend/src/api/modules/ai.ts | 10 +- frontend/src/components/log-file/index.vue | 3 +- frontend/src/lang/modules/en.ts | 3 + frontend/src/lang/modules/ja.ts | 3 + frontend/src/lang/modules/ko.ts | 3 + frontend/src/lang/modules/ms.ts | 4 + frontend/src/lang/modules/pt-br.ts | 4 + frontend/src/lang/modules/ru.ts | 4 + frontend/src/lang/modules/tw.ts | 3 + frontend/src/lang/modules/zh.ts | 3 + frontend/src/views/ai/model/del/index.vue | 118 +++++++++ frontend/src/views/ai/model/index.vue | 191 +++++++++++--- 28 files changed, 958 insertions(+), 173 deletions(-) create mode 100644 backend/app/model/ai.go create mode 100644 backend/app/repo/ai.go create mode 100644 frontend/src/views/ai/model/del/index.vue diff --git a/backend/app/api/v1/ai.go b/backend/app/api/v1/ai.go index f96e3f641..a4a1e4b72 100644 --- a/backend/app/api/v1/ai.go +++ b/backend/app/api/v1/ai.go @@ -32,6 +32,44 @@ func (b *BaseApi) CreateOllamaModel(c *gin.Context) { helper.SuccessWithData(c, nil) } +// @Tags AI +// @Summary Rereate Ollama model +// @Accept json +// @Param request body dto.OllamaModelName true "request" +// @Success 200 +// @Security ApiKeyAuth +// @Security Timestamp +// @Router /ai/ollama/model/recreate [post] +// @x-panel-log {"bodyKeys":["name"],"paramKeys":[],"BeforeFunctions":[],"formatZH":"添加模型重试 [name]","formatEN":"re-add Ollama model [name]"} +func (b *BaseApi) RecreateOllamaModel(c *gin.Context) { + var req dto.OllamaModelName + if err := helper.CheckBindAndValidate(&req, c); err != nil { + return + } + + if err := AIToolService.Recreate(req.Name); err != nil { + helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err) + return + } + helper.SuccessWithData(c, nil) +} + +// @Tags AI +// @Summary Sync Ollama model list +// @Success 200 {array} dto.OllamaModelDropList +// @Security ApiKeyAuth +// @Security Timestamp +// @Router /ai/ollama/model/sync [post] +// @x-panel-log {"bodyKeys":[],"paramKeys":[],"BeforeFunctions":[],"formatZH":"同步 Ollama 模型列表","formatEN":"sync Ollama model list"} +func (b *BaseApi) SyncOllamaModel(c *gin.Context) { + list, err := AIToolService.Sync() + if err != nil { + helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err) + return + } + helper.SuccessWithData(c, list) +} + // @Tags AI // @Summary Page Ollama models // @Accept json @@ -84,19 +122,19 @@ func (b *BaseApi) LoadOllamaModelDetail(c *gin.Context) { // @Tags AI // @Summary Delete Ollama model // @Accept json -// @Param request body dto.OllamaModelName true "request" +// @Param request body dto.ForceDelete true "request" // @Success 200 // @Security ApiKeyAuth // @Security Timestamp // 
@Router /ai/ollama/model/del [post] -// @x-panel-log {"bodyKeys":["name"],"paramKeys":[],"BeforeFunctions":[],"formatZH":"删除模型 [name]","formatEN":"remove Ollama model [name]"} +// @x-panel-log {"bodyKeys":["id"],"paramKeys":[],"BeforeFunctions":[{"input_column":"id","input_value":"id","isList":false,"db":"ollama_models","output_column":"name","output_value":"name"}],"formatZH":"删除 ollama 模型 [name]","formatEN":"remove ollama model [name]"} func (b *BaseApi) DeleteOllamaModel(c *gin.Context) { - var req dto.OllamaModelName + var req dto.ForceDelete if err := helper.CheckBindAndValidate(&req, c); err != nil { return } - if err := AIToolService.Delete(req.Name); err != nil { + if err := AIToolService.Delete(req); err != nil { helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err) return } diff --git a/backend/app/dto/ai.go b/backend/app/dto/ai.go index c6eeb92fa..1e0be554b 100644 --- a/backend/app/dto/ai.go +++ b/backend/app/dto/ai.go @@ -1,9 +1,22 @@ package dto +import "time" + type OllamaModelInfo struct { - Name string `json:"name"` - Size string `json:"size"` - Modified string `json:"modified"` + ID uint `json:"id"` + Name string `json:"name"` + Size string `json:"size"` + From string `json:"from"` + LogFileExist bool `json:"logFileExist"` + + Status string `json:"status"` + Message string `json:"message"` + CreatedAt time.Time `json:"createdAt"` +} + +type OllamaModelDropList struct { + ID uint `json:"id"` + Name string `json:"name"` } type OllamaModelName struct { diff --git a/backend/app/dto/common_req.go b/backend/app/dto/common_req.go index b493fac55..fc5b640b0 100644 --- a/backend/app/dto/common_req.go +++ b/backend/app/dto/common_req.go @@ -52,3 +52,8 @@ type OperationWithNameAndType struct { Name string `json:"name"` Type string `json:"type" validate:"required"` } + +type ForceDelete struct { + IDs []uint `json:"ids"` + ForceDelete bool `json:"forceDelete"` +} diff --git a/backend/app/model/ai.go b/backend/app/model/ai.go new file mode 100644 index 000000000..2165e96e8 --- /dev/null +++ b/backend/app/model/ai.go @@ -0,0 +1,11 @@ +package model + +type OllamaModel struct { + BaseModel + + Name string `json:"name"` + Size string `json:"size"` + From string `json:"from"` + Status string `json:"status"` + Message string `json:"message"` +} diff --git a/backend/app/repo/ai.go b/backend/app/repo/ai.go new file mode 100644 index 000000000..beae042b5 --- /dev/null +++ b/backend/app/repo/ai.go @@ -0,0 +1,69 @@ +package repo + +import ( + "github.com/1Panel-dev/1Panel/backend/app/model" + "github.com/1Panel-dev/1Panel/backend/global" +) + +type AiRepo struct{} + +type IAiRepo interface { + Get(opts ...DBOption) (model.OllamaModel, error) + List(opts ...DBOption) ([]model.OllamaModel, error) + Page(limit, offset int, opts ...DBOption) (int64, []model.OllamaModel, error) + Create(cronjob *model.OllamaModel) error + Update(id uint, vars map[string]interface{}) error + Delete(opts ...DBOption) error +} + +func NewIAiRepo() IAiRepo { + return &AiRepo{} +} + +func (u *AiRepo) Get(opts ...DBOption) (model.OllamaModel, error) { + var item model.OllamaModel + db := global.DB + for _, opt := range opts { + db = opt(db) + } + err := db.First(&item).Error + return item, err +} + +func (u *AiRepo) List(opts ...DBOption) ([]model.OllamaModel, error) { + var list []model.OllamaModel + db := global.DB.Model(&model.OllamaModel{}) + for _, opt := range opts { + db = opt(db) + } + err := db.Find(&list).Error + return list, err +} + +func (u *AiRepo) Page(page, size int, 
opts ...DBOption) (int64, []model.OllamaModel, error) { + var list []model.OllamaModel + db := global.DB.Model(&model.OllamaModel{}) + for _, opt := range opts { + db = opt(db) + } + count := int64(0) + db = db.Count(&count) + err := db.Limit(size).Offset(size * (page - 1)).Find(&list).Error + return count, list, err +} + +func (u *AiRepo) Create(item *model.OllamaModel) error { + return global.DB.Create(item).Error +} + +func (u *AiRepo) Update(id uint, vars map[string]interface{}) error { + return global.DB.Model(&model.OllamaModel{}).Where("id = ?", id).Updates(vars).Error +} + +func (u *AiRepo) Delete(opts ...DBOption) error { + db := global.DB + for _, opt := range opts { + db = opt(db) + } + return db.Delete(&model.OllamaModel{}).Error +} diff --git a/backend/app/service/ai.go b/backend/app/service/ai.go index 4de584af6..ebff2a129 100644 --- a/backend/app/service/ai.go +++ b/backend/app/service/ai.go @@ -12,10 +12,14 @@ import ( "github.com/1Panel-dev/1Panel/backend/app/dto" "github.com/1Panel-dev/1Panel/backend/app/dto/request" + "github.com/1Panel-dev/1Panel/backend/app/model" + "github.com/1Panel-dev/1Panel/backend/app/repo" "github.com/1Panel-dev/1Panel/backend/buserr" "github.com/1Panel-dev/1Panel/backend/constant" "github.com/1Panel-dev/1Panel/backend/global" "github.com/1Panel-dev/1Panel/backend/utils/cmd" + "github.com/jinzhu/copier" + "github.com/pkg/errors" ) type AIToolService struct{} @@ -23,7 +27,9 @@ type AIToolService struct{} type IAIToolService interface { Search(search dto.SearchWithPage) (int64, []dto.OllamaModelInfo, error) Create(name string) error - Delete(name string) error + Recreate(name string) error + Delete(req dto.ForceDelete) error + Sync() ([]dto.OllamaModelDropList, error) LoadDetail(name string) (string, error) BindDomain(req dto.OllamaBindDomain) error GetBindDomain(req dto.OllamaBindDomainReq) (*dto.OllamaBindDomainRes, error) @@ -35,78 +41,38 @@ func NewIAIToolService() IAIToolService { } func (u *AIToolService) Search(req dto.SearchWithPage) (int64, []dto.OllamaModelInfo, error) { - ollamaBaseInfo, err := appInstallRepo.LoadBaseInfo("ollama", "") - if err != nil { - return 0, nil, err - } - if ollamaBaseInfo.Status != constant.Running { - return 0, nil, nil - } - stdout, err := cmd.Execf("docker exec %s ollama list", ollamaBaseInfo.ContainerName) - if err != nil { - return 0, nil, err - } - var list []dto.OllamaModelInfo - modelMaps := make(map[string]struct{}) - lines := strings.Split(stdout, "\n") - for _, line := range lines { - parts := strings.Fields(line) - if len(parts) < 5 { - continue - } - if parts[0] == "NAME" { - continue - } - modelMaps[strings.ReplaceAll(parts[0], ":", "-")] = struct{}{} - list = append(list, dto.OllamaModelInfo{Name: parts[0], Size: parts[2] + " " + parts[3], Modified: strings.Join(parts[4:], " ")}) - } - entries, _ := os.ReadDir(path.Join(global.CONF.System.DataDir, "log", "AITools")) - for _, item := range entries { - if _, ok := modelMaps[item.Name()]; ok { - continue - } - if _, ok := modelMaps[item.Name()+":latest"]; ok { - continue - } - list = append(list, dto.OllamaModelInfo{Name: item.Name(), Size: "-", Modified: "-"}) - } + var options []repo.DBOption if len(req.Info) != 0 { - length, count := len(list), 0 - for count < length { - if !strings.Contains(list[count].Name, req.Info) { - list = append(list[:count], list[(count+1):]...) 
- length-- - } else { - count++ - } - } + options = append(options, commonRepo.WithLikeName(req.Info)) } - - var records []dto.OllamaModelInfo - total, start, end := len(list), (req.Page-1)*req.PageSize, req.Page*req.PageSize - if start > total { - records = make([]dto.OllamaModelInfo, 0) - } else { - if end >= total { - end = total - } - records = list[start:end] + total, list, err := aiRepo.Page(req.Page, req.PageSize, options...) + if err != nil { + return 0, nil, err } - return int64(total), records, err + var dtoLists []dto.OllamaModelInfo + for _, itemModel := range list { + var item dto.OllamaModelInfo + if err := copier.Copy(&item, &itemModel); err != nil { + return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error()) + } + logPath := path.Join(global.CONF.System.DataDir, "log", "AITools", itemModel.Name) + if _, err := os.Stat(logPath); err == nil { + item.LogFileExist = true + } + dtoLists = append(dtoLists, item) + } + return int64(total), dtoLists, err } func (u *AIToolService) LoadDetail(name string) (string, error) { if cmd.CheckIllegal(name) { return "", buserr.New(constant.ErrCmdIllegal) } - ollamaBaseInfo, err := appInstallRepo.LoadBaseInfo("ollama", "") + containerName, err := loadContainerName() if err != nil { return "", err } - if ollamaBaseInfo.Status != constant.Running { - return "", nil - } - stdout, err := cmd.Execf("docker exec %s ollama show %s", ollamaBaseInfo.ContainerName, name) + stdout, err := cmd.Execf("docker exec %s ollama show %s", containerName, name) if err != nil { return "", err } @@ -117,15 +83,52 @@ func (u *AIToolService) Create(name string) error { if cmd.CheckIllegal(name) { return buserr.New(constant.ErrCmdIllegal) } - ollamaBaseInfo, err := appInstallRepo.LoadBaseInfo("ollama", "") + modelInfo, _ := aiRepo.Get(commonRepo.WithByName(name)) + if modelInfo.ID != 0 { + return constant.ErrRecordExist + } + containerName, err := loadContainerName() if err != nil { return err } - if ollamaBaseInfo.Status != constant.Running { - return nil + logItem := path.Join(global.CONF.System.DataDir, "log", "AITools", name) + if _, err := os.Stat(path.Dir(logItem)); err != nil && os.IsNotExist(err) { + if err = os.MkdirAll(path.Dir(logItem), os.ModePerm); err != nil { + return err + } } - fileName := strings.ReplaceAll(name, ":", "-") - logItem := path.Join(global.CONF.System.DataDir, "log", "AITools", fileName) + info := model.OllamaModel{ + Name: name, + From: "local", + Status: constant.StatusWaiting, + } + if err := aiRepo.Create(&info); err != nil { + return err + } + file, err := os.OpenFile(logItem, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666) + if err != nil { + return err + } + go pullOllamaModel(file, containerName, info) + return nil +} + +func (u *AIToolService) Recreate(name string) error { + if cmd.CheckIllegal(name) { + return buserr.New(constant.ErrCmdIllegal) + } + modelInfo, _ := aiRepo.Get(commonRepo.WithByName(name)) + if modelInfo.ID == 0 { + return constant.ErrRecordNotFound + } + containerName, err := loadContainerName() + if err != nil { + return err + } + if err := aiRepo.Update(modelInfo.ID, map[string]interface{}{"status": constant.StatusWaiting, "from": "local"}); err != nil { + return err + } + logItem := path.Join(global.CONF.System.DataDir, "log", "AITools", name) if _, err := os.Stat(path.Dir(logItem)); err != nil && os.IsNotExist(err) { if err = os.MkdirAll(path.Dir(logItem), os.ModePerm); err != nil { return err @@ -135,40 +138,41 @@ func (u *AIToolService) Create(name string) error { if err != nil { return err } - 
go func() { - defer file.Close() - cmd := exec.Command("docker", "exec", ollamaBaseInfo.ContainerName, "ollama", "run", name) - multiWriter := io.MultiWriter(os.Stdout, file) - cmd.Stdout = multiWriter - cmd.Stderr = multiWriter - if err := cmd.Run(); err != nil { - global.LOG.Errorf("ollama pull %s failed, err: %v", name, err) - _, _ = file.WriteString("ollama pull failed!") - return - } - global.LOG.Infof("ollama pull %s successful!", name) - _, _ = file.WriteString("ollama pull successful!") - }() - + go pullOllamaModel(file, containerName, modelInfo) return nil } -func (u *AIToolService) Delete(name string) error { - if cmd.CheckIllegal(name) { - return buserr.New(constant.ErrCmdIllegal) +func (u *AIToolService) Delete(req dto.ForceDelete) error { + ollamaList, _ := aiRepo.List(commonRepo.WithIdsIn(req.IDs)) + if len(ollamaList) == 0 { + return constant.ErrRecordNotFound } - ollamaBaseInfo, err := appInstallRepo.LoadBaseInfo("ollama", "") - if err != nil { + containerName, err := loadContainerName() + if err != nil && !req.ForceDelete { return err } - if ollamaBaseInfo.Status != constant.Running { - return nil + for _, item := range ollamaList { + stdout, err := cmd.Execf("docker exec %s ollama rm %s", containerName, item.Name) + if err != nil && !req.ForceDelete { + return fmt.Errorf("handle ollama rm %s failed, stdout: %s, err: %v", item.Name, stdout, err) + } + _ = aiRepo.Delete(commonRepo.WithByID(item.ID)) + logItem := path.Join(global.CONF.System.DataDir, "log", "AITools", item.Name) + _ = os.Remove(logItem) } - stdout, err := cmd.Execf("docker exec %s ollama list", ollamaBaseInfo.ContainerName) + return nil +} + +func (u *AIToolService) Sync() ([]dto.OllamaModelDropList, error) { + containerName, err := loadContainerName() if err != nil { - return err + return nil, err } - isExist := false + stdout, err := cmd.Execf("docker exec %s ollama list", containerName) + if err != nil { + return nil, err + } + var list []model.OllamaModel lines := strings.Split(stdout, "\n") for _, line := range lines { parts := strings.Fields(line) @@ -178,25 +182,33 @@ func (u *AIToolService) Delete(name string) error { if parts[0] == "NAME" { continue } - if parts[0] == name { - isExist = true - break + list = append(list, model.OllamaModel{Name: parts[0], Size: parts[2] + " " + parts[3]}) + } + listInDB, _ := aiRepo.List() + var dropList []dto.OllamaModelDropList + for _, itemModel := range listInDB { + isExit := false + for i := 0; i < len(list); i++ { + if list[i].Name == itemModel.Name { + _ = aiRepo.Update(itemModel.ID, map[string]interface{}{"status": constant.StatusSuccess, "message": "", "size": list[i].Size}) + list = append(list[:i], list[(i+1):]...) 
+ isExit = true + break + } } + if !isExit && itemModel.Status != constant.StatusWaiting { + _ = aiRepo.Update(itemModel.ID, map[string]interface{}{"status": constant.StatusDeleted, "message": "not exist", "size": ""}) + dropList = append(dropList, dto.OllamaModelDropList{ID: itemModel.ID, Name: itemModel.Name}) + continue + } + } + for _, item := range list { + item.Status = constant.StatusSuccess + item.From = "remote" + _ = aiRepo.Create(&item) } - if isExist { - stdout, err := cmd.Execf("docker exec %s ollama rm %s", ollamaBaseInfo.ContainerName, name) - if err != nil { - return fmt.Errorf("handle ollama rm %s failed, stdout: %s, err: %v", name, stdout, err) - } - } - logItem := path.Join(global.CONF.System.DataDir, "log", "AITools", name) - _ = os.Remove(logItem) - logItem2 := path.Join(global.CONF.System.DataDir, "log", "AITools", strings.TrimSuffix(name, ":latest")) - if logItem2 != logItem { - _ = os.Remove(logItem2) - } - return nil + return dropList, nil } func (u *AIToolService) BindDomain(req dto.OllamaBindDomain) error { @@ -318,3 +330,46 @@ func (u *AIToolService) UpdateBindDomain(req dto.OllamaBindDomain) error { } return nil } + +func loadContainerName() (string, error) { + ollamaBaseInfo, err := appInstallRepo.LoadBaseInfo("ollama", "") + if err != nil { + return "", fmt.Errorf("ollama service is not found, err: %v", err) + } + if ollamaBaseInfo.Status != constant.Running { + return "", fmt.Errorf("container %s of ollama is not running, please check and retry!", ollamaBaseInfo.ContainerName) + } + return ollamaBaseInfo.ContainerName, nil +} + +func pullOllamaModel(file *os.File, containerName string, info model.OllamaModel) { + defer file.Close() + cmd := exec.Command("docker", "exec", containerName, "ollama", "pull", info.Name) + multiWriter := io.MultiWriter(os.Stdout, file) + cmd.Stdout = multiWriter + cmd.Stderr = multiWriter + _ = cmd.Run() + itemSize, err := loadModelSize(info.Name, containerName) + if len(itemSize) != 0 { + _ = aiRepo.Update(info.ID, map[string]interface{}{"status": constant.StatusSuccess, "size": itemSize}) + } else { + _ = aiRepo.Update(info.ID, map[string]interface{}{"status": constant.StatusFailed, "message": err.Error()}) + } + _, _ = file.WriteString("ollama pull completed!") +} + +func loadModelSize(name string, containerName string) (string, error) { + stdout, err := cmd.Execf("docker exec %s ollama list | grep %s", containerName, name) + if err != nil { + return "", err + } + lines := strings.Split(string(stdout), "\n") + for _, line := range lines { + parts := strings.Fields(line) + if len(parts) < 5 { + continue + } + return parts[2] + " " + parts[3], nil + } + return "", fmt.Errorf("no such model %s in ollama list, std: %s", name, string(stdout)) +} diff --git a/backend/app/service/entry.go b/backend/app/service/entry.go index 2c72dee06..0b91e5346 100644 --- a/backend/app/service/entry.go +++ b/backend/app/service/entry.go @@ -12,6 +12,8 @@ var ( appInstallRepo = repo.NewIAppInstallRepo() appInstallResourceRepo = repo.NewIAppInstallResourceRpo() + aiRepo = repo.NewIAiRepo() + mysqlRepo = repo.NewIMysqlRepo() postgresqlRepo = repo.NewIPostgresqlRepo() databaseRepo = repo.NewIDatabaseRepo() diff --git a/backend/app/service/file.go b/backend/app/service/file.go index bd3f19550..854352744 100644 --- a/backend/app/service/file.go +++ b/backend/app/service/file.go @@ -480,13 +480,7 @@ func (f *FileService) ReadLogByLine(req request.FileReadByLineReq) (*response.Fi case "image-pull", "image-push", "image-build", "compose-create": logFilePath = 
path.Join(global.CONF.System.TmpDir, fmt.Sprintf("docker_logs/%s", req.Name)) case "ollama-model": - fileName := strings.ReplaceAll(req.Name, ":", "-") - if _, err := os.Stat(fileName); err != nil { - if strings.HasSuffix(req.Name, ":latest") { - fileName = strings.TrimSuffix(req.Name, ":latest") - } - } - logFilePath = path.Join(global.CONF.System.DataDir, "log", "AITools", fileName) + logFilePath = path.Join(global.CONF.System.DataDir, "log", "AITools", req.Name) } lines, isEndOfFile, total, err := files.ReadFileByLine(logFilePath, req.Page, req.PageSize, req.Latest) diff --git a/backend/constant/status.go b/backend/constant/status.go index d71580013..284155837 100644 --- a/backend/constant/status.go +++ b/backend/constant/status.go @@ -6,6 +6,7 @@ const ( StatusWaiting = "Waiting" StatusSuccess = "Success" StatusFailed = "Failed" + StatusDeleted = "Deleted" StatusUploading = "Uploading" StatusEnable = "Enable" StatusDisable = "Disable" diff --git a/backend/init/migration/migrate.go b/backend/init/migration/migrate.go index 4ca6e8ef8..e73515842 100644 --- a/backend/init/migration/migrate.go +++ b/backend/init/migration/migrate.go @@ -102,6 +102,7 @@ func Init() { migrations.UpdateAppTag, migrations.UpdateApp, + migrations.AddOllamaModel, }) if err := m.Migrate(); err != nil { global.LOG.Error(err) diff --git a/backend/init/migration/migrations/v_1_10.go b/backend/init/migration/migrations/v_1_10.go index 8967fe837..656fc0932 100644 --- a/backend/init/migration/migrations/v_1_10.go +++ b/backend/init/migration/migrations/v_1_10.go @@ -380,3 +380,13 @@ var UpdateApp = &gormigrate.Migration{ return nil }, } + +var AddOllamaModel = &gormigrate.Migration{ + ID: "20250218-add-ollama-model", + Migrate: func(tx *gorm.DB) error { + if err := tx.AutoMigrate(&model.OllamaModel{}); err != nil { + return err + } + return nil + }, +} diff --git a/backend/router/ro_ai.go b/backend/router/ro_ai.go index 734101866..7f1bf3eca 100644 --- a/backend/router/ro_ai.go +++ b/backend/router/ro_ai.go @@ -16,7 +16,9 @@ func (a *AIToolsRouter) InitRouter(Router *gin.RouterGroup) { baseApi := v1.ApiGroupApp.BaseApi { aiToolsRouter.POST("/ollama/model", baseApi.CreateOllamaModel) + aiToolsRouter.POST("/ollama/model/recreate", baseApi.RecreateOllamaModel) aiToolsRouter.POST("/ollama/model/search", baseApi.SearchOllamaModel) + aiToolsRouter.POST("/ollama/model/sync", baseApi.SyncOllamaModel) aiToolsRouter.POST("/ollama/model/load", baseApi.LoadOllamaModelDetail) aiToolsRouter.POST("/ollama/model/del", baseApi.DeleteOllamaModel) aiToolsRouter.GET("/gpu/load", baseApi.LoadGpuInfo) diff --git a/cmd/server/docs/docs.go b/cmd/server/docs/docs.go index 718397f65..85e1663c6 100644 --- a/cmd/server/docs/docs.go +++ b/cmd/server/docs/docs.go @@ -185,7 +185,7 @@ const docTemplate = `{ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/dto.OllamaModelName" + "$ref": "#/definitions/dto.ForceDelete" } } ], @@ -195,12 +195,21 @@ const docTemplate = `{ } }, "x-panel-log": { - "BeforeFunctions": [], - "bodyKeys": [ - "name" + "BeforeFunctions": [ + { + "db": "ollama_models", + "input_column": "id", + "input_value": "id", + "isList": false, + "output_column": "name", + "output_value": "name" + } ], - "formatEN": "remove Ollama model [name]", - "formatZH": "删除模型 [name]", + "bodyKeys": [ + "id" + ], + "formatEN": "remove ollama model [name]", + "formatZH": "删除 ollama 模型 [name]", "paramKeys": [] } } @@ -243,6 +252,50 @@ const docTemplate = `{ } } }, + "/ai/ollama/model/recreate": { + "post": { + "security": [ + { + 
"ApiKeyAuth": [] + }, + { + "Timestamp": [] + } + ], + "consumes": [ + "application/json" + ], + "tags": [ + "AI" + ], + "summary": "Rereate Ollama model", + "parameters": [ + { + "description": "request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/dto.OllamaModelName" + } + } + ], + "responses": { + "200": { + "description": "OK" + } + }, + "x-panel-log": { + "BeforeFunctions": [], + "bodyKeys": [ + "name" + ], + "formatEN": "re-add Ollama model [name]", + "formatZH": "添加模型重试 [name]", + "paramKeys": [] + } + } + }, "/ai/ollama/model/search": { "post": { "security": [ @@ -281,6 +334,40 @@ const docTemplate = `{ } } }, + "/ai/ollama/model/sync": { + "post": { + "security": [ + { + "ApiKeyAuth": [] + }, + { + "Timestamp": [] + } + ], + "tags": [ + "AI" + ], + "summary": "Sync Ollama model list", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/dto.OllamaModelDropList" + } + } + } + }, + "x-panel-log": { + "BeforeFunctions": [], + "bodyKeys": [], + "formatEN": "sync Ollama model list", + "formatZH": "同步 Ollama 模型列表", + "paramKeys": [] + } + } + }, "/apps/checkupdate": { "get": { "security": [ @@ -19234,6 +19321,20 @@ const docTemplate = `{ } } }, + "dto.ForceDelete": { + "type": "object", + "properties": { + "forceDelete": { + "type": "boolean" + }, + "ids": { + "type": "array", + "items": { + "type": "integer" + } + } + } + }, "dto.ForwardRuleOperate": { "type": "object", "properties": { @@ -20583,6 +20684,17 @@ const docTemplate = `{ } } }, + "dto.OllamaModelDropList": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + }, "dto.OllamaModelName": { "type": "object", "properties": { diff --git a/cmd/server/docs/swagger.json b/cmd/server/docs/swagger.json index 599a34829..3430b0e73 100644 --- a/cmd/server/docs/swagger.json +++ b/cmd/server/docs/swagger.json @@ -182,7 +182,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/dto.OllamaModelName" + "$ref": "#/definitions/dto.ForceDelete" } } ], @@ -192,12 +192,21 @@ } }, "x-panel-log": { - "BeforeFunctions": [], - "bodyKeys": [ - "name" + "BeforeFunctions": [ + { + "db": "ollama_models", + "input_column": "id", + "input_value": "id", + "isList": false, + "output_column": "name", + "output_value": "name" + } ], - "formatEN": "remove Ollama model [name]", - "formatZH": "删除模型 [name]", + "bodyKeys": [ + "id" + ], + "formatEN": "remove ollama model [name]", + "formatZH": "删除 ollama 模型 [name]", "paramKeys": [] } } @@ -240,6 +249,50 @@ } } }, + "/ai/ollama/model/recreate": { + "post": { + "security": [ + { + "ApiKeyAuth": [] + }, + { + "Timestamp": [] + } + ], + "consumes": [ + "application/json" + ], + "tags": [ + "AI" + ], + "summary": "Rereate Ollama model", + "parameters": [ + { + "description": "request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/dto.OllamaModelName" + } + } + ], + "responses": { + "200": { + "description": "OK" + } + }, + "x-panel-log": { + "BeforeFunctions": [], + "bodyKeys": [ + "name" + ], + "formatEN": "re-add Ollama model [name]", + "formatZH": "添加模型重试 [name]", + "paramKeys": [] + } + } + }, "/ai/ollama/model/search": { "post": { "security": [ @@ -278,6 +331,40 @@ } } }, + "/ai/ollama/model/sync": { + "post": { + "security": [ + { + "ApiKeyAuth": [] + }, + { + "Timestamp": [] + } + ], + "tags": [ + "AI" + ], + "summary": "Sync Ollama model list", + "responses": { + 
"200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/dto.OllamaModelDropList" + } + } + } + }, + "x-panel-log": { + "BeforeFunctions": [], + "bodyKeys": [], + "formatEN": "sync Ollama model list", + "formatZH": "同步 Ollama 模型列表", + "paramKeys": [] + } + } + }, "/apps/checkupdate": { "get": { "security": [ @@ -19231,6 +19318,20 @@ } } }, + "dto.ForceDelete": { + "type": "object", + "properties": { + "forceDelete": { + "type": "boolean" + }, + "ids": { + "type": "array", + "items": { + "type": "integer" + } + } + } + }, "dto.ForwardRuleOperate": { "type": "object", "properties": { @@ -20580,6 +20681,17 @@ } } }, + "dto.OllamaModelDropList": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + }, "dto.OllamaModelName": { "type": "object", "properties": { diff --git a/cmd/server/docs/swagger.yaml b/cmd/server/docs/swagger.yaml index 3c728e1e5..6a2f6880f 100644 --- a/cmd/server/docs/swagger.yaml +++ b/cmd/server/docs/swagger.yaml @@ -1567,6 +1567,15 @@ definitions: - type - vars type: object + dto.ForceDelete: + properties: + forceDelete: + type: boolean + ids: + items: + type: integer + type: array + type: object dto.ForwardRuleOperate: properties: rules: @@ -2484,6 +2493,13 @@ definitions: websiteID: type: integer type: object + dto.OllamaModelDropList: + properties: + id: + type: integer + name: + type: string + type: object dto.OllamaModelName: properties: name: @@ -6579,7 +6595,7 @@ paths: name: request required: true schema: - $ref: '#/definitions/dto.OllamaModelName' + $ref: '#/definitions/dto.ForceDelete' responses: "200": description: OK @@ -6590,11 +6606,17 @@ paths: tags: - AI x-panel-log: - BeforeFunctions: [] + BeforeFunctions: + - db: ollama_models + input_column: id + input_value: id + isList: false + output_column: name + output_value: name bodyKeys: - - name - formatEN: remove Ollama model [name] - formatZH: 删除模型 [name] + - id + formatEN: remove ollama model [name] + formatZH: 删除 ollama 模型 [name] paramKeys: [] /ai/ollama/model/load: post: @@ -6618,6 +6640,33 @@ paths: summary: Page Ollama models tags: - AI + /ai/ollama/model/recreate: + post: + consumes: + - application/json + parameters: + - description: request + in: body + name: request + required: true + schema: + $ref: '#/definitions/dto.OllamaModelName' + responses: + "200": + description: OK + security: + - ApiKeyAuth: [] + - Timestamp: [] + summary: Rereate Ollama model + tags: + - AI + x-panel-log: + BeforeFunctions: [] + bodyKeys: + - name + formatEN: re-add Ollama model [name] + formatZH: 添加模型重试 [name] + paramKeys: [] /ai/ollama/model/search: post: consumes: @@ -6640,6 +6689,27 @@ paths: summary: Page Ollama models tags: - AI + /ai/ollama/model/sync: + post: + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/dto.OllamaModelDropList' + type: array + security: + - ApiKeyAuth: [] + - Timestamp: [] + summary: Sync Ollama model list + tags: + - AI + x-panel-log: + BeforeFunctions: [] + bodyKeys: [] + formatEN: sync Ollama model list + formatZH: 同步 Ollama 模型列表 + paramKeys: [] /apps/{key}: get: consumes: diff --git a/frontend/src/api/interface/ai.ts b/frontend/src/api/interface/ai.ts index fef177397..87e0d5c39 100644 --- a/frontend/src/api/interface/ai.ts +++ b/frontend/src/api/interface/ai.ts @@ -2,9 +2,18 @@ import { ReqPage } from '.'; export namespace AI { export interface OllamaModelInfo { + id: number; name: string; size: string; - modified: string; + from: string; + 
logFileExist: boolean; + status: string; + message: string; + createdAt: Date; + } + export interface OllamaModelDropInfo { + id: number; + name: string; } export interface OllamaModelSearch extends ReqPage { info: string; diff --git a/frontend/src/api/modules/ai.ts b/frontend/src/api/modules/ai.ts index 270f15574..84d0568eb 100644 --- a/frontend/src/api/modules/ai.ts +++ b/frontend/src/api/modules/ai.ts @@ -5,8 +5,11 @@ import { ResPage } from '../interface'; export const createOllamaModel = (name: string) => { return http.post(`/ai/ollama/model`, { name: name }); }; -export const deleteOllamaModel = (name: string) => { - return http.post(`/ai/ollama/model/del`, { name: name }); +export const recreateOllamaModel = (name: string) => { + return http.post(`/ai/ollama/model/recreate`, { name: name }); +}; +export const deleteOllamaModel = (ids: Array, force: boolean) => { + return http.post(`/ai/ollama/model/del`, { ids: ids, forceDelete: force }); }; export const searchOllamaModel = (params: AI.OllamaModelSearch) => { return http.post>(`/ai/ollama/model/search`, params); @@ -14,6 +17,9 @@ export const searchOllamaModel = (params: AI.OllamaModelSearch) => { export const loadOllamaModel = (name: string) => { return http.post(`/ai/ollama/model/load`, { name: name }); }; +export const syncOllamaModel = () => { + return http.post>(`/ai/ollama/model/sync`); +}; export const loadGPUInfo = () => { return http.get(`/ai/gpu/load`); diff --git a/frontend/src/components/log-file/index.vue b/frontend/src/components/log-file/index.vue index 828d91665..846b745e2 100644 --- a/frontend/src/components/log-file/index.vue +++ b/frontend/src/components/log-file/index.vue @@ -72,8 +72,7 @@ const stopSignals = [ 'image pull successful!', 'image push failed!', 'image push successful!', - 'ollama pull failed!', - 'ollama pull successful!', + 'ollama pull completed!', ]; const emit = defineEmits(['update:loading', 'update:hasContent', 'update:isReading']); const tailLog = ref(false); diff --git a/frontend/src/lang/modules/en.ts b/frontend/src/lang/modules/en.ts index 2bdfe9179..3430fec1b 100644 --- a/frontend/src/lang/modules/en.ts +++ b/frontend/src/lang/modules/en.ts @@ -600,6 +600,9 @@ const message = { create_helper: 'Pull "{0}" from Ollama.com', ollama_doc: 'You can visit the Ollama official website to search and find more models.', container_conn_helper: 'Use this address for inter-container access or connection', + ollama_sync: 'Syncing Ollama model found the following models do not exist, do you want to delete them?', + from_remote: 'This model was not downloaded via 1Panel, no related pull logs.', + no_logs: 'The pull logs for this model have been deleted and cannot be viewed.', }, gpu: { gpu: 'GPU Monitor', diff --git a/frontend/src/lang/modules/ja.ts b/frontend/src/lang/modules/ja.ts index 357b73aba..0a8d34fe8 100644 --- a/frontend/src/lang/modules/ja.ts +++ b/frontend/src/lang/modules/ja.ts @@ -600,6 +600,9 @@ const message = { create_helper: 'Ollama.com から "{0}" を取得', ollama_doc: 'Ollama の公式ウェブサイトを訪れて、さらに多くのモデルを検索して見つけることができます。', container_conn_helper: 'コンテナ間のアクセスまたは接続にこのアドレスを使用', + ollama_sync: 'Ollamaモデルの同期中に、以下のモデルが存在しないことが判明しました。削除しますか?', + from_remote: 'このモデルは1Panelを介してダウンロードされておらず、関連するプルログはありません。', + no_logs: 'このモデルのプルログは削除されており、関連するログを表示できません。', }, gpu: { gpu: 'GPUモニター', diff --git a/frontend/src/lang/modules/ko.ts b/frontend/src/lang/modules/ko.ts index d552ebb7d..c7c36b05b 100644 --- a/frontend/src/lang/modules/ko.ts +++ b/frontend/src/lang/modules/ko.ts @@ -596,6 +596,9 @@ const message = { 
create_helper: 'Ollama.com에서 "{0}" 가져오기', ollama_doc: 'Ollama 공식 웹사이트를 방문하여 더 많은 모델을 검색하고 찾을 수 있습니다.', container_conn_helper: '컨테이너 간 접근 또는 연결에 이 주소를 사용', + ollama_sync: 'Ollama 모델 동기화 중 다음 모델이 존재하지 않음을 발견했습니다. 삭제하시겠습니까?', + from_remote: '이 모델은 1Panel을 통해 다운로드되지 않았으며 관련 풀 로그가 없습니다.', + no_logs: '이 모델의 풀 로그가 삭제되어 관련 로그를 볼 수 없습니다.', }, gpu: { gpu: 'GPU 모니터', diff --git a/frontend/src/lang/modules/ms.ts b/frontend/src/lang/modules/ms.ts index d4614bfc7..451b2647e 100644 --- a/frontend/src/lang/modules/ms.ts +++ b/frontend/src/lang/modules/ms.ts @@ -611,6 +611,10 @@ const message = { create_helper: 'Tarik "{0}" dari Ollama.com', ollama_doc: 'Anda boleh melawat laman web rasmi Ollama untuk mencari dan menemui lebih banyak model.', container_conn_helper: 'Gunakan alamat ini untuk akses atau sambungan antara kontena', + ollama_sync: + 'Sincronizando o modelo Ollama, encontrou que os seguintes modelos não existem, deseja excluí-los?', + from_remote: 'Este modelo não foi baixado via 1Panel, sem logs de pull relacionados.', + no_logs: 'Os logs de pull deste modelo foram excluídos e não podem ser visualizados.', }, gpu: { gpu: 'Monitor GPU', diff --git a/frontend/src/lang/modules/pt-br.ts b/frontend/src/lang/modules/pt-br.ts index 48ddfdce5..282ecdb03 100644 --- a/frontend/src/lang/modules/pt-br.ts +++ b/frontend/src/lang/modules/pt-br.ts @@ -608,6 +608,10 @@ const message = { create_helper: 'Puxar "{0}" do Ollama.com', ollama_doc: 'Você pode visitar o site oficial da Ollama para pesquisar e encontrar mais modelos.', container_conn_helper: 'Use este endereço para acesso ou conexão entre contêineres', + ollama_sync: + 'Menyelaraskan model Ollama mendapati model berikut tidak wujud, adakah anda ingin memadamnya?', + from_remote: 'Model ini tidak dimuat turun melalui 1Panel, tiada log pengambilan berkaitan.', + no_logs: 'Log pengambilan untuk model ini telah dipadam dan tidak dapat dilihat.', }, gpu: { gpu: 'Monitor de GPU', diff --git a/frontend/src/lang/modules/ru.ts b/frontend/src/lang/modules/ru.ts index 099e15391..c2928a4a0 100644 --- a/frontend/src/lang/modules/ru.ts +++ b/frontend/src/lang/modules/ru.ts @@ -606,6 +606,10 @@ const message = { create_helper: 'Загрузить "{0}" с Ollama.com', ollama_doc: 'Вы можете посетить официальный сайт Ollama, чтобы искать и находить больше моделей.', container_conn_helper: 'Используйте этот адрес для доступа или подключения между контейнерами', + ollama_sync: + 'Синхронизация модели Ollama обнаружила, что следующие модели не существуют, хотите удалить их?', + from_remote: 'Эта модель не была загружена через 1Panel, нет связанных журналов извлечения.', + no_logs: 'Журналы извлечения для этой модели были удалены и не могут быть просмотрены.', }, gpu: { gpu: 'Мониторинг GPU', diff --git a/frontend/src/lang/modules/tw.ts b/frontend/src/lang/modules/tw.ts index 9e6323ab6..4558b9e0a 100644 --- a/frontend/src/lang/modules/tw.ts +++ b/frontend/src/lang/modules/tw.ts @@ -581,6 +581,9 @@ const message = { create_helper: '從 Ollama.com 拉取 "{0}"', ollama_doc: '您可以訪問 Ollama 官方網站,搜索並查找更多模型。', container_conn_helper: '容器間訪問或連接使用此地址', + ollama_sync: '同步 Ollama 模型發現下列模型不存在,是否刪除?', + from_remote: '該模型並非通過 1Panel 下載,無相關拉取日誌。', + no_logs: '該模型的拉取日誌已被刪除,無法查看相關日誌。', }, gpu: { gpu: 'GPU 监控', diff --git a/frontend/src/lang/modules/zh.ts b/frontend/src/lang/modules/zh.ts index 8c78f1d62..d4720a440 100644 --- a/frontend/src/lang/modules/zh.ts +++ b/frontend/src/lang/modules/zh.ts @@ -582,6 +582,9 @@ const message = { create_helper: '从 Ollama.com 拉取 "{0}"', ollama_doc: '您可以访问 Ollama 
官网,搜索并查找更多模型。', container_conn_helper: '容器间访问或连接使用此地址', + ollama_sync: '同步 Ollama 模型发现下列模型不存在,是否删除?', + from_remote: '该模型并非通过 1Panel 下载,无相关拉取日志。', + no_logs: '该模型的拉取日志已被删除,无法查看相关日志。', }, gpu: { gpu: 'GPU 监控', diff --git a/frontend/src/views/ai/model/del/index.vue b/frontend/src/views/ai/model/del/index.vue new file mode 100644 index 000000000..53e161468 --- /dev/null +++ b/frontend/src/views/ai/model/del/index.vue @@ -0,0 +1,118 @@ + + + diff --git a/frontend/src/views/ai/model/index.vue b/frontend/src/views/ai/model/index.vue index cf1f9d93a..7e1ac3973 100644 --- a/frontend/src/views/ai/model/index.vue +++ b/frontend/src/views/ai/model/index.vue @@ -33,6 +33,9 @@ {{ $t('database.databaseConnInfo') }} + + {{ $t('database.loadFromRemote') }} + OpenWebUI + + {{ $t('commons.button.delete') }} +
@@ -51,36 +57,61 @@
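
For reference, a minimal usage sketch of the frontend API helpers this patch adds in `frontend/src/api/modules/ai.ts`. This is not part of the commit: the `@/api/modules/ai` import alias, the assumption that the `http` wrapper resolves to an envelope with a `.data` field, and the model name `llama3:8b` are all illustrative assumptions.

```typescript
import { deleteOllamaModel, recreateOllamaModel, syncOllamaModel } from '@/api/modules/ai';

async function reconcileOllamaModels() {
    // Reconcile panel records against `ollama list`; the response lists
    // records whose model no longer exists in the Ollama container
    // (the backend marks those records as Deleted).
    const dropped = await syncOllamaModel();

    // Retry a failed pull by model name (a record for it must already exist).
    await recreateOllamaModel('llama3:8b'); // hypothetical model name

    // Remove the orphaned records by id; passing `true` forces deletion
    // even if `ollama rm` or the container lookup fails.
    if (dropped.data?.length) {
        await deleteOllamaModel(
            dropped.data.map((item) => item.id),
            true,
        );
    }
}
```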