feat: support edit command for tensorrtLLM (#10708)

parent dd70be51b3
commit f94e60c53c

8 changed files with 205 additions and 112 deletions
Request DTOs (agent/app/dto/request). TensorRTLLMCreate loses the fixed Port, Model, and HostIP fields, gains a required Command, and embeds a new DockerConfig carrying user-defined ports, environment variables, and volume mounts:

@@ -10,12 +10,11 @@ type TensorRTLLMSearch struct {
 type TensorRTLLMCreate struct {
 	Name          string `json:"name" validate:"required"`
 	ContainerName string `json:"containerName" validate:"required"`
-	Port          int    `json:"port" validate:"required"`
 	Version       string `json:"version" validate:"required"`
 	ModelDir      string `json:"modelDir" validate:"required"`
-	Model         string `json:"model" validate:"required"`
-	HostIP        string `json:"hostIP"`
 	Image         string `json:"image" validate:"required"`
+	Command       string `json:"command" validate:"required"`
+	DockerConfig
 }
 
 type TensorRTLLMUpdate struct {

@@ -31,3 +30,9 @@ type TensorRTLLMOperate struct {
 	ID      uint   `json:"id" validate:"required"`
 	Operate string `json:"operate" validate:"required"`
 }
+
+type DockerConfig struct {
+	ExposedPorts []ExposedPort `json:"exposedPorts"`
+	Environments []Environment `json:"environments"`
+	Volumes      []Volume      `json:"volumes"`
+}
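For orientation, a create request under the new schema might be assembled like this. This is a sketch only: the field names come from the DTOs above, but every value (names, ports, the TLLM_LOG_LEVEL variable, the mount paths) is made up for illustration.

    // Sketch: exercising the new request shape; all values are hypothetical.
    create := request.TensorRTLLMCreate{
        Name:          "llm-demo",
        ContainerName: "trtllm-demo",
        Version:       "1.2.0rc0",
        ModelDir:      "/opt/models",
        Image:         "nvcr.io/nvidia/tensorrt-llm/release",
        Command:       `bash -c "trtllm-serve /models/my-model --host 0.0.0.0 --port 8000"`,
        DockerConfig: request.DockerConfig{
            ExposedPorts: []request.ExposedPort{{ContainerPort: 8000, HostPort: 8000, HostIP: "0.0.0.0"}},
            Environments: []request.Environment{{Key: "TLLM_LOG_LEVEL", Value: "INFO"}},
            Volumes:      []request.Volume{{Source: "/data/cache", Target: "/root/.cache"}},
        },
    }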
Response DTOs (agent/app/dto/response). The list DTO now carries the command and the parsed Docker settings, reusing the request types:

@@ -1,6 +1,9 @@
 package response
 
-import "github.com/1Panel-dev/1Panel/agent/app/model"
+import (
+	"github.com/1Panel-dev/1Panel/agent/app/dto/request"
+	"github.com/1Panel-dev/1Panel/agent/app/model"
+)
 
 type TensorRTLLMsRes struct {
 	Items []TensorRTLLMDTO `json:"items"`

@@ -14,4 +17,9 @@ type TensorRTLLMDTO struct {
 	Dir      string `json:"dir"`
 	ModelDir string `json:"modelDir"`
 	Image    string `json:"image"`
+	Command  string `json:"command"`
+
+	ExposedPorts []request.ExposedPort `json:"exposedPorts"`
+	Environments []request.Environment `json:"environments"`
+	Volumes      []request.Volume      `json:"volumes"`
 }
Model (agent/app/model). The dedicated Port column is retired in favor of the env-driven port list:

@@ -6,8 +6,8 @@ type TensorRTLLM struct {
 	DockerCompose string `json:"dockerCompose"`
 	ContainerName string `json:"containerName"`
 	Message       string `json:"message"`
-	Port          int    `json:"port"`
-	Status        string `json:"status"`
-	Env           string `json:"env"`
-	TaskID        string `json:"taskID"`
+	//Port int `json:"port"`
+	Status string `json:"status"`
+	Env    string `json:"env"`
+	TaskID string `json:"taskID"`
 }
Service (agent/app/service). New imports for the port and env handling:

@@ -1,6 +1,7 @@
 package service
 
 import (
+	"fmt"
 	"github.com/1Panel-dev/1Panel/agent/app/dto/request"
 	"github.com/1Panel-dev/1Panel/agent/app/dto/response"
 	"github.com/1Panel-dev/1Panel/agent/app/model"

@@ -15,7 +16,9 @@ import (
 	"github.com/subosito/gotenv"
 	"gopkg.in/yaml.v3"
 	"path"
+	"regexp"
+	"strconv"
 	"strings"
 )
 
 type TensorRTLLMService struct{}
Page() reconstructs the exposed ports from the indexed env keys and reads the extra environment entries and volumes back out of each instance's docker-compose.yml, filtering out the default ${MODEL_PATH}:/models mount:

@@ -44,12 +47,67 @@ func (t TensorRTLLMService) Page(req request.TensorRTLLMSearch) response.TensorR
 		serverDTO := response.TensorRTLLMDTO{
 			TensorRTLLM: item,
 		}
-		env, _ := gotenv.Unmarshal(item.Env)
-		serverDTO.Version = env["VERSION"]
-		serverDTO.Model = env["MODEL_NAME"]
-		serverDTO.ModelDir = env["MODEL_PATH"]
+		envs, _ := gotenv.Unmarshal(item.Env)
+		serverDTO.Version = envs["VERSION"]
+		serverDTO.ModelDir = envs["MODEL_PATH"]
 		serverDTO.Dir = path.Join(global.Dir.TensorRTLLMDir, item.Name)
-		serverDTO.Image = env["IMAGE"]
+		serverDTO.Image = envs["IMAGE"]
+		serverDTO.Command = envs["COMMAND"]
+
+		for k, v := range envs {
+			if strings.Contains(k, "CONTAINER_PORT") || strings.Contains(k, "HOST_PORT") {
+				if strings.Contains(k, "CONTAINER_PORT") {
+					r := regexp.MustCompile(`_(\d+)$`)
+					matches := r.FindStringSubmatch(k)
+					containerPort, err := strconv.Atoi(v)
+					if err != nil {
+						continue
+					}
+					hostPort, err := strconv.Atoi(envs[fmt.Sprintf("HOST_PORT_%s", matches[1])])
+					if err != nil {
+						continue
+					}
+					hostIP := envs[fmt.Sprintf("HOST_IP_%s", matches[1])]
+					if hostIP == "" {
+						hostIP = "0.0.0.0"
+					}
+					serverDTO.ExposedPorts = append(serverDTO.ExposedPorts, request.ExposedPort{
+						ContainerPort: containerPort,
+						HostPort:      hostPort,
+						HostIP:        hostIP,
+					})
+				}
+			}
+		}
+
+		composeByte, err := files.NewFileOp().GetContent(path.Join(global.Dir.TensorRTLLMDir, item.Name, "docker-compose.yml"))
+		if err != nil {
+			continue
+		}
+		serverDTO.Environments, err = getDockerComposeEnvironments(composeByte)
+		if err != nil {
+			continue
+		}
+		volumes, err := getDockerComposeVolumes(composeByte)
+		if err != nil {
+			continue
+		}
+
+		var defaultVolumes = map[string]string{
+			"${MODEL_PATH}": "/models",
+		}
+		for _, volume := range volumes {
+			exist := false
+			for key, value := range defaultVolumes {
+				if key == volume.Source && value == volume.Target {
+					exist = true
+					break
+				}
+			}
+			if !exist {
+				serverDTO.Volumes = append(serverDTO.Volumes, volume)
+			}
+		}
 		items = append(items, serverDTO)
 	}
 	res.Total = total
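To make the key scheme concrete: the suffix regex pulls the index out of a CONTAINER_PORT_<i> key, and the matching HOST_PORT_<i> and HOST_IP_<i> entries are looked up with the same suffix. A minimal, runnable sketch of just that step:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // Same pattern as in Page(): the trailing _<digits> names the port index.
        r := regexp.MustCompile(`_(\d+)$`)
        m := r.FindStringSubmatch("CONTAINER_PORT_0")
        fmt.Println(m[1]) // prints "0"; HOST_PORT_0 and HOST_IP_0 share this suffix
    }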
handleLLMParams now takes the create request so it can rewrite the compose definition:

@@ -57,10 +115,9 @@ func (t TensorRTLLMService) Page(req request.TensorRTLLMSearch) response.TensorR
 	return res
 }
 
-func handleLLMParams(llm *model.TensorRTLLM) error {
+func handleLLMParams(llm *model.TensorRTLLM, create request.TensorRTLLMCreate) error {
 	var composeContent []byte
 	if llm.ID == 0 {
-		//nvcr.io/nvidia/tensorrt-llm/release
 		composeContent = ai.DefaultTensorrtLLMCompose
 	} else {
 		composeContent = []byte(llm.DockerCompose)
Still in handleLLMParams, the service entry's ports, environment, and volumes are regenerated from the request; the ${MODEL_PATH}:/models mount is always emitted first:

@@ -88,6 +145,39 @@ func handleLLMParams(llm *model.TensorRTLLM) error {
 		delete(services, serviceName)
 	}
 
+	delete(serviceValue, "ports")
+	if len(create.ExposedPorts) > 0 {
+		var ports []interface{}
+		for i := range create.ExposedPorts {
+			containerPortStr := fmt.Sprintf("CONTAINER_PORT_%d", i)
+			hostPortStr := fmt.Sprintf("HOST_PORT_%d", i)
+			hostIPStr := fmt.Sprintf("HOST_IP_%d", i)
+			ports = append(ports, fmt.Sprintf("${%s}:${%s}:${%s}", hostIPStr, hostPortStr, containerPortStr))
+		}
+		serviceValue["ports"] = ports
+	}
+
+	delete(serviceValue, "environment")
+	var environments []interface{}
+	for _, e := range create.Environments {
+		environments = append(environments, fmt.Sprintf("%s=%s", e.Key, e.Value))
+	}
+	if len(environments) > 0 {
+		serviceValue["environment"] = environments
+	}
+
+	var volumes []interface{}
+	var defaultVolumes = map[string]string{
+		"${MODEL_PATH}": "/models",
+	}
+	for k, v := range defaultVolumes {
+		volumes = append(volumes, fmt.Sprintf("%s:%s", k, v))
+	}
+	for _, volume := range create.Volumes {
+		volumes = append(volumes, fmt.Sprintf("%s:%s", volume.Source, volume.Target))
+	}
+	serviceValue["volumes"] = volumes
+
 	services[llm.Name] = serviceValue
 	composeByte, err := yaml.Marshal(composeMap)
 	if err != nil {
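Under these rules, the service block written back into docker-compose.yml would look roughly like this. The service name and the extra environment/volume entries are illustrative; only the shapes of the ports, environment, and volumes lists follow from the code above:

    services:
      llm-demo:
        ports:
          - ${HOST_IP_0}:${HOST_PORT_0}:${CONTAINER_PORT_0}
        environment:
          - TLLM_LOG_LEVEL=INFO
        volumes:
          - ${MODEL_PATH}:/models
          - /data/cache:/root/.cache

The ports and environment keys are omitted entirely when the request supplies none, while the default model mount is always present.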
handleLLMEnv mirrors this on the .env side: COMMAND plus indexed CONTAINER_PORT_<i>/HOST_PORT_<i>/HOST_IP_<i> variables replace the old single port and HOST_IP:

@@ -100,15 +190,17 @@ func handleLLMParams(llm *model.TensorRTLLM) error {
 func handleLLMEnv(llm *model.TensorRTLLM, create request.TensorRTLLMCreate) gotenv.Env {
 	env := make(gotenv.Env)
 	env["CONTAINER_NAME"] = create.ContainerName
-	env["PANEL_APP_PORT_HTTP"] = strconv.Itoa(llm.Port)
 	env["MODEL_PATH"] = create.ModelDir
-	env["MODEL_NAME"] = create.Model
 	env["VERSION"] = create.Version
 	env["IMAGE"] = create.Image
-	if create.HostIP != "" {
-		env["HOST_IP"] = create.HostIP
-	} else {
-		env["HOST_IP"] = ""
-	}
+	env["COMMAND"] = create.Command
+	for i, port := range create.ExposedPorts {
+		containerPortStr := fmt.Sprintf("CONTAINER_PORT_%d", i)
+		hostPortStr := fmt.Sprintf("HOST_PORT_%d", i)
+		hostIPStr := fmt.Sprintf("HOST_IP_%d", i)
+		env[containerPortStr] = strconv.Itoa(port.ContainerPort)
+		env[hostPortStr] = strconv.Itoa(port.HostPort)
+		env[hostIPStr] = port.HostIP
+	}
 	envStr, _ := gotenv.Marshal(env)
 	llm.Env = envStr
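The .env this produces pairs with the compose sketch above; for one exposed port it would look something like the following (values illustrative, key order not significant):

    CONTAINER_NAME=trtllm-demo
    MODEL_PATH=/opt/models
    VERSION=1.2.0rc0
    IMAGE=nvcr.io/nvidia/tensorrt-llm/release
    COMMAND=bash -c "trtllm-serve /models/my-model --host 0.0.0.0 --port 8000"
    CONTAINER_PORT_0=8000
    HOST_PORT_0=8000
    HOST_IP_0=0.0.0.0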
Create() drops the single-port uniqueness check:

@@ -118,9 +210,6 @@ func handleLLMEnv(llm *model.TensorRTLLM, create request.TensorRTLLMCreate) gote
 func (t TensorRTLLMService) Create(create request.TensorRTLLMCreate) error {
 	servers, _ := tensorrtLLMRepo.List()
 	for _, server := range servers {
-		if server.Port == create.Port {
-			return buserr.New("ErrPortInUsed")
-		}
 		if server.ContainerName == create.ContainerName {
 			return buserr.New("ErrContainerName")
 		}
…and validates every requested host port instead:

@@ -128,8 +217,10 @@ func (t TensorRTLLMService) Create(create request.TensorRTLLMCreate) error {
 			return buserr.New("ErrNameIsExist")
 		}
 	}
-	if err := checkPortExist(create.Port); err != nil {
-		return err
+	for _, export := range create.ExposedPorts {
+		if err := checkPortExist(export.HostPort); err != nil {
+			return err
+		}
 	}
 	if err := checkContainerName(create.ContainerName); err != nil {
 		return err
The model is built without a Port, and the new handleLLMParams signature is used:

@@ -143,11 +234,10 @@ func (t TensorRTLLMService) Create(create request.TensorRTLLMCreate) error {
 	tensorrtLLM := &model.TensorRTLLM{
 		Name:          create.Name,
 		ContainerName: create.ContainerName,
-		Port:          create.Port,
 		Status:        constant.StatusStarting,
 	}
 
-	if err := handleLLMParams(tensorrtLLM); err != nil {
+	if err := handleLLMParams(tensorrtLLM, create); err != nil {
 		return err
 	}
 	env := handleLLMEnv(tensorrtLLM, create)
Update() likewise loses the port check:

@@ -174,11 +264,6 @@ func (t TensorRTLLMService) Update(req request.TensorRTLLMUpdate) error {
 	if err != nil {
 		return err
 	}
-	if tensorrtLLM.Port != req.Port {
-		if err := checkPortExist(req.Port); err != nil {
-			return err
-		}
-	}
 	if tensorrtLLM.ContainerName != req.ContainerName {
 		if err := checkContainerName(req.ContainerName); err != nil {
 			return err
…and reuses handleLLMEnv instead of patching the env map by hand:

@@ -186,31 +271,20 @@ func (t TensorRTLLMService) Update(req request.TensorRTLLMUpdate) error {
 	}
 
 	tensorrtLLM.ContainerName = req.ContainerName
-	tensorrtLLM.Port = req.Port
-	if err := handleLLMParams(tensorrtLLM); err != nil {
+	if err := handleLLMParams(tensorrtLLM, req.TensorRTLLMCreate); err != nil {
 		return err
 	}
 
-	newEnv, err := gotenv.Unmarshal(tensorrtLLM.Env)
-	if err != nil {
-		return err
-	}
-	newEnv["CONTAINER_NAME"] = req.ContainerName
-	newEnv["PANEL_APP_PORT_HTTP"] = strconv.Itoa(tensorrtLLM.Port)
-	newEnv["MODEL_PATH"] = req.ModelDir
-	newEnv["MODEL_NAME"] = req.Model
-	newEnv["VERSION"] = req.Version
-	newEnv["IMAGE"] = req.Image
-	if req.HostIP != "" {
-		newEnv["HOST_IP"] = req.HostIP
-	} else {
-		newEnv["HOST_IP"] = ""
-	}
-	envStr, _ := gotenv.Marshal(newEnv)
+	env := handleLLMEnv(tensorrtLLM, req.TensorRTLLMCreate)
+	envStr, _ := gotenv.Marshal(env)
 	tensorrtLLM.Env = envStr
 	llmDir := path.Join(global.Dir.TensorRTLLMDir, tensorrtLLM.Name)
 	envPath := path.Join(llmDir, ".env")
-	if err := gotenv.Write(newEnv, envPath); err != nil {
+	if err := gotenv.Write(env, envPath); err != nil {
 		return err
 	}
 	dockerComposePath := path.Join(llmDir, "docker-compose.yml")
 	if err := files.NewFileOp().SaveFile(dockerComposePath, tensorrtLLM.DockerCompose, 0644); err != nil {
 		return err
 	}
 	tensorrtLLM.Status = constant.StatusStarting
Default compose template (ai.DefaultTensorrtLLMCompose). The hard-coded port mapping and serve command become env-driven; ports are injected per instance by handleLLMParams:

@@ -15,8 +15,6 @@ services:
       - 1panel-network
     volumes:
       - ${MODEL_PATH}:/models
-    ports:
-      - ${PANEL_APP_PORT_HTTP}:8000
     ipc: host
     ulimits:
       memlock:

@@ -26,7 +24,7 @@ services:
         soft: 65535
        hard: 65535
      stack: 67108864
-    command: bash -c "trtllm-serve /models/${MODEL_NAME} --host 0.0.0.0 --port 8000"
+    command: ${COMMAND}
 networks:
   1panel-network:
     external: true
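docker compose resolves ${COMMAND} from the instance's generated .env at startup, so the trtllm-serve invocation is now entirely user-editable rather than baked into the template; that editable command is what the commit title refers to. The old template also fixed the model name via ${MODEL_NAME}, which is why the separate model field disappears across the backend and frontend below.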
Frontend API types (export namespace AI): new ExposedPort and Volume interfaces, plus optional Docker settings on TensorRTLLM:

@@ -184,18 +184,33 @@ export namespace AI {
         environments: Environment[];
     }
 
+    export interface ExposedPort {
+        hostPort: number;
+        containerPort: number;
+        hostIP: string;
+    }
+
     export interface Environment {
         key: string;
         value: string;
     }
+    export interface Volume {
+        source: string;
+        target: string;
+    }
 
     export interface TensorRTLLM {
         id?: number;
         name: string;
         containerName: string;
         port: number;
         version: string;
         modelDir: string;
         model: string;
         hostIP: string;
         status?: string;
         message?: string;
         createdAt?: string;
+        exposedPorts?: ExposedPort[];
+        environments?: Environment[];
+        volumes?: Volume[];
     }
 
     export interface TensorRTLLMDTO extends TensorRTLLM {
TensorRT-LLM list view (Vue). The model column is dropped and the port column becomes a clickable PortJump:

@@ -26,14 +26,12 @@
                 prop="name"
                 show-overflow-tooltip
             />
-            <el-table-column :label="$t('commons.table.port')" min-width="80" prop="port" />
             <el-table-column :label="$t('app.version')" min-width="100" prop="version" show-overflow-tooltip />
-            <el-table-column
-                :label="$t('aiTools.model.model')"
-                min-width="120"
-                prop="model"
-                show-overflow-tooltip
-            />
+            <el-table-column :label="$t('commons.table.port')" min-width="80" prop="port">
+                <template #default="{ row }">
+                    <PortJump :row="row" :jump="goDashboard" />
+                </template>
+            </el-table-column>
             <el-table-column :label="$t('commons.table.status')" min-width="100" prop="status">
                 <template #default="{ row }">
                     <Status :key="row.status" :status="row.status"></Status>
The operations column makes room for the extra actions:

@@ -62,8 +60,8 @@
                 fix
             />
             <fu-table-operations
-                :ellipsis="mobile ? 0 : 2"
-                :min-width="mobile ? 'auto' : 200"
+                :ellipsis="mobile ? 0 : 5"
+                :min-width="mobile ? 'auto' : 300"
                 :buttons="buttons"
                 :label="$t('commons.table.operate')"
                 fixed="right"
PortJump wiring in the same view:

@@ -75,13 +73,16 @@
         <OpDialog ref="opRef" @search="search" />
         <OperateDialog @search="search" ref="dialogRef" />
         <ComposeLogs ref="composeLogRef" />
+        <PortJumpDialog ref="dialogPortJumpRef" />
     </div>
 </template>
 
 <script lang="ts" setup>
-import ComposeLogs from '@/components/log/compose/index.vue';
 import OperateDialog from './operate/index.vue';
 import RouterMenu from '@/views/ai/model/index.vue';
+import ComposeLogs from '@/components/log/compose/index.vue';
+import PortJumpDialog from '@/components/port-jump/index.vue';
+import PortJump from '@/views/website/runtime/components/port-jump.vue';
 
 import { reactive, onMounted, ref } from 'vue';
 import { dateFormat } from '@/utils/util';
@@ -109,6 +110,7 @@ const searchName = ref();
 const opRef = ref();
 const dialogRef = ref();
 const composeLogRef = ref();
+const dialogPortJumpRef = ref();
 
 const search = async () => {
     const params = {
@@ -135,6 +137,10 @@ const openEdit = (row: AI.TensorRTLLM) => {
     dialogRef.value.openEdit(row);
 };
 
+const goDashboard = async (port: any, protocol: string) => {
+    dialogPortJumpRef.value.acceptParams({ port: port, protocol: protocol });
+};
+
 const openLog = (row: AI.McpServer) => {
     composeLogRef.value.acceptParams({
         compose: row.dir + '/docker-compose.yml',
Operate dialog (Vue). The fixed port input and the allow-external-access switch go away:

@@ -13,23 +13,6 @@
             <el-form-item :label="$t('app.version')" prop="version">
                 <el-input v-model.trim="tensorRTLLM.version" />
             </el-form-item>
-            <el-row :gutter="20">
-                <el-col :span="8">
-                    <el-form-item :label="$t('commons.table.port')" prop="port">
-                        <el-input v-model.number="tensorRTLLM.port" />
-                    </el-form-item>
-                </el-col>
-                <el-col :span="6">
-                    <el-form-item :label="$t('app.allowPort')" prop="hostIP">
-                        <el-switch
-                            v-model="tensorRTLLM.hostIP"
-                            :active-value="'0.0.0.0'"
-                            :inactive-value="'127.0.0.1'"
-                        />
-                    </el-form-item>
-                </el-col>
-            </el-row>
-
             <el-form-item :label="$t('aiTools.tensorRT.modelDir')" prop="modelDir">
                 <el-input v-model="tensorRTLLM.modelDir">
                     <template #prepend>
The model picker is replaced by a run-command input plus tabbed port/environment/mount editors:

@@ -37,16 +20,18 @@
                     </template>
                 </el-input>
             </el-form-item>
-            <el-form-item :label="$t('aiTools.model.model')" prop="model">
-                <el-input v-model="tensorRTLLM.model">
-                    <template #prepend>
-                        <el-button
-                            icon="Folder"
-                            @click="modelRef.acceptParams({ path: tensorRTLLM.modelDir, dir: true })"
-                        />
-                    </template>
-                </el-input>
-            </el-form-item>
+            <el-form-item :label="$t('runtime.runScript')" prop="command">
+                <el-input v-model="tensorRTLLM.command"></el-input>
+            </el-form-item>
+            <el-tabs type="border-card">
+                <el-tab-pane :label="$t('commons.table.port')">
+                    <PortConfig v-model="tensorRTLLM" :mode="mode" />
+                </el-tab-pane>
+                <el-tab-pane :label="$t('runtime.environment')">
+                    <Environment :environments="tensorRTLLM.environments" />
+                </el-tab-pane>
+                <el-tab-pane :label="$t('container.mount')"><Volumes :volumes="tensorRTLLM.volumes" /></el-tab-pane>
+            </el-tabs>
         </el-form>
         <template #footer>
             <span class="dialog-footer">
Imports and the now-unused model FileList:

@@ -57,17 +42,20 @@
             </span>
         </template>
         <FileList ref="modelDirRef" @choose="getModelDir" />
-        <FileList ref="modelRef" @choose="getModelPath" />
     </DrawerPro>
 </template>
 
 <script lang="ts" setup>
+import PortConfig from '@/views/website/runtime/port/index.vue';
+import Environment from '@/views/website/runtime/environment/index.vue';
+import Volumes from '@/views/website/runtime/volume/index.vue';
+import DrawerPro from '@/components/drawer-pro/index.vue';
+import FileList from '@/components/file-list/index.vue';
+
 import { reactive, ref } from 'vue';
 import { Rules } from '@/global/form-rules';
 import i18n from '@/lang';
 import { ElForm, FormInstance } from 'element-plus';
-import DrawerPro from '@/components/drawer-pro/index.vue';
-import FileList from '@/components/file-list/index.vue';
 import { createTensorRTLLM, updateTensorRTLLM } from '@/api/modules/ai';
 import { MsgSuccess } from '@/utils/message';
Form defaults:

@@ -78,16 +66,16 @@ const newTensorRTLLM = () => {
     return {
         name: '',
         containerName: '',
-        port: 8000,
         version: '1.2.0rc0',
         modelDir: '',
-        model: '',
-        hostIP: '',
         image: 'nvcr.io/nvidia/tensorrt-llm/release',
+        command: 'bash -c "trtllm-serve /models/ --host 0.0.0.0 --port 8000"',
+        exposedPorts: [],
+        environments: [],
+        volumes: [],
     };
 };
 const modelDirRef = ref();
-const modelRef = ref();
 const tensorRTLLM = ref(newTensorRTLLM());
 const emit = defineEmits(['search']);
openEdit guards against null arrays coming back from the API:

@@ -99,6 +87,15 @@ const openCreate = (): void => {
 const openEdit = (rowData: any): void => {
     mode.value = 'edit';
     tensorRTLLM.value = { ...rowData };
+    if (tensorRTLLM.value.environments == null) {
+        tensorRTLLM.value.environments = [];
+    }
+    if (tensorRTLLM.value.volumes == null) {
+        tensorRTLLM.value.volumes = [];
+    }
+    if (tensorRTLLM.value.exposedPorts == null) {
+        tensorRTLLM.value.exposedPorts = [];
+    }
     drawerVisiable.value = true;
 };
getModelPath and the port/model validation rules go away; command becomes required:

@@ -110,23 +107,13 @@ const getModelDir = (path: string) => {
     tensorRTLLM.value.modelDir = path;
 };
 
-const getModelPath = (path: string) => {
-    const modelDir = tensorRTLLM.value.modelDir;
-    if (modelDir && path.startsWith(modelDir)) {
-        tensorRTLLM.value.model = path.replace(modelDir, '').replace(/^[\/\\]+/, '');
-    } else {
-        tensorRTLLM.value.model = path;
-    }
-};
-
 const rules = reactive({
     name: [Rules.requiredInput],
-    port: [Rules.requiredInput],
     version: [Rules.requiredInput],
     modelDir: [Rules.requiredInput],
-    model: [Rules.requiredInput],
    containerName: [Rules.requiredInput],
     image: [Rules.requiredInput],
+    command: [Rules.requiredInput],
 });
 
 const formRef = ref<FormInstance>();