feat: Support for asynchronously obtaining the backup file size (#7660)
Commit 83db40e261 (parent 95ec6c62ef)
8 changed files with 186 additions and 28 deletions
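The change splits listing from sizing: the record-search endpoints now return a page of records immediately, without touching the backup storage, and two new endpoints (/settings/backup/record/size and /settings/backup/record/size/bycronjob) return id/name/size triples for the same page so the UI can fill sizes in asynchronously. As a rough illustration of the new contract, a minimal Go client for the size endpoint; the JSON field names and the code/data response envelope are assumptions, since the diff only shows the Go struct fields and the routes:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// RecordSearch mirrors dto.RecordSearch as used by LoadSize; the Go
// fields appear in the diff, but the JSON tags here are assumptions.
type RecordSearch struct {
	Page       int    `json:"page"`
	PageSize   int    `json:"pageSize"`
	Name       string `json:"name"`
	Type       string `json:"type"`
	DetailName string `json:"detailName"`
}

// BackupFile matches the new dto.BackupFile added in this commit.
type BackupFile struct {
	ID   uint   `json:"id"`
	Name string `json:"name"`
	Size int64  `json:"size"`
}

func main() {
	body, _ := json.Marshal(RecordSearch{Page: 1, PageSize: 10, Name: "app", Type: "app"})
	// Base URL is hypothetical; a real 1Panel call also needs the
	// ApiKeyAuth/Timestamp headers declared in the swagger annotations.
	resp, err := http.Post("http://localhost:9999/api/v1/settings/backup/record/size",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Assumed response envelope: {"code":200,"data":[...]}.
	var envelope struct {
		Code int          `json:"code"`
		Data []BackupFile `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&envelope); err != nil {
		panic(err)
	}
	for _, f := range envelope.Data {
		fmt.Printf("record %d (%s): %d bytes\n", f.ID, f.Name, f.Size)
	}
}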
@@ -162,6 +162,29 @@ func (b *BaseApi) SearchBackupRecords(c *gin.Context) {
 	})
 }
 
+// @Tags Backup Account
+// @Summary Load backup records size
+// @Accept json
+// @Param request body dto.RecordSearch true "request"
+// @Success 200 {array} dto.BackupFile
+// @Security ApiKeyAuth
+// @Security Timestamp
+// @Router /settings/backup/record/size [post]
+func (b *BaseApi) LoadBackupSize(c *gin.Context) {
+	var req dto.RecordSearch
+	if err := helper.CheckBindAndValidate(&req, c); err != nil {
+		return
+	}
+
+	list, err := backupService.LoadSize(req)
+	if err != nil {
+		helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
+		return
+	}
+
+	helper.SuccessWithData(c, list)
+}
+
 // @Tags Backup Account
 // @Summary Page backup records by cronjob
 // @Accept json

@@ -188,6 +211,29 @@ func (b *BaseApi) SearchBackupRecordsByCronjob(c *gin.Context) {
 	})
 }
 
+// @Tags Backup Account
+// @Summary Load backup records size for cronjob
+// @Accept json
+// @Param request body dto.RecordSearchByCronjob true "request"
+// @Success 200 {array} dto.BackupFile
+// @Security ApiKeyAuth
+// @Security Timestamp
+// @Router /settings/backup/record/size/bycronjob [post]
+func (b *BaseApi) LoadBackupSizeByCronjob(c *gin.Context) {
+	var req dto.RecordSearchByCronjob
+	if err := helper.CheckBindAndValidate(&req, c); err != nil {
+		return
+	}
+
+	list, err := backupService.LoadSizeByCronjob(req)
+	if err != nil {
+		helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
+		return
+	}
+
+	helper.SuccessWithData(c, list)
+}
+
 // @Tags Backup Account
 // @Summary Download backup record
 // @Accept json

@@ -21,6 +21,12 @@ type BackupInfo struct {
 	Vars string `json:"vars"`
 }
 
+type BackupFile struct {
+	ID   uint   `json:"id"`
+	Name string `json:"name"`
+	Size int64  `json:"size"`
+}
+
 type OneDriveInfo struct {
 	ClientID     string `json:"client_id"`
 	ClientSecret string `json:"client_secret"`

@@ -66,7 +72,6 @@ type BackupRecords struct {
 	BackupType string `json:"backupType"`
 	FileDir    string `json:"fileDir"`
 	FileName   string `json:"fileName"`
-	Size       int64  `json:"size"`
 }
 
 type DownloadRecord struct {

@@ -8,7 +8,6 @@ import (
 	"fmt"
 	"os"
 	"path"
-	"sort"
 	"strings"
 	"sync"
 	"time"

@@ -30,7 +29,9 @@ type BackupService struct{}
 type IBackupService interface {
 	List() ([]dto.BackupInfo, error)
 	SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error)
+	LoadSize(req dto.RecordSearch) ([]dto.BackupFile, error)
 	SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error)
+	LoadSizeByCronjob(req dto.RecordSearchByCronjob) ([]dto.BackupFile, error)
 	LoadOneDriveInfo() (dto.OneDriveInfo, error)
 	DownloadRecord(info dto.DownloadRecord) (string, error)
 	Create(backupDto dto.BackupOperate) error

@@ -94,11 +95,29 @@ func (u *BackupService) SearchRecordsWithPage(search dto.RecordSearch) (int64, [
 		return 0, nil, err
 	}
 
-	datas, err := u.loadRecordSize(records)
-	sort.Slice(datas, func(i, j int) bool {
-		return datas[i].CreatedAt.After(datas[j].CreatedAt)
-	})
-	return total, datas, err
+	var list []dto.BackupRecords
+	for _, item := range records {
+		var itemRecord dto.BackupRecords
+		if err := copier.Copy(&itemRecord, &item); err != nil {
+			continue
+		}
+		list = append(list, itemRecord)
+	}
+	return total, list, err
+}
+
+func (u *BackupService) LoadSize(req dto.RecordSearch) ([]dto.BackupFile, error) {
+	_, records, err := backupRepo.PageRecord(
+		req.Page, req.PageSize,
+		commonRepo.WithOrderBy("created_at desc"),
+		commonRepo.WithByName(req.Name),
+		commonRepo.WithByType(req.Type),
+		backupRepo.WithByDetailName(req.DetailName),
+	)
+	if err != nil {
+		return nil, err
+	}
+	return u.loadRecordSize(records)
 }
 
 func (u *BackupService) ListAppRecords(name, detailName, fileName string) ([]model.BackupRecord, error) {

@@ -125,11 +144,27 @@ func (u *BackupService) SearchRecordsByCronjobWithPage(search dto.RecordSearchBy
 		return 0, nil, err
 	}
 
-	datas, err := u.loadRecordSize(records)
-	sort.Slice(datas, func(i, j int) bool {
-		return datas[i].CreatedAt.After(datas[j].CreatedAt)
-	})
-	return total, datas, err
+	var list []dto.BackupRecords
+	for _, item := range records {
+		var itemRecord dto.BackupRecords
+		if err := copier.Copy(&itemRecord, &item); err != nil {
+			continue
+		}
+		list = append(list, itemRecord)
+	}
+	return total, list, err
+}
+
+func (u *BackupService) LoadSizeByCronjob(req dto.RecordSearchByCronjob) ([]dto.BackupFile, error) {
+	_, records, err := backupRepo.PageRecord(
+		req.Page, req.PageSize,
+		commonRepo.WithOrderBy("created_at desc"),
+		backupRepo.WithByCronID(req.CronjobID),
+	)
+	if err != nil {
+		return nil, err
+	}
+	return u.loadRecordSize(records)
 }
 
 type loadSizeHelper struct {

@@ -482,15 +517,14 @@ func (u *BackupService) loadAccessToken(backup *model.BackupAccount) error {
 	return nil
 }
 
-func (u *BackupService) loadRecordSize(records []model.BackupRecord) ([]dto.BackupRecords, error) {
-	var datas []dto.BackupRecords
+func (u *BackupService) loadRecordSize(records []model.BackupRecord) ([]dto.BackupFile, error) {
+	var datas []dto.BackupFile
 	clientMap := make(map[string]loadSizeHelper)
 	var wg sync.WaitGroup
 	for i := 0; i < len(records); i++ {
-		var item dto.BackupRecords
-		if err := copier.Copy(&item, &records[i]); err != nil {
-			return nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
-		}
+		var item dto.BackupFile
+		item.ID = records[i].ID
+		item.Name = records[i].FileName
 		itemPath := path.Join(records[i].FileDir, records[i].FileName)
 		if _, ok := clientMap[records[i].Source]; !ok {
 			backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))

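loadRecordSize now fills dto.BackupFile values directly instead of copier-copying full records, and, as the unchanged context (the sync.WaitGroup and the per-source clientMap) suggests, sizes each file concurrently. The body of the loop falls outside this hunk; below is a minimal sketch of the fan-out shape it implies, with record and sizer as hypothetical stand-ins for model.BackupRecord and the storage client inside loadSizeHelper:

package main

import (
	"fmt"
	"path"
	"sync"
)

type BackupFile struct {
	ID   uint
	Name string
	Size int64
}

// record is a hypothetical stand-in for model.BackupRecord.
type record struct {
	ID                uint
	Source            string
	FileDir, FileName string
}

// sizer is a hypothetical stand-in for the storage client held by
// loadSizeHelper; Size returns the remote file size in bytes.
type sizer interface {
	Size(itemPath string) (int64, error)
}

// loadSizes resolves one client per backup source, then sizes each
// file in its own goroutine and waits for all of them to finish.
func loadSizes(records []record, clientFor func(source string) sizer) []BackupFile {
	datas := make([]BackupFile, len(records))
	clients := make(map[string]sizer)
	var wg sync.WaitGroup
	for i := range records {
		datas[i] = BackupFile{ID: records[i].ID, Name: records[i].FileName}
		if _, ok := clients[records[i].Source]; !ok {
			clients[records[i].Source] = clientFor(records[i].Source)
		}
		wg.Add(1)
		go func(i int, c sizer, itemPath string) {
			defer wg.Done()
			if size, err := c.Size(itemPath); err == nil {
				datas[i].Size = size // each goroutine writes only its own index
			}
		}(i, clients[records[i].Source], path.Join(records[i].FileDir, records[i].FileName))
	}
	wg.Wait()
	return datas
}

// constSizer is a trivial fake client for demonstration.
type constSizer int64

func (c constSizer) Size(string) (int64, error) { return int64(c), nil }

func main() {
	files := loadSizes(
		[]record{{ID: 1, Source: "LOCAL", FileDir: "/opt/backup", FileName: "app.tar.gz"}},
		func(string) sizer { return constSizer(1024) },
	)
	fmt.Printf("%+v\n", files)
}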
@@ -56,7 +56,9 @@ func (s *SettingRouter) InitRouter(Router *gin.RouterGroup) {
 		settingRouter.POST("/backup/del", baseApi.DeleteBackup)
 		settingRouter.POST("/backup/update", baseApi.UpdateBackup)
 		settingRouter.POST("/backup/record/search", baseApi.SearchBackupRecords)
+		settingRouter.POST("/backup/record/size", baseApi.LoadBackupSize)
 		settingRouter.POST("/backup/record/search/bycronjob", baseApi.SearchBackupRecordsByCronjob)
+		settingRouter.POST("/backup/record/size/bycronjob", baseApi.LoadBackupSizeByCronjob)
 		settingRouter.POST("/backup/record/download", baseApi.DownloadRecord)
 		settingRouter.POST("/backup/record/del", baseApi.DeleteBackupRecord)
 
@@ -40,6 +40,11 @@ export namespace Backup {
     fileDir: string;
     fileName: string;
 }
+export interface BackupFile {
+    id: number;
+    name: string;
+    size: number;
+}
 export interface ForBucket {
     type: string;
     accessKey: string;

@@ -124,9 +124,15 @@ export const deleteBackupRecord = (params: { ids: number[] }) => {
 export const searchBackupRecords = (params: Backup.SearchBackupRecord) => {
     return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search`, params, TimeoutEnum.T_5M);
 };
+export const loadBackupSize = (param: Backup.SearchBackupRecord) => {
+    return http.post<Array<Backup.BackupFile>>(`/settings/backup/record/size`, param);
+};
 export const searchBackupRecordsByCronjob = (params: Backup.SearchBackupRecordByCronjob) => {
     return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search/bycronjob`, params, TimeoutEnum.T_5M);
 };
+export const loadCronjobBackupSize = (param: Backup.SearchBackupRecordByCronjob) => {
+    return http.post<Array<Backup.BackupFile>>(`/settings/backup/record/size/bycronjob`, param);
+};
 
 export const getBackupList = () => {
     return http.get<Array<Backup.BackupInfo>>(`/settings/backup/search`);

@@ -48,10 +48,15 @@
 <el-table-column :label="$t('commons.table.name')" prop="fileName" show-overflow-tooltip />
 <el-table-column :label="$t('file.size')" prop="size" show-overflow-tooltip>
     <template #default="{ row }">
-        <span v-if="row.size">
-            {{ computeSize(row.size) }}
-        </span>
-        <span v-else>-</span>
+        <div v-if="row.hasLoad">
+            <span v-if="row.size">
+                {{ computeSize(row.size) }}
+            </span>
+            <span v-else>-</span>
+        </div>
+        <div v-if="!row.hasLoad">
+            <el-button link loading></el-button>
+        </div>
     </template>
 </el-table-column>
 <el-table-column :label="$t('database.source')" prop="backupType">

@@ -110,7 +115,7 @@ import { computeSize, dateFormat, downloadFile } from '@/utils/util';
 import { getBackupList, handleBackup, handleRecover } from '@/api/modules/setting';
 import i18n from '@/lang';
 import DrawerHeader from '@/components/drawer-header/index.vue';
-import { deleteBackupRecord, downloadBackupRecord, searchBackupRecords } from '@/api/modules/setting';
+import { deleteBackupRecord, downloadBackupRecord, searchBackupRecords, loadBackupSize } from '@/api/modules/setting';
 import { Backup } from '@/api/interface/backup';
 import router from '@/routers';
 import { MsgSuccess } from '@/utils/message';

@@ -197,6 +202,31 @@ const search = async () => {
             loading.value = false;
             data.value = res.data.items || [];
             paginationConfig.total = res.data.total;
+            if (paginationConfig.total !== 0) {
+                loadSize(params);
+            }
+        })
+        .catch(() => {
+            loading.value = false;
+        });
+};
+
+const loadSize = async (params: any) => {
+    await loadBackupSize(params)
+        .then((res) => {
+            let stats = res.data || [];
+            if (stats.length === 0) {
+                return;
+            }
+            for (const backup of data.value) {
+                for (const item of stats) {
+                    if (backup.id === item.id) {
+                        backup.hasLoad = true;
+                        backup.size = item.size;
+                        break;
+                    }
+                }
+            }
         })
         .catch(() => {
             loading.value = false;

@@ -26,10 +26,15 @@
 <el-table-column :label="$t('commons.table.name')" prop="fileName" show-overflow-tooltip />
 <el-table-column :label="$t('file.size')" prop="size" show-overflow-tooltip>
     <template #default="{ row }">
-        <span v-if="row.size">
-            {{ computeSize(row.size) }}
-        </span>
-        <span v-else>-</span>
+        <div v-if="row.hasLoad">
+            <span v-if="row.size">
+                {{ computeSize(row.size) }}
+            </span>
+            <span v-else>-</span>
+        </div>
+        <div v-if="!row.hasLoad">
+            <el-button link loading></el-button>
+        </div>
     </template>
 </el-table-column>
 <el-table-column :label="$t('database.source')" prop="backupType">

@@ -57,7 +62,7 @@ import { reactive, ref } from 'vue';
 import { computeSize, dateFormat, downloadFile } from '@/utils/util';
 import i18n from '@/lang';
 import DrawerHeader from '@/components/drawer-header/index.vue';
-import { downloadBackupRecord, searchBackupRecordsByCronjob } from '@/api/modules/setting';
+import { downloadBackupRecord, loadCronjobBackupSize, searchBackupRecordsByCronjob } from '@/api/modules/setting';
 import { Backup } from '@/api/interface/backup';
 
 const selects = ref<any>([]);

@@ -101,6 +106,31 @@ const search = async () => {
             loading.value = false;
             data.value = res.data.items || [];
             paginationConfig.total = res.data.total;
+            if (paginationConfig.total !== 0) {
+                loadSize(params);
+            }
+        })
+        .catch(() => {
+            loading.value = false;
+        });
+};
+
+const loadSize = async (params: any) => {
+    await loadCronjobBackupSize(params)
+        .then((res) => {
+            let stats = res.data || [];
+            if (stats.length === 0) {
+                return;
+            }
+            for (const backup of data.value) {
+                for (const item of stats) {
+                    if (backup.id === item.id) {
+                        backup.hasLoad = true;
+                        backup.size = item.size;
+                        break;
+                    }
+                }
+            }
         })
        .catch(() => {
            loading.value = false;