Mirror of https://github.com/1Panel-dev/1Panel.git, synced 2025-10-10 15:36:45 +08:00
fix: resolve the issue where the compressed file still exists after zip compression fails (#3459)
Refs https://github.com/1Panel-dev/1Panel/issues/3012
parent ee2fa70bb0
commit aaafeb039e
1 changed file with 9 additions and 2 deletions
@@ -25,10 +25,14 @@ func (z ZipArchiver) Extract(filePath, dstDir string) error {
 }
 
 func (z ZipArchiver) Compress(sourcePaths []string, dstFile string) error {
+	var err error
 	tmpFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("%s%s.zip", common.RandStr(50), time.Now().Format("20060102150405")))
 	op := NewFileOp()
 	defer func() {
 		_ = op.DeleteFile(tmpFile)
+		if err != nil {
+			_ = op.DeleteFile(dstFile)
+		}
 	}()
 	baseDir := path.Dir(sourcePaths[0])
 	relativePaths := make([]string, len(sourcePaths))
@@ -36,8 +40,11 @@ func (z ZipArchiver) Compress(sourcePaths []string, dstFile string) error {
 		relativePaths[i] = path.Base(sp)
 	}
 	cmdStr := fmt.Sprintf("zip -qr %s %s", tmpFile, strings.Join(relativePaths, " "))
-	if err := cmd.ExecCmdWithDir(cmdStr, baseDir); err != nil {
+	if err = cmd.ExecCmdWithDir(cmdStr, baseDir); err != nil {
 		return err
 	}
-	return op.Mv(tmpFile, dstFile)
+	if err = op.Mv(tmpFile, dstFile); err != nil {
+		return err
+	}
+	return nil
 }
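Below is a minimal, self-contained Go sketch of the cleanup pattern this commit applies, not the 1Panel code itself: a named err variable lets a deferred function detect failure and remove the partially written destination file along with the temporary file. The names compress, archive-tmp.zip, and /tmp/example.zip are illustrative assumptions, and os/exec plus os.Rename stand in for 1Panel's cmd.ExecCmdWithDir and op.Mv helpers.

package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
)

// compress zips srcDir into dstFile, going through a temporary file so that a
// failed run never leaves a half-written archive behind at dstFile.
func compress(srcDir, dstFile string) error {
	var err error
	tmpFile := filepath.Join(os.TempDir(), "archive-tmp.zip")

	defer func() {
		// Always drop the temporary file; drop the destination too if anything failed.
		_ = os.Remove(tmpFile)
		if err != nil {
			_ = os.Remove(dstFile)
		}
	}()

	// Run the system zip binary from inside srcDir (mirrors cmd.ExecCmdWithDir).
	zipCmd := exec.Command("zip", "-qr", tmpFile, ".")
	zipCmd.Dir = srcDir
	if err = zipCmd.Run(); err != nil {
		return fmt.Errorf("zip failed: %w", err)
	}

	// Move the finished archive into place; on failure the defer removes dstFile.
	if err = os.Rename(tmpFile, dstFile); err != nil {
		return fmt.Errorf("move failed: %w", err)
	}
	return nil
}

func main() {
	if err := compress(".", "/tmp/example.zip"); err != nil {
		fmt.Println("compress error:", err)
	}
}

Note the switch from "if err :=" to "if err =" in the diff: with ":=" the inner err would shadow the newly added outer "var err error", so the deferred cleanup would never observe the failure and the broken dstFile would still be left on disk.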