Files
BackupX/server/internal/backup/helpers.go
Awuqing f388b98943 refactor: single-pass hashing during upload via TeeReader
Previous approach read the file twice (once for SHA-256, once for upload),
doubling disk I/O. Under concurrent multi-target uploads this becomes a
bottleneck.

New design — hashingReader wraps io.TeeReader + a hash.Hash from sha256.New():
  file.Read() → TeeReader → sha256.Write() (hash) + provider (upload)
Single read pass yields both byte count and SHA-256 simultaneously.

Each upload goroutine independently opens the file and computes its own
hash. The first successful target writes checksum to the record via
sync.Once. Zero extra disk I/O, zero extra memory copies, fully
concurrent-safe.
2026-03-31 13:08:10 +08:00

42 lines
1.0 KiB
Go

package backup
import (
"fmt"
"os"
"path/filepath"
"strings"
"time"
)
// createTempArtifact creates a fresh temporary working directory under
// baseDir (or the system temp dir when baseDir is empty) and returns the
// directory path plus the full path of the artifact file to write inside it.
// The artifact name is derived from the sanitized task name, a UTC timestamp
// (so repeated runs of the same task do not collide), and the given extension.
// The file itself is not created; the caller is responsible for removing the
// returned tempDir when done.
func createTempArtifact(baseDir, taskName string, extension string) (string, string, error) {
	tempDir, err := os.MkdirTemp(baseDir, "backupx-run-*")
	if err != nil {
		return "", "", fmt.Errorf("create temp dir: %w", err)
	}
	base := sanitizeFileName(taskName)
	if base == "" {
		// Sanitizing can strip everything; fall back to a neutral name.
		base = "backup"
	}
	fileName := base + "_" + time.Now().UTC().Format("20060102T150405")
	// Accept the extension with or without a leading dot; skip the dot
	// entirely when no extension is given (avoids a trailing ".").
	if ext := strings.TrimPrefix(extension, "."); ext != "" {
		fileName += "." + ext
	}
	return tempDir, filepath.Join(tempDir, fileName), nil
}
// sanitizeFileName reduces an arbitrary task name to a safe file-name
// fragment: ASCII letters are lowercased, digits, '-' and '_' pass through
// unchanged, spaces and dots are replaced with '_', and every other rune is
// dropped. Leading/trailing whitespace is ignored and leading/trailing
// underscores are trimmed from the result, so the returned string may be
// empty (callers must handle that case).
func sanitizeFileName(value string) string {
	var b strings.Builder
	for _, r := range strings.TrimSpace(value) {
		switch {
		case r >= 'a' && r <= 'z', r >= '0' && r <= '9', r == '-', r == '_':
			b.WriteRune(r)
		case r >= 'A' && r <= 'Z':
			// Fold ASCII uppercase to lowercase.
			b.WriteRune(r + ('a' - 'A'))
		case r == ' ', r == '.':
			b.WriteRune('_')
			// Any other rune (unicode, punctuation, path separators) is dropped.
		}
	}
	return strings.Trim(b.String(), "_")
}