mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-12 12:19:47 +08:00
Compare commits
148 Commits
fix/window
...
v0.5.9
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dfabd77615 | ||
|
|
76f65cb96c | ||
|
|
4a2dda8aa2 | ||
|
|
8bdc6e8086 | ||
|
|
1eb2f6dffe | ||
|
|
5c5e1fc68f | ||
|
|
fb70f1420c | ||
|
|
d75596921c | ||
|
|
d251594fd9 | ||
|
|
7598bf372b | ||
|
|
64021ffd2a | ||
|
|
fbd785400f | ||
|
|
b573fd95cc | ||
|
|
a097d96380 | ||
|
|
6ee0fea110 | ||
|
|
e6b822c967 | ||
|
|
0ab10d2e80 | ||
|
|
064cdc34be | ||
|
|
c62f4b7d3c | ||
|
|
304a4926d2 | ||
|
|
d1d3fa26f1 | ||
|
|
cabf84a041 | ||
|
|
fc8e62b997 | ||
|
|
9b02720169 | ||
|
|
eb36dcc5a2 | ||
|
|
1a3f137438 | ||
|
|
5f94cd3911 | ||
|
|
bb257c35bc | ||
|
|
b0eb93bfa3 | ||
|
|
11b8e0f12a | ||
|
|
1dabac1a65 | ||
|
|
e013288967 | ||
|
|
8c5fee1c7a | ||
|
|
ec05f518a9 | ||
|
|
2c9aa640fd | ||
|
|
d467322ebe | ||
|
|
9f7cc58fad | ||
|
|
97bf891df3 | ||
|
|
72a9692200 | ||
|
|
e26a456eae | ||
|
|
eaa45f17fd | ||
|
|
f101a59d32 | ||
|
|
501ad9e9a3 | ||
|
|
482a7fce2e | ||
|
|
e6af5f966b | ||
|
|
eef973b7fc | ||
|
|
d8b6b4ef8d | ||
|
|
4d58cc6e26 | ||
|
|
b0bdddad9b | ||
|
|
a73ca36a32 | ||
|
|
92e9381fcc | ||
|
|
c4c7e379d1 | ||
|
|
695713c779 | ||
|
|
6ad690cffc | ||
|
|
ca49b37dc7 | ||
|
|
c8c0c5f20a | ||
|
|
d61d7ec39b | ||
|
|
e964c8ecf8 | ||
|
|
7644462180 | ||
|
|
3bd02e2e09 | ||
|
|
22bd1c4c28 | ||
|
|
0daf702d25 | ||
|
|
058c74e49a | ||
|
|
b85c7529ec | ||
|
|
e521d2125f | ||
|
|
450fdfa59e | ||
|
|
c87b15b22a | ||
|
|
89c81823bc | ||
|
|
797ba27d20 | ||
|
|
ed1f40e04a | ||
|
|
2b190e564f | ||
|
|
1c050aefd0 | ||
|
|
75a5a322e0 | ||
|
|
61d6197fe3 | ||
|
|
6157161293 | ||
|
|
0f843a7dcf | ||
|
|
fb65b553e9 | ||
|
|
1a5bf79dd3 | ||
|
|
dea096d4c2 | ||
|
|
04f8b266d3 | ||
|
|
b53227cb15 | ||
|
|
0246d7fae5 | ||
|
|
4aa177ed37 | ||
|
|
4f5a7bd94b | ||
|
|
00c6f9871f | ||
|
|
6a4b397ecc | ||
|
|
3973038aea | ||
|
|
71b41459e7 | ||
|
|
69942bb77e | ||
|
|
f372b20a68 | ||
|
|
e6da986927 | ||
|
|
4570516678 | ||
|
|
8c91d8929b | ||
|
|
786835c9bc | ||
|
|
f2fc7cbd05 | ||
|
|
462ca57907 | ||
|
|
4bfdb2cb6c | ||
|
|
6918b56ed9 | ||
|
|
1afb8850ad | ||
|
|
3284eeba17 | ||
|
|
494484eb92 | ||
|
|
6156884455 | ||
|
|
a54b8906a3 | ||
|
|
f477feab2f | ||
|
|
e76e174bfe | ||
|
|
b904c0b107 | ||
|
|
c02e7c12e8 | ||
|
|
a87c801e66 | ||
|
|
7f00139847 | ||
|
|
78c5351399 | ||
|
|
e2acfa51eb | ||
|
|
9a684cd82c | ||
|
|
e3b142053f | ||
|
|
3ca898a950 | ||
|
|
84688e995a | ||
|
|
4d0940636d | ||
|
|
26b79adc5f | ||
|
|
90aa3561be | ||
|
|
ec59023736 | ||
|
|
4a96cb93d2 | ||
|
|
4c322db9d0 | ||
|
|
ed18c8285f | ||
|
|
5f8cedabd8 | ||
|
|
20923989b9 | ||
|
|
210106cde7 | ||
|
|
87aac277ec | ||
|
|
4de3f408c5 | ||
|
|
439625a49c | ||
|
|
884d72f3d3 | ||
|
|
98c1600e13 | ||
|
|
eb594b7741 | ||
|
|
587ed3444b | ||
|
|
e366a61910 | ||
|
|
5986b71c4d | ||
|
|
cb18bc3067 | ||
|
|
d676ac9084 | ||
|
|
7fcbcb2471 | ||
|
|
c680e50e74 | ||
|
|
4cb5071b0b | ||
|
|
7ae5341c1c | ||
|
|
01940e74b7 | ||
|
|
30210bc40e | ||
|
|
e90a3e2db6 | ||
|
|
5df95730d8 | ||
|
|
67a9c454d0 | ||
|
|
c17493952b | ||
|
|
dd258bd46c | ||
|
|
505c89066b |
26
.github/release.yaml
vendored
Normal file
26
.github/release.yaml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
changelog:
|
||||
categories:
|
||||
- title: 新功能
|
||||
labels:
|
||||
- feature
|
||||
- enhancement
|
||||
- feat
|
||||
- title: 问题修复
|
||||
labels:
|
||||
- bug
|
||||
- fix
|
||||
- title: 文档与流程
|
||||
labels:
|
||||
- docs
|
||||
- documentation
|
||||
- ci
|
||||
- workflow
|
||||
- chore
|
||||
- title: 重构与优化
|
||||
labels:
|
||||
- refactor
|
||||
- perf
|
||||
- optimization
|
||||
- title: 其他更新
|
||||
labels:
|
||||
- '*'
|
||||
3
.github/workflows/release-winget.yml
vendored
3
.github/workflows/release-winget.yml
vendored
@@ -10,6 +10,9 @@ on:
|
||||
description: 'Tag of release you want to publish'
|
||||
type: string
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: windows-latest
|
||||
|
||||
219
.github/workflows/release.yml
vendored
219
.github/workflows/release.yml
vendored
@@ -8,6 +8,9 @@ on:
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
# Phase 1: Build in parallel and output artifacts
|
||||
build:
|
||||
@@ -88,6 +91,26 @@ jobs:
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install UPX (Windows)
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
$UPX_VERSION = "4.2.4"
|
||||
$url = "https://github.com/upx/upx/releases/download/v${UPX_VERSION}/upx-${UPX_VERSION}-win64.zip"
|
||||
$zipPath = "$env:RUNNER_TEMP\upx.zip"
|
||||
$extractPath = "$env:RUNNER_TEMP\upx"
|
||||
Write-Host "📥 从 GitHub Releases 下载 UPX v${UPX_VERSION} ..."
|
||||
Invoke-WebRequest -Uri $url -OutFile $zipPath -UseBasicParsing
|
||||
Expand-Archive -Path $zipPath -DestinationPath $extractPath -Force
|
||||
$upxDir = Get-ChildItem -Path $extractPath -Directory | Select-Object -First 1
|
||||
"$($upxDir.FullName)" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
$upxCmd = Join-Path $upxDir.FullName "upx.exe"
|
||||
if (!(Test-Path $upxCmd)) {
|
||||
Write-Error "❌ 未检测到 upx,无法保证 Windows 产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
& $upxCmd --version
|
||||
|
||||
# Linux Dependencies (GTK3, WebKit2GTK required by Wails)
|
||||
- name: Install Linux Dependencies
|
||||
if: contains(matrix.platform, 'linux')
|
||||
@@ -102,6 +125,9 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.0-dev
|
||||
fi
|
||||
|
||||
sudo apt-get install -y upx-ucl || sudo apt-get install -y upx
|
||||
upx --version
|
||||
|
||||
# AppImage 运行/打包可能需要 FUSE2。不同发行版/版本包名不同,做兼容兜底。
|
||||
sudo apt-get install -y libfuse2 || sudo apt-get install -y libfuse2t64 || true
|
||||
|
||||
@@ -131,15 +157,91 @@ jobs:
|
||||
- name: Install Wails
|
||||
run: go install -v github.com/wailsapp/wails/v2/cmd/wails@latest
|
||||
|
||||
- name: Setup MSYS2 Toolchain For DuckDB (Windows AMD64)
|
||||
id: msys2_duckdb
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
continue-on-error: true
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: UCRT64
|
||||
update: true
|
||||
install: >-
|
||||
mingw-w64-ucrt-x86_64-gcc
|
||||
|
||||
- name: Configure DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
function Find-MingwBin([string[]]$candidates) {
|
||||
foreach ($bin in $candidates) {
|
||||
if ([string]::IsNullOrWhiteSpace($bin)) {
|
||||
continue
|
||||
}
|
||||
$gcc = Join-Path $bin 'gcc.exe'
|
||||
$gxx = Join-Path $bin 'g++.exe'
|
||||
if ((Test-Path $gcc) -and (Test-Path $gxx)) {
|
||||
return $bin
|
||||
}
|
||||
}
|
||||
return $null
|
||||
}
|
||||
|
||||
$msys2Outcome = "${{ steps.msys2_duckdb.outcome }}"
|
||||
$msys2Location = "${{ steps.msys2_duckdb.outputs['msys2-location'] }}"
|
||||
$candidateBins = @()
|
||||
if (-not [string]::IsNullOrWhiteSpace($msys2Location)) {
|
||||
$candidateBins += Join-Path $msys2Location 'ucrt64\bin'
|
||||
}
|
||||
$candidateBins += @(
|
||||
'C:\msys64\ucrt64\bin',
|
||||
'D:\a\_temp\msys64\ucrt64\bin'
|
||||
)
|
||||
$candidateBins = @($candidateBins | Select-Object -Unique)
|
||||
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
if (-not $mingwBin) {
|
||||
if ($msys2Outcome -ne 'success') {
|
||||
Write-Warning "⚠️ MSYS2 安装步骤结果为 $msys2Outcome,回退到 UCRT64 本机路径探测"
|
||||
} else {
|
||||
Write-Warning "⚠️ MSYS2 已执行,但未找到 UCRT64 gcc/g++,回退到本机路径探测"
|
||||
}
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
}
|
||||
|
||||
if (-not $mingwBin) {
|
||||
Write-Error "❌ 未找到可用的 DuckDB UCRT64 编译器。已检查:$($candidateBins -join ', ')"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$gcc = (Join-Path $mingwBin 'gcc.exe')
|
||||
$gxx = (Join-Path $mingwBin 'g++.exe')
|
||||
|
||||
if (!(Test-Path $gcc) -or !(Test-Path $gxx)) {
|
||||
Write-Error "❌ DuckDB 编译器缺失:gcc=$gcc g++=$gxx"
|
||||
exit 1
|
||||
}
|
||||
|
||||
"$mingwBin" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
"CC=$gcc" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
"CXX=$gxx" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
Write-Host "✅ 已配置 DuckDB cgo 编译器: gcc=$gcc g++=$gxx"
|
||||
|
||||
- name: Verify DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
& "$env:CC" --version
|
||||
& "$env:CXX" --version
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TAG_ARGS=()
|
||||
if [ -n "${{ matrix.wails_tags }}" ]; then
|
||||
TAG_ARGS+=(-tags "${{ matrix.wails_tags }}")
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} -tags "${{ matrix.wails_tags }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
else
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
fi
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} "${TAG_ARGS[@]}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
|
||||
- name: Build Optional Driver Agents
|
||||
if: ${{ matrix.build_optional_agents }}
|
||||
@@ -149,12 +251,20 @@ jobs:
|
||||
TARGET_PLATFORM="${{ matrix.platform }}"
|
||||
GOOS="${TARGET_PLATFORM%%/*}"
|
||||
GOARCH="${TARGET_PLATFORM##*/}"
|
||||
DRIVERS=(mariadb diros sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine)
|
||||
DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
OUTDIR="drivers/${{ matrix.os_name }}"
|
||||
mkdir -p "$OUTDIR"
|
||||
|
||||
for DRIVER in "${DRIVERS[@]}"; do
|
||||
TAG="gonavi_${DRIVER}_driver"
|
||||
BUILD_DRIVER="$DRIVER"
|
||||
if [ "$DRIVER" = "doris" ]; then
|
||||
BUILD_DRIVER="diros"
|
||||
fi
|
||||
if [ "$DRIVER" = "duckdb" ] && [ "$GOOS" = "windows" ] && [ "$GOARCH" != "amd64" ]; then
|
||||
echo "⚠️ 跳过 DuckDB driver(当前平台 ${GOOS}/${GOARCH} 不受支持,仅支持 windows/amd64)"
|
||||
continue
|
||||
fi
|
||||
TAG="gonavi_${BUILD_DRIVER}_driver"
|
||||
OUTPUT="${DRIVER}-driver-agent-${GOOS}-${GOARCH}"
|
||||
if [ "$GOOS" = "windows" ]; then
|
||||
OUTPUT="${OUTPUT}.exe"
|
||||
@@ -162,20 +272,12 @@ jobs:
|
||||
OUTPUT_PATH="${OUTDIR}/${OUTPUT}"
|
||||
echo "🔧 构建 ${OUTPUT_PATH} (tag=${TAG})"
|
||||
if [ "$DRIVER" = "duckdb" ]; then
|
||||
set +e
|
||||
CGO_ENABLED=1 GOOS="$GOOS" GOARCH="$GOARCH" go build \
|
||||
-tags "${TAG}" \
|
||||
-trimpath \
|
||||
-ldflags "-s -w" \
|
||||
-o "${OUTPUT_PATH}" \
|
||||
./cmd/optional-driver-agent
|
||||
DUCKDB_RC=$?
|
||||
set -e
|
||||
if [ "${DUCKDB_RC}" -ne 0 ]; then
|
||||
echo "⚠️ DuckDB 代理构建失败(平台 ${GOOS}/${GOARCH}),跳过该资产,不阻断发布"
|
||||
rm -f "${OUTPUT_PATH}"
|
||||
continue
|
||||
fi
|
||||
else
|
||||
CGO_ENABLED=0 GOOS="$GOOS" GOARCH="$GOARCH" go build \
|
||||
-tags "${TAG}" \
|
||||
@@ -201,6 +303,13 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
|
||||
APP_BIN=$(find "$APP_PATH/Contents/MacOS" -maxdepth 1 -type f | head -n 1)
|
||||
if [ -z "$APP_BIN" ]; then
|
||||
echo "❌ 未找到 macOS 应用主程序!"
|
||||
exit 1
|
||||
fi
|
||||
echo "ℹ️ macOS 产物不执行 UPX 压缩,保留原始主程序。"
|
||||
|
||||
echo "🔏 正在进行 Ad-hoc 签名..."
|
||||
# 注意:Ad-hoc + hardened runtime(--options runtime)在未配置 entitlements 时,
|
||||
@@ -225,7 +334,7 @@ jobs:
|
||||
mv "$DMG_NAME" "../../$FINAL_NAME"
|
||||
|
||||
# Windows Packaging
|
||||
- name: Package Windows Portable Zip
|
||||
- name: Package Windows EXE
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
@@ -236,7 +345,6 @@ jobs:
|
||||
}
|
||||
$target = "${{ matrix.build_name }}"
|
||||
$finalExeName = "GoNavi-$version-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.exe"
|
||||
$finalZipName = "GoNavi-$version-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.zip"
|
||||
|
||||
if (Test-Path "$target.exe") {
|
||||
$finalExe = "$target.exe"
|
||||
@@ -248,11 +356,39 @@ jobs:
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Host "📦 生成 Windows 可执行文件 $finalExeName..."
|
||||
Copy-Item -LiteralPath $finalExe -Destination "..\\..\\$finalExeName" -Force
|
||||
$isArm64Target = "${{ matrix.arch_name }}".ToLowerInvariant() -eq "arm64"
|
||||
if ($isArm64Target) {
|
||||
Write-Warning "⚠️ UPX 当前不支持 win64/arm64,跳过压缩并保留原始 EXE。"
|
||||
$LASTEXITCODE = 0
|
||||
} else {
|
||||
$upxCmd = Get-Command upx -ErrorAction SilentlyContinue
|
||||
if ($null -eq $upxCmd) {
|
||||
Write-Error "❌ 未找到 upx,无法保证 Windows 产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
$beforeBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
Write-Host "🗜️ 使用 UPX 压缩 $finalExe ..."
|
||||
& upx --best --lzma --force $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 压缩失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
& upx -t $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 校验失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
$afterBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
if ($afterBytes -lt $beforeBytes) {
|
||||
$savedBytes = $beforeBytes - $afterBytes
|
||||
Write-Host ("✅ UPX 压缩完成:{0:N2}MB -> {1:N2}MB,减少 {2:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB), ($savedBytes / 1MB))
|
||||
} else {
|
||||
Write-Host ("ℹ️ UPX 压缩完成:{0:N2}MB -> {1:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB))
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "📦 生成 Windows 压缩包 $finalZipName..."
|
||||
Compress-Archive -LiteralPath $finalExe -DestinationPath "..\\..\\$finalZipName" -Force
|
||||
Write-Host "📦 输出 Windows 可执行文件 $finalExeName..."
|
||||
Copy-Item -LiteralPath $finalExe -Destination "..\\..\\$finalExeName" -Force
|
||||
|
||||
# Linux Packaging (tar.gz and AppImage)
|
||||
- name: Package Linux
|
||||
@@ -271,6 +407,17 @@ jobs:
|
||||
fi
|
||||
|
||||
chmod +x "$TARGET"
|
||||
BEFORE_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
echo "🗜️ 正在使用 UPX 压缩 Linux 可执行文件: $TARGET ..."
|
||||
upx --best --lzma --force "$TARGET"
|
||||
upx -t "$TARGET"
|
||||
AFTER_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
if [ "$AFTER_BYTES" -lt "$BEFORE_BYTES" ]; then
|
||||
SAVED_BYTES=$((BEFORE_BYTES - AFTER_BYTES))
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" -v s="$SAVED_BYTES" 'BEGIN { printf "✅ Linux UPX 压缩完成:%.2fMB -> %.2fMB,减少 %.2fMB\n", b/1024/1024, a/1024/1024, s/1024/1024 }'
|
||||
else
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" 'BEGIN { printf "ℹ️ Linux UPX 压缩完成:%.2fMB -> %.2fMB\n", b/1024/1024, a/1024/1024 }'
|
||||
fi
|
||||
|
||||
# 1. Create tar.gz
|
||||
echo "📦 正在打包 $TAR_NAME..."
|
||||
@@ -343,7 +490,6 @@ jobs:
|
||||
path: |
|
||||
GoNavi-*.dmg
|
||||
GoNavi-*.exe
|
||||
GoNavi-*.zip
|
||||
GoNavi-*.tar.gz
|
||||
GoNavi-*.AppImage
|
||||
drivers/**
|
||||
@@ -365,6 +511,38 @@ jobs:
|
||||
- name: List Assets
|
||||
run: ls -R release-assets
|
||||
|
||||
- name: Verify Optional Driver Assets
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cd release-assets
|
||||
|
||||
REQUIRED_FILES=(
|
||||
"drivers/Windows/duckdb-driver-agent-windows-amd64.exe"
|
||||
"drivers/MacOS/duckdb-driver-agent-darwin-amd64"
|
||||
"drivers/MacOS/duckdb-driver-agent-darwin-arm64"
|
||||
"drivers/Linux/duckdb-driver-agent-linux-amd64"
|
||||
"drivers/Windows/clickhouse-driver-agent-windows-amd64.exe"
|
||||
"drivers/MacOS/clickhouse-driver-agent-darwin-amd64"
|
||||
"drivers/MacOS/clickhouse-driver-agent-darwin-arm64"
|
||||
"drivers/Linux/clickhouse-driver-agent-linux-amd64"
|
||||
)
|
||||
|
||||
missing=0
|
||||
for file in "${REQUIRED_FILES[@]}"; do
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "❌ 缺少驱动资产:$file"
|
||||
missing=1
|
||||
else
|
||||
echo "✅ 已找到驱动资产:$file"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$missing" -ne 0 ]; then
|
||||
echo "❌ 可选驱动资产不完整,终止发布"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Package Driver Agents Bundle
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -442,5 +620,6 @@ jobs:
|
||||
files: release-assets/*
|
||||
draft: true
|
||||
make_latest: true
|
||||
generate_release_notes: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
412
.github/workflows/test-build-all-platforms.yml
vendored
Normal file
412
.github/workflows/test-build-all-platforms.yml
vendored
Normal file
@@ -0,0 +1,412 @@
|
||||
name: Test Build All Platforms (Manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
build_label:
|
||||
description: "测试包标识(仅用于文件名)"
|
||||
required: false
|
||||
default: "test"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
concurrency:
|
||||
group: test-build-${{ github.ref }}
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build ${{ matrix.platform }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
platform: darwin/amd64
|
||||
os_name: MacOS
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-darwin-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: macos-latest
|
||||
platform: darwin/arm64
|
||||
os_name: MacOS
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-test-darwin-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/amd64
|
||||
os_name: Windows
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-windows-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/arm64
|
||||
os_name: Windows
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-test-windows-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: ubuntu-22.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-linux-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: "4.0"
|
||||
- os: ubuntu-24.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-linux-amd64-webkit41
|
||||
wails_tags: "webkit2_41"
|
||||
artifact_suffix: "-WebKit41"
|
||||
build_optional_agents: false
|
||||
linux_webkit: "4.1"
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.24'
|
||||
check-latest: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install UPX (Windows)
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
$UPX_VERSION = "4.2.4"
|
||||
$url = "https://github.com/upx/upx/releases/download/v${UPX_VERSION}/upx-${UPX_VERSION}-win64.zip"
|
||||
$zipPath = "$env:RUNNER_TEMP\upx.zip"
|
||||
$extractPath = "$env:RUNNER_TEMP\upx"
|
||||
Write-Host "📥 从 GitHub Releases 下载 UPX v${UPX_VERSION} ..."
|
||||
Invoke-WebRequest -Uri $url -OutFile $zipPath -UseBasicParsing
|
||||
Expand-Archive -Path $zipPath -DestinationPath $extractPath -Force
|
||||
$upxDir = Get-ChildItem -Path $extractPath -Directory | Select-Object -First 1
|
||||
"$($upxDir.FullName)" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
$upxCmd = Join-Path $upxDir.FullName "upx.exe"
|
||||
if (!(Test-Path $upxCmd)) {
|
||||
Write-Error "❌ 未检测到 upx,无法保证 Windows 测试产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
& $upxCmd --version
|
||||
|
||||
- name: Install Linux Dependencies
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev
|
||||
|
||||
if [ "${{ matrix.linux_webkit }}" = "4.1" ]; then
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libsoup-3.0-dev
|
||||
else
|
||||
sudo apt-get install -y libwebkit2gtk-4.0-dev
|
||||
fi
|
||||
|
||||
sudo apt-get install -y upx-ucl || sudo apt-get install -y upx
|
||||
upx --version
|
||||
|
||||
sudo apt-get install -y libfuse2 || sudo apt-get install -y libfuse2t64 || true
|
||||
|
||||
LINUXDEPLOY_URL="https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
|
||||
PLUGIN_URL="https://github.com/linuxdeploy/linuxdeploy-plugin-gtk/releases/download/continuous/linuxdeploy-plugin-gtk-x86_64.AppImage"
|
||||
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 -O /tmp/linuxdeploy "$LINUXDEPLOY_URL" || {
|
||||
echo "skip-appimage=true" >> "$GITHUB_ENV"
|
||||
}
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 -O /tmp/linuxdeploy-plugin-gtk "$PLUGIN_URL" || {
|
||||
echo "skip-appimage=true" >> "$GITHUB_ENV"
|
||||
}
|
||||
|
||||
if [ "${skip-appimage:-false}" != "true" ]; then
|
||||
chmod +x /tmp/linuxdeploy /tmp/linuxdeploy-plugin-gtk
|
||||
fi
|
||||
|
||||
- name: Install Wails
|
||||
run: go install github.com/wailsapp/wails/v2/cmd/wails@v2.11.0
|
||||
|
||||
- name: Setup MSYS2 Toolchain For DuckDB (Windows AMD64)
|
||||
id: msys2_duckdb
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
continue-on-error: true
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: UCRT64
|
||||
update: true
|
||||
install: >-
|
||||
mingw-w64-ucrt-x86_64-gcc
|
||||
|
||||
- name: Configure DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
function Find-MingwBin([string[]]$candidates) {
|
||||
foreach ($bin in $candidates) {
|
||||
if ([string]::IsNullOrWhiteSpace($bin)) {
|
||||
continue
|
||||
}
|
||||
$gcc = Join-Path $bin 'gcc.exe'
|
||||
$gxx = Join-Path $bin 'g++.exe'
|
||||
if ((Test-Path $gcc) -and (Test-Path $gxx)) {
|
||||
return $bin
|
||||
}
|
||||
}
|
||||
return $null
|
||||
}
|
||||
|
||||
$msys2Location = "${{ steps.msys2_duckdb.outputs['msys2-location'] }}"
|
||||
$candidateBins = @()
|
||||
if (-not [string]::IsNullOrWhiteSpace($msys2Location)) {
|
||||
$candidateBins += Join-Path $msys2Location 'ucrt64\bin'
|
||||
}
|
||||
$candidateBins += @(
|
||||
'C:\msys64\ucrt64\bin',
|
||||
'D:\a\_temp\msys64\ucrt64\bin'
|
||||
)
|
||||
$candidateBins = @($candidateBins | Select-Object -Unique)
|
||||
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
if (-not $mingwBin) {
|
||||
Write-Error "❌ 未找到可用的 DuckDB UCRT64 编译器。"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$gcc = Join-Path $mingwBin 'gcc.exe'
|
||||
$gxx = Join-Path $mingwBin 'g++.exe'
|
||||
"$mingwBin" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
"CC=$gcc" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
"CXX=$gxx" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
|
||||
- name: Build App
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
BUILD_LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$BUILD_LABEL" ]; then
|
||||
BUILD_LABEL="test"
|
||||
fi
|
||||
APP_VERSION="${BUILD_LABEL}-${GITHUB_RUN_NUMBER}"
|
||||
if [ -n "${{ matrix.wails_tags }}" ]; then
|
||||
wails build -platform "${{ matrix.platform }}" -clean -o "${{ matrix.build_name }}" -tags "${{ matrix.wails_tags }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
else
|
||||
wails build -platform "${{ matrix.platform }}" -clean -o "${{ matrix.build_name }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
fi
|
||||
|
||||
- name: Build Optional Driver Agents
|
||||
if: ${{ matrix.build_optional_agents }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TARGET_PLATFORM="${{ matrix.platform }}"
|
||||
GOOS="${TARGET_PLATFORM%%/*}"
|
||||
GOARCH="${TARGET_PLATFORM##*/}"
|
||||
DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
OUTDIR="drivers/${{ matrix.os_name }}"
|
||||
mkdir -p "$OUTDIR"
|
||||
|
||||
for DRIVER in "${DRIVERS[@]}"; do
|
||||
BUILD_DRIVER="$DRIVER"
|
||||
if [ "$DRIVER" = "doris" ]; then
|
||||
BUILD_DRIVER="diros"
|
||||
fi
|
||||
if [ "$DRIVER" = "duckdb" ] && [ "$GOOS" = "windows" ] && [ "$GOARCH" != "amd64" ]; then
|
||||
echo "跳过 DuckDB driver: ${GOOS}/${GOARCH}"
|
||||
continue
|
||||
fi
|
||||
TAG="gonavi_${BUILD_DRIVER}_driver"
|
||||
OUTPUT="${DRIVER}-driver-agent-${GOOS}-${GOARCH}"
|
||||
if [ "$GOOS" = "windows" ]; then
|
||||
OUTPUT="${OUTPUT}.exe"
|
||||
fi
|
||||
OUTPUT_PATH="${OUTDIR}/${OUTPUT}"
|
||||
if [ "$DRIVER" = "duckdb" ]; then
|
||||
CGO_ENABLED=1 GOOS="$GOOS" GOARCH="$GOARCH" go build -tags "$TAG" -trimpath -ldflags "-s -w" -o "$OUTPUT_PATH" ./cmd/optional-driver-agent
|
||||
else
|
||||
CGO_ENABLED=0 GOOS="$GOOS" GOARCH="$GOARCH" go build -tags "$TAG" -trimpath -ldflags "-s -w" -o "$OUTPUT_PATH" ./cmd/optional-driver-agent
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Package macOS
|
||||
if: contains(matrix.platform, 'darwin')
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
brew install create-dmg
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
cd build/bin
|
||||
APP_PATH=$(find . -maxdepth 1 -name "*.app" | head -n 1)
|
||||
if [ -z "$APP_PATH" ]; then
|
||||
echo "未找到 .app 应用包"
|
||||
exit 1
|
||||
fi
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
APP_BIN=$(find "$APP_PATH/Contents/MacOS" -maxdepth 1 -type f | head -n 1)
|
||||
if [ -z "$APP_BIN" ]; then
|
||||
echo "未找到 macOS 应用主程序"
|
||||
exit 1
|
||||
fi
|
||||
echo "ℹ️ macOS 产物不执行 UPX 压缩,保留原始主程序。"
|
||||
codesign --force --deep --sign - "$APP_NAME"
|
||||
ZIP_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}-run${GITHUB_RUN_NUMBER}.zip"
|
||||
DMG_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}-run${GITHUB_RUN_NUMBER}.dmg"
|
||||
mkdir -p ../../artifacts
|
||||
ditto -c -k --sequesterRsrc --keepParent "$APP_NAME" "../../artifacts/$ZIP_NAME"
|
||||
create-dmg \
|
||||
--volname "GoNavi Test Installer" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_NAME" 200 190 \
|
||||
--hide-extension "$APP_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DMG_NAME" \
|
||||
"$APP_NAME"
|
||||
mv "$DMG_NAME" "../../artifacts/$DMG_NAME"
|
||||
shasum -a 256 "../../artifacts/$ZIP_NAME" > "../../artifacts/$ZIP_NAME.sha256"
|
||||
shasum -a 256 "../../artifacts/$DMG_NAME" > "../../artifacts/$DMG_NAME.sha256"
|
||||
|
||||
- name: Package Windows
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
$label = "${{ inputs.build_label }}"
|
||||
if ([string]::IsNullOrWhiteSpace($label)) { $label = 'test' }
|
||||
Set-Location build/bin
|
||||
$target = "${{ matrix.build_name }}"
|
||||
$finalExeName = "GoNavi-$label-${{ matrix.os_name }}-${{ matrix.arch_name }}-run$env:GITHUB_RUN_NUMBER.exe"
|
||||
if (Test-Path "$target.exe") {
|
||||
$finalExe = "$target.exe"
|
||||
} elseif (Test-Path "$target") {
|
||||
Rename-Item -Path "$target" -NewName "$target.exe"
|
||||
$finalExe = "$target.exe"
|
||||
} else {
|
||||
Write-Error "未找到构建产物 '$target'"
|
||||
exit 1
|
||||
}
|
||||
$isArm64Target = "${{ matrix.arch_name }}".ToLowerInvariant() -eq "arm64"
|
||||
if ($isArm64Target) {
|
||||
Write-Warning "⚠️ UPX 当前不支持 win64/arm64,跳过压缩并保留原始 EXE。"
|
||||
$LASTEXITCODE = 0
|
||||
} else {
|
||||
$upxCmd = Get-Command upx -ErrorAction SilentlyContinue
|
||||
if ($null -eq $upxCmd) {
|
||||
Write-Error "❌ 未找到 upx,无法保证 Windows 测试产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
$beforeBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
Write-Host "🗜️ 使用 UPX 压缩 $finalExe ..."
|
||||
& upx --best --lzma --force $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 压缩失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
& upx -t $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 校验失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
$afterBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
if ($afterBytes -lt $beforeBytes) {
|
||||
$savedBytes = $beforeBytes - $afterBytes
|
||||
Write-Host ("✅ UPX 压缩完成:{0:N2}MB -> {1:N2}MB,减少 {2:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB), ($savedBytes / 1MB))
|
||||
} else {
|
||||
Write-Host ("ℹ️ UPX 压缩完成:{0:N2}MB -> {1:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB))
|
||||
}
|
||||
}
|
||||
New-Item -ItemType Directory -Force -Path ..\..\artifacts | Out-Null
|
||||
Copy-Item -LiteralPath $finalExe -Destination "..\..\artifacts\$finalExeName" -Force
|
||||
Get-FileHash "..\..\artifacts\$finalExeName" -Algorithm SHA256 | ForEach-Object { "{0} *{1}" -f $_.Hash.ToLower(), (Split-Path $_.Path -Leaf) } | Out-File "..\..\artifacts\$finalExeName.sha256" -Encoding ascii
|
||||
|
||||
- name: Package Linux
|
||||
if: contains(matrix.platform, 'linux')
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
cd build/bin
|
||||
TARGET="${{ matrix.build_name }}"
|
||||
TAR_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}-run${GITHUB_RUN_NUMBER}.tar.gz"
|
||||
APPIMAGE_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}-run${GITHUB_RUN_NUMBER}.AppImage"
|
||||
mkdir -p ../../artifacts
|
||||
|
||||
if [ ! -f "$TARGET" ]; then
|
||||
echo "未找到构建产物 '$TARGET'"
|
||||
exit 1
|
||||
fi
|
||||
chmod +x "$TARGET"
|
||||
BEFORE_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
echo "🗜️ 使用 UPX 压缩 Linux 可执行文件: $TARGET ..."
|
||||
upx --best --lzma --force "$TARGET"
|
||||
upx -t "$TARGET"
|
||||
AFTER_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
if [ "$AFTER_BYTES" -lt "$BEFORE_BYTES" ]; then
|
||||
SAVED_BYTES=$((BEFORE_BYTES - AFTER_BYTES))
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" -v s="$SAVED_BYTES" 'BEGIN { printf "✅ Linux UPX 压缩完成:%.2fMB -> %.2fMB,减少 %.2fMB\n", b/1024/1024, a/1024/1024, s/1024/1024 }'
|
||||
else
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" 'BEGIN { printf "ℹ️ Linux UPX 压缩完成:%.2fMB -> %.2fMB\n", b/1024/1024, a/1024/1024 }'
|
||||
fi
|
||||
tar -czvf "../../artifacts/$TAR_NAME" "$TARGET"
|
||||
sha256sum "../../artifacts/$TAR_NAME" > "../../artifacts/$TAR_NAME.sha256"
|
||||
|
||||
if [ "${skip-appimage:-false}" = "true" ]; then
|
||||
echo "跳过 AppImage 打包"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
mkdir -p AppDir/usr/bin AppDir/usr/share/applications AppDir/usr/share/icons/hicolor/256x256/apps
|
||||
cp "$TARGET" AppDir/usr/bin/gonavi
|
||||
printf '%s\n' '[Desktop Entry]' 'Name=GoNavi' 'Exec=gonavi' 'Icon=gonavi' 'Type=Application' 'Categories=Development;Database;' 'Comment=Database Management Tool' > AppDir/usr/share/applications/gonavi.desktop
|
||||
cp AppDir/usr/share/applications/gonavi.desktop AppDir/gonavi.desktop
|
||||
if [ -f "../../build/appicon.png" ]; then
|
||||
cp "../../build/appicon.png" AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
cp "../../build/appicon.png" AppDir/gonavi.png
|
||||
else
|
||||
touch AppDir/gonavi.png
|
||||
cp AppDir/gonavi.png AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
fi
|
||||
export DEPLOY_GTK_VERSION=3
|
||||
/tmp/linuxdeploy --appdir AppDir --plugin gtk --output appimage || exit 0
|
||||
mv GoNavi*.AppImage "$APPIMAGE_NAME" 2>/dev/null || exit 0
|
||||
mv "$APPIMAGE_NAME" "../../artifacts/$APPIMAGE_NAME"
|
||||
sha256sum "../../artifacts/$APPIMAGE_NAME" > "../../artifacts/$APPIMAGE_NAME.sha256"
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-build-${{ matrix.build_name }}-run${{ github.run_number }}
|
||||
path: |
|
||||
artifacts/*
|
||||
drivers/**
|
||||
if-no-files-found: error
|
||||
retention-days: 7
|
||||
94
.github/workflows/test-macos-build.yml
vendored
Normal file
94
.github/workflows/test-macos-build.yml
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
name: Test Build macOS (Manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
build_label:
|
||||
description: "测试包标识(仅用于文件名)"
|
||||
required: false
|
||||
default: "test"
|
||||
push:
|
||||
branches:
|
||||
- feature/kingbase_opt
|
||||
paths:
|
||||
- ".github/workflows/test-macos-build.yml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
build-macos:
|
||||
name: Build macOS ${{ matrix.arch }}
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: darwin/amd64
|
||||
arch: amd64
|
||||
- platform: darwin/arm64
|
||||
arch: arm64
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24.3"
|
||||
check-latest: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Install Wails
|
||||
run: go install github.com/wailsapp/wails/v2/cmd/wails@v2.11.0
|
||||
|
||||
- name: Build App
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OUTPUT_NAME="gonavi-test-${{ matrix.arch }}"
|
||||
BUILD_LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$BUILD_LABEL" ]; then
|
||||
BUILD_LABEL="test"
|
||||
fi
|
||||
APP_VERSION="${BUILD_LABEL}-${GITHUB_RUN_NUMBER}"
|
||||
wails build \
|
||||
-platform "${{ matrix.platform }}" \
|
||||
-clean \
|
||||
-o "$OUTPUT_NAME" \
|
||||
-ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
|
||||
- name: Package Zip
|
||||
run: |
|
||||
set -euo pipefail
|
||||
APP_PATH="build/bin/gonavi-test-${{ matrix.arch }}.app"
|
||||
if [ ! -d "$APP_PATH" ]; then
|
||||
APP_PATH=$(find build/bin -maxdepth 1 -name "*.app" | head -n 1 || true)
|
||||
fi
|
||||
if [ -z "$APP_PATH" ] || [ ! -d "$APP_PATH" ]; then
|
||||
echo "未找到 .app 产物"
|
||||
ls -la build/bin || true
|
||||
exit 1
|
||||
fi
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
ZIP_NAME="GoNavi-${LABEL}-macos-${{ matrix.arch }}-run${GITHUB_RUN_NUMBER}.zip"
|
||||
mkdir -p artifacts
|
||||
ditto -c -k --sequesterRsrc --keepParent "$APP_PATH" "artifacts/$ZIP_NAME"
|
||||
shasum -a 256 "artifacts/$ZIP_NAME" > "artifacts/$ZIP_NAME.sha256"
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: gonavi-macos-${{ matrix.arch }}-run${{ github.run_number }}
|
||||
path: artifacts/*
|
||||
if-no-files-found: error
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -17,5 +17,10 @@ dist/
|
||||
GoNavi-Wails
|
||||
GoNavi-Wails.exe
|
||||
.ace-tool/
|
||||
.superpowers/
|
||||
.claude/
|
||||
tmpclaude-*
|
||||
.gemini/
|
||||
**/tmpclaude-*
|
||||
|
||||
CLAUDE.md
|
||||
**/CLAUDE.md
|
||||
|
||||
155
CONTRIBUTING.md
Normal file
155
CONTRIBUTING.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Contributing Guide
|
||||
|
||||
Thank you for contributing to this project.
|
||||
|
||||
This repository follows a release-first workflow: `main` is the default public branch, while releases are prepared through `release/*` branches.
|
||||
|
||||
---
|
||||
|
||||
## Branch Model
|
||||
|
||||
- `main`: stable release branch and default branch
|
||||
- `dev`: day-to-day integration branch for maintainers
|
||||
- `release/*`: release preparation branches for maintainers
|
||||
- Recommended branch names for external contributors:
|
||||
- `fix/*`: bug fixes
|
||||
- `feature/*`: new features or enhancements
|
||||
|
||||
Maintainer release flow:
|
||||
|
||||
```text
|
||||
feature/* / fix/* -> dev -> release/* -> main -> tag(vX.Y.Z)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## How External Contributors Should Open Pull Requests
|
||||
|
||||
Whether your branch is `fix/*` or `feature/*`, external contributors should **open pull requests directly against `main`**.
|
||||
|
||||
Reasons:
|
||||
|
||||
- `main` is the default branch, so the PR entry point is clearer
|
||||
- merged contributions are immediately visible on the default branch
|
||||
- maintainers can handle downstream sync and release preparation in one place
|
||||
|
||||
Recommended flow:
|
||||
|
||||
1. Fork this repository
|
||||
2. Create a branch in your fork (`fix/*` or `feature/*` is recommended)
|
||||
3. Make your changes and perform basic self-checks
|
||||
4. Push the branch to your fork
|
||||
5. Open a pull request against the `main` branch of this repository
|
||||
|
||||
---
|
||||
|
||||
## Pull Request Requirements
|
||||
|
||||
Please keep each pull request focused, reviewable, and easy to validate.
|
||||
|
||||
Recommended expectations:
|
||||
|
||||
- one pull request should address one logical change
|
||||
- use a clear title that explains the purpose
|
||||
- include the following in the description:
|
||||
- background and problem statement
|
||||
- key changes
|
||||
- impact scope
|
||||
- validation method
|
||||
- include screenshots or recordings for UI changes when helpful
|
||||
- explicitly mention risk and rollback notes for compatibility, data, or build-chain changes
|
||||
|
||||
---
|
||||
|
||||
## Merge Strategy for Maintainers
|
||||
|
||||
Pull requests merged into `main` should generally use **Squash and merge**.
|
||||
|
||||
Reasons:
|
||||
|
||||
- keeps `main` history clean and linear
|
||||
- maps each PR to a single commit on `main`
|
||||
- reduces release, audit, and rollback complexity
|
||||
|
||||
---
|
||||
|
||||
## Maintainer Sync Rules
|
||||
|
||||
Because external pull requests are merged directly into `main`, maintainers must sync `main` back to development and release branches to avoid branch drift.
|
||||
|
||||
### 1. Sync `main` -> `dev` (required)
|
||||
|
||||
The automatic GitHub Actions sync workflow has been removed.
|
||||
Maintainers should sync `main` back to `dev` manually when needed:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
### 2. Create `release/*` from `dev`
|
||||
|
||||
Before a release, create a release branch from `dev`, for example:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git checkout -b release/v0.6.0
|
||||
git push -u origin release/v0.6.0
|
||||
```
|
||||
|
||||
### 3. Release from `release/*` back to `main`
|
||||
|
||||
When release preparation is complete, merge the release branch back into `main` and create a tag:
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
git merge release/v0.6.0
|
||||
git push
|
||||
git tag v0.6.0
|
||||
git push origin v0.6.0
|
||||
```
|
||||
|
||||
### 4. Sync `main` back to `dev` after release
|
||||
|
||||
After the release, sync `main` back to `dev` again using the same manual commands (the automatic sync workflow has been removed):
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Commit Message Recommendation
|
||||
|
||||
Keep commit messages clear and easy to audit.
|
||||
|
||||
Recommended format:
|
||||
|
||||
```text
|
||||
emoji type(scope): concise description
|
||||
```
|
||||
|
||||
Examples:
|
||||
|
||||
```text
|
||||
🔧 fix(ci): fix DuckDB driver toolchain on Windows AMD64
|
||||
✨ feat(redis): add Stream data browsing support
|
||||
♻️ refactor(datagrid): optimize large-table horizontal scrolling and rendering
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Additional Notes
|
||||
|
||||
- Please include validation results for documentation, build-chain, or driver compatibility changes
|
||||
- For larger changes, opening an issue or draft PR first is recommended
|
||||
- Maintainers may ask contributors to narrow the scope if the change conflicts with the current project direction
|
||||
|
||||
Thank you for contributing.
|
||||
155
CONTRIBUTING.zh-CN.md
Normal file
155
CONTRIBUTING.zh-CN.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# 贡献指南
|
||||
|
||||
感谢你对本项目的贡献。
|
||||
|
||||
本项目采用“发布优先(`main` 为默认分支)+ `release/*` 分支发版”的协作模型。为减少分支漂移与 PR 处理成本,请在提交贡献前先阅读本指南。
|
||||
|
||||
---
|
||||
|
||||
## 分支模型
|
||||
|
||||
- `main`:稳定发布分支,也是仓库默认分支
|
||||
- `dev`:日常开发集成分支,主要供维护者使用
|
||||
- `release/*`:发布准备分支,主要供维护者使用
|
||||
- 外部贡献者建议使用以下分支命名:
|
||||
- `fix/*`:问题修复
|
||||
- `feature/*`:功能新增或增强
|
||||
|
||||
维护者发布流转如下:
|
||||
|
||||
```text
|
||||
feature/* / fix/* -> dev -> release/* -> main -> tag(vX.Y.Z)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 外部贡献者如何提 Pull Request
|
||||
|
||||
无论是 `fix/*` 还是 `feature/*`,**外部贡献者统一直接向 `main` 发起 Pull Request**。
|
||||
|
||||
这样做的原因:
|
||||
|
||||
- `main` 是默认分支,PR 入口更直观
|
||||
- 合并后贡献会直接体现在默认分支
|
||||
- 便于维护者统一做后续同步与发版整理
|
||||
|
||||
建议流程:
|
||||
|
||||
1. Fork 本仓库
|
||||
2. 从你自己的仓库创建分支(建议命名为 `fix/*` 或 `feature/*`)
|
||||
3. 完成代码修改,并进行必要自检
|
||||
4. 推送到你的远程分支
|
||||
5. 向本仓库的 `main` 分支发起 Pull Request
|
||||
|
||||
---
|
||||
|
||||
## Pull Request 要求
|
||||
|
||||
请尽量保证 PR 单一、清晰、可审核。
|
||||
|
||||
建议遵循以下要求:
|
||||
|
||||
- 一个 PR 只解决一类问题,避免混入无关改动
|
||||
- 标题清晰说明改动目的
|
||||
- 描述中说明:
|
||||
- 背景与问题
|
||||
- 变更点
|
||||
- 影响范围
|
||||
- 验证方式
|
||||
- 如涉及 UI 调整,建议附截图或录屏
|
||||
- 如涉及兼容性、数据变更或构建链路调整,请明确说明风险和回滚方式
|
||||
|
||||
---
|
||||
|
||||
## PR 合并策略(维护者)
|
||||
|
||||
`main` 分支上的 PR 建议使用 **Squash and merge**。
|
||||
|
||||
原因:
|
||||
|
||||
- 保持 `main` 历史干净、线性
|
||||
- 每个 PR 在 `main` 上对应一个清晰提交
|
||||
- 降低发布排查与回滚成本
|
||||
|
||||
---
|
||||
|
||||
## 维护者同步规则
|
||||
|
||||
由于外部 PR 会直接合入 `main`,维护者必须及时将 `main` 的变更同步到开发与发布分支,避免分支漂移。
|
||||
|
||||
### 1. main → dev 同步(必做)
|
||||
|
||||
仓库已移除 GitHub Actions 自动回灌 workflow。
|
||||
当前统一采用手动方式将 `main` 同步回 `dev`:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
### 2. 发版前从 dev 切 release/*
|
||||
|
||||
发布前由维护者基于 `dev` 创建发布分支,例如:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git checkout -b release/v0.6.0
|
||||
git push -u origin release/v0.6.0
|
||||
```
|
||||
|
||||
### 3. release/* → main 发版
|
||||
|
||||
发布准备完成后,将 `release/*` 合并回 `main`,并打标签发布:
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
git merge release/v0.6.0
|
||||
git push
|
||||
git tag v0.6.0
|
||||
git push origin v0.6.0
|
||||
```
|
||||
|
||||
### 4. main 回流到 dev(发版后必做)
|
||||
|
||||
发布完成后,同样按照上文的手动方式将 `main` 同步回 `dev`,确保开发线与发布线一致(自动回灌 workflow 已移除):
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 提交建议
|
||||
|
||||
建议保持提交信息简洁、明确,便于维护者审查与后续追踪。
|
||||
|
||||
推荐格式:
|
||||
|
||||
```text
|
||||
emoji type(scope): 中文描述
|
||||
```
|
||||
|
||||
示例:
|
||||
|
||||
```text
|
||||
🔧 fix(ci): 修复 Windows AMD64 下 DuckDB 驱动构建工具链
|
||||
✨ feat(redis): 新增 Stream 类型数据浏览支持
|
||||
♻️ refactor(datagrid): 优化大表横向滚动与渲染结构
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 其他说明
|
||||
|
||||
- 文档、构建链路、驱动兼容性相关改动,请尽量附带验证结果
|
||||
- 若改动较大,建议先提 Issue 或 Draft PR,先对齐方案再实施
|
||||
- 如提交内容与项目当前架构方向冲突,维护者可能要求收敛范围后再合并
|
||||
|
||||
感谢你的贡献。
|
||||
229
README.md
229
README.md
@@ -1,4 +1,4 @@
|
||||
# GoNavi - 现代化的轻量级数据库管理工具
|
||||
# GoNavi - A Modern Lightweight Database Client
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
@@ -6,11 +6,51 @@
|
||||
[](LICENSE)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**GoNavi** 是一款基于 **Wails (Go)** 和 **React** 构建的现代化、高性能、跨平台数据库管理客户端。它旨在提供如原生应用般流畅的用户体验,同时保持极低的资源占用。
|
||||
**Language**: English | [简体中文](README.zh-CN.md)
|
||||
|
||||
相比于 Electron 应用,GoNavi 的体积更小(~10MB),启动速度更快,内存占用更低。
|
||||
GoNavi is a modern, high-performance, cross-platform database client built with **Wails (Go)** and **React**.
|
||||
It delivers native-like responsiveness with low resource usage.
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
Compared with many Electron-based clients, GoNavi typically has a smaller binary (around 10MB), starts faster, and uses less memory.
|
||||
|
||||
---
|
||||
|
||||
## Project Overview
|
||||
|
||||
GoNavi is designed for developers and DBAs who need a unified desktop experience across multiple databases.
|
||||
|
||||
- **Native-performance architecture**: Wails (Go + WebView) with lightweight runtime overhead.
|
||||
- **Large dataset usability**: virtualized rendering and optimized DataGrid workflows for high-volume tables.
|
||||
- **Unified connectivity**: URI build/parse, SSH tunnel, proxy support, and on-demand driver activation.
|
||||
- **Production-oriented workflow**: SQL editor, object management, batch export/backup, sync tools, execution logs, and update checks.
|
||||
|
||||
## Supported Data Sources
|
||||
|
||||
> `Built-in`: available out of the box.
|
||||
> `Optional driver agent`: install/enable via Driver Manager first.
|
||||
|
||||
| Category | Data Source | Driver Mode | Typical Capabilities |
|
||||
|---|---|---|---|
|
||||
| Relational | MySQL | Built-in | Schema browsing, SQL query, data editing, export/backup |
|
||||
| Relational | PostgreSQL | Built-in | Schema browsing, SQL query, data editing, object management |
|
||||
| Relational | Oracle | Built-in | Query execution, object browsing, data editing |
|
||||
| Cache | Redis | Built-in | Key browsing, command execution, encoding/view switch |
|
||||
| Relational | MariaDB | Optional driver agent | Querying, object management, data editing |
|
||||
| Relational | Doris | Optional driver agent | Querying, object browsing, SQL execution |
|
||||
| Search | Sphinx | Optional driver agent | SphinxQL querying and object browsing |
|
||||
| Relational | SQL Server | Optional driver agent | Schema browsing, SQL query, object management |
|
||||
| File-based | SQLite | Optional driver agent | Local DB browsing, editing, export |
|
||||
| File-based | DuckDB | Optional driver agent | Large-table query, pagination, file-DB workflow |
|
||||
| Domestic DB | Dameng | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | Kingbase | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | HighGo | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | Vastbase | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Document | MongoDB | Optional driver agent | Document query, collection browsing, connection management |
|
||||
| Time-series | TDengine | Optional driver agent | Time-series schema browsing and querying |
|
||||
| Columnar Analytics | ClickHouse | Optional driver agent | Analytical query, object browsing, SQL execution |
|
||||
| Extensibility | Custom Driver/DSN | Custom | Extend to more data sources via Driver + DSN |
|
||||
|
||||
<h2 align="center">📸 Screenshots</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
@@ -24,137 +64,124 @@
|
||||
|
||||
---
|
||||
|
||||
## ✨ 核心特性
|
||||
## Key Features
|
||||
|
||||
### 🚀 极致性能
|
||||
- **零卡顿交互**:采用独创的 "幽灵拖拽" (Ghost Resizing) 技术,在包含数万行数据的表格中调整列宽,依然保持 60fps+ 的丝滑体验。
|
||||
- **虚拟滚动**:轻松处理海量数据展示,拒绝卡顿。
|
||||
### Performance
|
||||
- **Smooth interaction under load**: optimized table interaction (including column resize workflow on large datasets).
|
||||
- **Virtualized rendering**: keeps large result sets responsive.
|
||||
|
||||
### 🔌 多数据库支持
|
||||
- **MySQL**:完整支持,涵盖数据编辑、结构管理与导入导出。
|
||||
- **PostgreSQL**:数据查看与编辑支持,事务提交能力持续完善。
|
||||
- **SQLite**:本地文件数据库支持。
|
||||
- **Oracle**:基础数据访问与编辑支持。
|
||||
- **Dameng(达梦)**:基础数据访问与编辑支持。
|
||||
- **Kingbase(人大金仓)**:基础数据访问与编辑支持。
|
||||
- **TDengine**:时序数据库连接、库表浏览与 SQL 查询支持。
|
||||
- **Redis**:Key/Value 浏览、命令执行、视图与编码切换。
|
||||
- **自定义驱动**:支持配置 Driver/DSN 接入更多数据源。
|
||||
- **SSH 隧道**:内置 SSH 隧道支持,安全连接内网数据库。
|
||||
### Data Management (DataGrid)
|
||||
- In-place cell editing.
|
||||
- Batch insert/update/delete with transaction-oriented submit/rollback.
|
||||
- Large-field popup editor.
|
||||
- Context actions (set NULL, copy/export, etc.).
|
||||
- Smart read/write mode switching based on query context.
|
||||
- Export formats: CSV, Excel (XLSX), JSON, Markdown.
|
||||
|
||||
### 📊 强大的数据管理 (DataGrid)
|
||||
- **所见即所得编辑**:直接在表格中双击单元格修改数据。
|
||||
- **批量事务操作**:支持批量新增、修改、删除,一键提交或回滚事务。
|
||||
- **大字段编辑**:双击大字段自动打开弹窗编辑器,避免卡顿。
|
||||
- **右键上下文菜单**:快速设置 NULL、复制/导出等操作。
|
||||
- **智能上下文**:自动识别单表查询,解锁编辑功能;复杂查询自动切换为只读模式。
|
||||
- **批量导出/备份**:支持表与数据库的批量导出/备份。
|
||||
- **数据导出**:支持 CSV、Excel (XLSX)、JSON、Markdown 等格式。
|
||||
### SQL Editor
|
||||
- Monaco Editor core.
|
||||
- Context-aware completion for databases/tables/columns.
|
||||
- Multi-tab query workflow.
|
||||
|
||||
### 🧰 批量导出/备份
|
||||
- **数据库批量导出**:支持结构导出与结构+数据备份。
|
||||
- **表批量导出**:支持多表一键导出/备份。
|
||||
- **智能上下文检测**:自动判断目标范围,避免误操作。
|
||||
### Batch Export / Backup
|
||||
- Database-level and table-level batch export/backup.
|
||||
- Scope-aware operation flow to reduce mistakes.
|
||||
|
||||
### 🧩 Redis 视图与编码
|
||||
- **视图模式切换**:自动/原始文本/UTF-8/十六进制多模式显示。
|
||||
- **智能解码**:针对二进制值进行 UTF-8 质量判定与中文字符识别。
|
||||
- **命令执行**:内置命令面板快速操作。
|
||||
### Connectivity
|
||||
- URI generation/parsing.
|
||||
- SSH tunnel support.
|
||||
- Proxy support.
|
||||
- Config import/export (JSON).
|
||||
- Optional driver management and activation.
|
||||
|
||||
### 🔄 数据同步与导入导出
|
||||
- **连接配置导入/导出**:支持配置 JSON 导入导出,便于团队共享。
|
||||
- **数据同步**:内置数据同步面板,支持跨库同步任务配置。
|
||||
### Redis Tools
|
||||
- Multi-view value rendering (auto/raw text/UTF-8/hex).
|
||||
- Built-in command execution panel.
|
||||
|
||||
### 🆙 在线更新
|
||||
- **自动更新**:启动/定时/手动检查更新,自动下载并提示重启完成更新。
|
||||
### Observability and Update
|
||||
- SQL execution logs with timing information.
|
||||
- Startup/scheduled/manual update checks.
|
||||
|
||||
### 🧾 可观测性
|
||||
- **SQL 执行日志**:实时查看 SQL 与执行耗时,便于排障与优化。
|
||||
|
||||
### 📝 智能 SQL 编辑器
|
||||
- **Monaco Editor 内核**:集成 VS Code 同款编辑器,体验极佳。
|
||||
- **智能补全**:自动感知当前连接上下文,提供数据库、表名、字段名的实时补全。
|
||||
- **多标签页**:支持多窗口并行操作,像浏览器一样管理你的查询会话。
|
||||
|
||||
### 🎨 现代化 UI
|
||||
- **Ant Design 5**:企业级 UI 设计语言。
|
||||
- **暗黑模式**:内置深色/浅色主题切换,适应不同光照环境。
|
||||
- **响应式布局**:灵活的侧边栏与布局调整。
|
||||
### UI/UX
|
||||
- Ant Design 5 based interface.
|
||||
- Light/Dark themes.
|
||||
- Flexible sidebar and layout behavior.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ 技术栈
|
||||
## Tech Stack
|
||||
|
||||
* **后端 (Backend)**: Go 1.24 + Wails v2
|
||||
* **前端 (Frontend)**: React 18 + TypeScript + Vite
|
||||
* **UI 框架**: Ant Design 5
|
||||
* **状态管理**: Zustand
|
||||
* **编辑器**: Monaco Editor
|
||||
- **Backend**: Go 1.24 + Wails v2
|
||||
- **Frontend**: React 18 + TypeScript + Vite
|
||||
- **UI**: Ant Design 5
|
||||
- **State Management**: Zustand
|
||||
- **Editor**: Monaco Editor
|
||||
|
||||
---
|
||||
|
||||
## 📦 安装与运行
|
||||
## Installation and Run
|
||||
|
||||
### 前置要求
|
||||
* [Go](https://go.dev/dl/) 1.21+
|
||||
* [Node.js](https://nodejs.org/) 18+
|
||||
* [Wails CLI](https://wails.io/docs/gettingstarted/installation): `go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
### Prerequisites
|
||||
- [Go](https://go.dev/dl/) 1.21+
|
||||
- [Node.js](https://nodejs.org/) 18+
|
||||
- [Wails CLI](https://wails.io/docs/gettingstarted/installation):
|
||||
`go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
|
||||
### 开发模式
|
||||
### Development Mode
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
# Clone
|
||||
git clone https://github.com/Syngnat/GoNavi.git
|
||||
cd GoNavi
|
||||
|
||||
# 启动开发服务器 (支持热重载)
|
||||
# Start development with hot reload
|
||||
wails dev
|
||||
```
|
||||
|
||||
### 编译构建
|
||||
### Build
|
||||
|
||||
```bash
|
||||
# 构建当前平台的可执行文件
|
||||
# Build for current platform
|
||||
wails build
|
||||
|
||||
# 清理并构建 (推荐发布前使用)
|
||||
# Clean build (recommended before release)
|
||||
wails build -clean
|
||||
```
|
||||
|
||||
构建产物将位于 `build/bin` 目录下。
|
||||
Artifacts are generated in `build/bin`.
|
||||
|
||||
### 跨平台编译 (GitHub Actions)
|
||||
### Cross-Platform Release (GitHub Actions)
|
||||
|
||||
本项目内置了 GitHub Actions 流水线,Push `v*` 格式的 Tag 即可自动触发构建并发布 Release。
|
||||
支持构建:
|
||||
* macOS (AMD64 / ARM64)
|
||||
* Windows (AMD64)
|
||||
* Linux (AMD64,提供 WebKitGTK 4.0 与 4.1 变体产物)
|
||||
The repository includes a release workflow.
|
||||
Push a `v*` tag to trigger automated build and release.
|
||||
Release notes are generated automatically from merged pull requests and categorized by `.github/release.yaml`.
|
||||
|
||||
Target artifacts include:
|
||||
- macOS (AMD64 / ARM64)
|
||||
- Windows (AMD64)
|
||||
- Linux (AMD64, WebKitGTK 4.0 and 4.1 variants)
|
||||
|
||||
---
|
||||
|
||||
## ❓ 常见问题 (Troubleshooting)
|
||||
## Troubleshooting
|
||||
|
||||
### macOS 提示 "应用已损坏,无法打开"
|
||||
### macOS: "App is damaged and can’t be opened"
|
||||
|
||||
由于本项目尚未购买 Apple 开发者证书进行签名(Notarization),macOS 的 Gatekeeper 安全机制可能会拦截应用的运行。请按照以下步骤解决:
|
||||
Without Apple notarization, Gatekeeper may block startup.
|
||||
|
||||
1. 将下载的 `GoNavi.app` 拖入 **应用程序** 文件夹。
|
||||
2. 打开 **终端 (Terminal)**。
|
||||
3. 复制并执行以下命令(输入密码时不会显示):
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
4. 或者:在 Finder 中右键点击应用图标,按住 `Control` 键选择 **打开**,然后在弹出的窗口中再次点击 **打开**。
|
||||
1. Move `GoNavi.app` to **Applications**.
|
||||
2. Open **Terminal**.
|
||||
3. Run:
|
||||
|
||||
### Linux 启动报错缺少 `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
|
||||
GoNavi 的 Linux 二进制依赖系统 WebKitGTK 运行库。不同发行版默认版本不同:
|
||||
Alternatively, Control-click (right-click) the app in Finder, choose **Open**, then click **Open** again in the confirmation dialog.
|
||||
|
||||
- Debian 13 / Ubuntu 24.04 及更新版本:通常为 WebKitGTK 4.1
|
||||
- Ubuntu 22.04 / Debian 12 等:通常为 WebKitGTK 4.0
|
||||
### Linux: missing `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
|
||||
如果启动时报错(如 `libwebkit2gtk-4.0.so.37: cannot open shared object file`),请按系统安装对应依赖后重试:
|
||||
GoNavi depends on WebKitGTK runtime libraries.
|
||||
|
||||
```bash
|
||||
# Debian 13 / Ubuntu 24.04+
|
||||
@@ -166,20 +193,20 @@ sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0-18
|
||||
```
|
||||
|
||||
如果你使用的是 Release 中带 `-WebKit41` 后缀的 Linux 产物,请优先在 Debian 13 / Ubuntu 24.04+ 上使用;普通 Linux 产物更适合 WebKitGTK 4.0 运行环境。
|
||||
If you use Linux artifacts with the `-WebKit41` suffix, prefer Debian 13 / Ubuntu 24.04+.
|
||||
|
||||
---
|
||||
|
||||
## 🤝 贡献指南
|
||||
## Contributing
|
||||
|
||||
欢迎提交 Issue 和 Pull Request!
|
||||
Issues and pull requests are welcome.
|
||||
|
||||
1. Fork 本仓库
|
||||
2. 创建你的特性分支 (`git checkout -b feature/AmazingFeature`)
|
||||
3. 提交你的改动 (`git commit -m 'feat: Add some AmazingFeature'`)
|
||||
4. 推送到分支 (`git push origin feature/AmazingFeature`)
|
||||
5. 开启一个 Pull Request
|
||||
For the full workflow, branch model, and maintainer sync rules, see:
|
||||
|
||||
## 📄 开源协议
|
||||
- [CONTRIBUTING.md](CONTRIBUTING.md)
|
||||
|
||||
本项目采用 [Apache-2.0 协议](LICENSE) 开源。
|
||||
External contributors should open pull requests directly against `main`.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under [Apache-2.0](LICENSE).
|
||||
|
||||
195
README.zh-CN.md
Normal file
195
README.zh-CN.md
Normal file
@@ -0,0 +1,195 @@
|
||||
# GoNavi - 现代化轻量级数据库客户端
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
[](https://reactjs.org/)
|
||||
[](LICENSE)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**语言**: [English](README.md) | 简体中文
|
||||
|
||||
GoNavi 是基于 **Wails (Go)** 与 **React** 构建的跨平台数据库管理工具,强调原生性能、低资源占用与多数据源统一工作流。
|
||||
|
||||
相比常见 Electron 客户端,GoNavi 在体积、启动速度和内存占用上更轻量。
|
||||
|
||||
---
|
||||
|
||||
## 项目简介
|
||||
|
||||
GoNavi 面向开发者与 DBA,核心目标是让数据库操作在桌面端做到“快、稳、统一”。
|
||||
|
||||
- **原生性能架构**:Wails(Go + WebView),降低运行时开销。
|
||||
- **大数据可用性**:虚拟滚动 + DataGrid 交互优化,提升大结果集可操作性。
|
||||
- **统一连接能力**:支持 URI 生成/解析、SSH 隧道、代理、驱动按需安装。
|
||||
- **工程化能力完整**:覆盖 SQL 编辑、对象管理、批量导出/备份、数据同步、执行日志、在线更新。
|
||||
|
||||
## 支持的数据源
|
||||
|
||||
> `内置`:主程序开箱即用。
|
||||
> `可选驱动代理`:需在驱动管理中安装启用后可用。
|
||||
|
||||
| 类别 | 数据源 | 驱动模式 | 典型能力 |
|
||||
|---|---|---|---|
|
||||
| 关系型 | MySQL | 内置 | 库表浏览、SQL 查询、数据编辑、导出/备份 |
|
||||
| 关系型 | PostgreSQL | 内置 | 库表浏览、SQL 查询、数据编辑、对象管理 |
|
||||
| 关系型 | Oracle | 内置 | 连接查询、对象浏览、数据编辑 |
|
||||
| 缓存 | Redis | 内置 | Key 浏览、命令执行、编码/视图切换 |
|
||||
| 关系型 | MariaDB | 可选驱动代理 | 连接查询、对象管理、数据编辑 |
|
||||
| 关系型 | Doris | 可选驱动代理 | 连接查询、对象浏览、SQL 执行 |
|
||||
| 搜索 | Sphinx | 可选驱动代理 | SphinxQL 查询与对象浏览 |
|
||||
| 关系型 | SQL Server | 可选驱动代理 | 库表浏览、SQL 查询、对象管理 |
|
||||
| 文件型 | SQLite | 可选驱动代理 | 本地文件库浏览、编辑、导出 |
|
||||
| 文件型 | DuckDB | 可选驱动代理 | 大表查询、分页浏览、文件库管理 |
|
||||
| 国产数据库 | Dameng | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | Kingbase | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | HighGo | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | Vastbase | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 文档型 | MongoDB | 可选驱动代理 | 文档查询、集合浏览、连接管理 |
|
||||
| 时序 | TDengine | 可选驱动代理 | 时序库表浏览、查询分析 |
|
||||
| 列式分析 | ClickHouse | 可选驱动代理 | 分析查询、对象浏览、SQL 执行 |
|
||||
| 扩展接入 | Custom Driver/DSN | 自定义 | 通过 Driver + DSN 接入更多数据源 |
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/224a74e7-65df-4aef-9710-d8e82e3a70c1" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/ec522145-5ceb-4481-ae46-a9251c89bdfc" />
|
||||
<br />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/330ce49b-45f1-4919-ae14-75f7d47e5f73" />
|
||||
<img width="14%" alt="image" src="https://github.com/user-attachments/assets/d15fa9e9-5486-423b-a0e9-53b467e45432" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/f0c57590-d987-4ecf-89b2-64efad60b6d7" />
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## 核心特性
|
||||
|
||||
### 性能与交互
|
||||
- 大数据场景下保持流畅交互(含 DataGrid 列宽拖拽、批量编辑流程优化)。
|
||||
- 虚拟滚动渲染,降低大结果集卡顿风险。
|
||||
|
||||
### 数据管理(DataGrid)
|
||||
- 单元格所见即所得编辑。
|
||||
- 批量新增/修改/删除,支持事务提交与回滚。
|
||||
- 大字段弹窗编辑。
|
||||
- 右键上下文操作(NULL、复制、导出等)。
|
||||
- 根据查询上下文智能切换读写模式。
|
||||
- 支持 CSV / XLSX / JSON / Markdown 导出。
|
||||
|
||||
### SQL 编辑器
|
||||
- 基于 Monaco Editor。
|
||||
- 上下文补全(数据库/表/字段)。
|
||||
- 多标签查询工作流。
|
||||
|
||||
### 连接与驱动
|
||||
- URI 生成与解析。
|
||||
- SSH 隧道、代理支持。
|
||||
- 连接配置 JSON 导入/导出。
|
||||
- 可选驱动安装与启用管理。
|
||||
|
||||
### Redis 工具
|
||||
- 自动/原始文本/UTF-8/十六进制等视图模式。
|
||||
- 内置命令执行面板。
|
||||
|
||||
### 可观测性与更新
|
||||
- SQL 执行日志(含耗时)。
|
||||
- 启动/定时/手动更新检查。
|
||||
|
||||
### UI 体验
|
||||
- Ant Design 5 体系。
|
||||
- 深色/浅色主题切换。
|
||||
- 灵活布局与侧边栏行为。
|
||||
|
||||
---
|
||||
|
||||
## 技术栈
|
||||
|
||||
- **后端**: Go 1.24 + Wails v2
|
||||
- **前端**: React 18 + TypeScript + Vite
|
||||
- **UI 框架**: Ant Design 5
|
||||
- **状态管理**: Zustand
|
||||
- **编辑器**: Monaco Editor
|
||||
|
||||
---
|
||||
|
||||
## 安装与运行
|
||||
|
||||
### 前置要求
|
||||
- [Go](https://go.dev/dl/) 1.21+
|
||||
- [Node.js](https://nodejs.org/) 18+
|
||||
- [Wails CLI](https://wails.io/docs/gettingstarted/installation):
|
||||
`go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
|
||||
### 开发模式
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
git clone https://github.com/Syngnat/GoNavi.git
|
||||
cd GoNavi
|
||||
|
||||
# 启动开发(热重载)
|
||||
wails dev
|
||||
```
|
||||
|
||||
### 编译构建
|
||||
|
||||
```bash
|
||||
# 构建当前平台
|
||||
wails build
|
||||
|
||||
# 清理后构建(发布前推荐)
|
||||
wails build -clean
|
||||
```
|
||||
|
||||
构建产物位于 `build/bin`。
|
||||
|
||||
### 跨平台发布(GitHub Actions)
|
||||
|
||||
仓库内置发布流水线,推送 `v*` Tag 可自动构建并发布 Release。
|
||||
Release 更新说明会基于已合并 Pull Request 自动生成,并按 `.github/release.yaml` 分类。
|
||||
|
||||
支持目标:
|
||||
- macOS (AMD64 / ARM64)
|
||||
- Windows (AMD64)
|
||||
- Linux (AMD64,含 WebKitGTK 4.0 / 4.1 变体)
|
||||
|
||||
---
|
||||
|
||||
## 常见问题
|
||||
|
||||
### macOS 提示“应用已损坏,无法打开”
|
||||
|
||||
在未进行 Apple Notarization 时,Gatekeeper 可能拦截应用。
|
||||
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
|
||||
### Linux 缺少 `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
|
||||
```bash
|
||||
# Debian 13 / Ubuntu 24.04+
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.1-0 libjavascriptcoregtk-4.1-0
|
||||
|
||||
# Ubuntu 22.04 / Debian 12
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0-18
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 贡献指南
|
||||
|
||||
欢迎提交 Issue 与 Pull Request。
|
||||
|
||||
完整流程、分支模型与维护者同步规则请查看:
|
||||
|
||||
- [CONTRIBUTING.zh-CN.md](CONTRIBUTING.zh-CN.md)
|
||||
|
||||
外部贡献者统一直接向 `main` 发起 Pull Request。
|
||||
|
||||
## 开源协议
|
||||
|
||||
本项目采用 [Apache-2.0 协议](LICENSE)。
|
||||
228
build-driver-agents.sh
Executable file
228
build-driver-agents.sh
Executable file
@@ -0,0 +1,228 @@
|
||||
#!/usr/bin/env bash
# Builds the GoNavi optional-driver-agent binaries for one platform and
# bundles them into a per-platform zip archive.

# Fail fast: abort on command errors, unset variables, and pipeline failures.
set -euo pipefail

# Resolve the directory containing this script so it can be run from anywhere.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# Drivers built when --drivers is not supplied.
DEFAULT_DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
|
||||
# usage prints the CLI help text (options, defaults, examples) to stdout.
# The heredoc is quoted ('EOF') so no expansion happens inside it.
usage() {
    cat <<'EOF'
用法:
  ./build-driver-agents.sh [选项]

选项:
  --drivers <列表>        指定驱动列表(逗号分隔),例如:kingbase,mongodb
  --platform <GOOS/GOARCH>
                          目标平台,默认使用当前 Go 环境(go env GOOS/GOARCH)
  --out-dir <目录>        输出目录根路径,默认:dist/driver-agents
  --bundle-name <文件名>  驱动总包 zip 名称,默认:GoNavi-DriverAgents.zip
  --strict                任一驱动构建失败即中断(默认失败后继续,最后汇总)
  -h, --help              显示帮助

示例:
  ./build-driver-agents.sh
  ./build-driver-agents.sh --drivers kingbase
  ./build-driver-agents.sh --platform windows/amd64 --drivers kingbase,mongodb
EOF
}
|
||||
|
||||
# normalize_driver maps a user-supplied driver name to its canonical
# lowercase form. Echoes the canonical name on success; returns non-zero
# for unknown drivers without printing anything.
normalize_driver() {
    local candidate
    candidate="$(echo "${1:-}" | tr '[:upper:]' '[:lower:]' | xargs)"
    # Accept the historical "diros" misspelling as an alias for doris.
    if [[ "$candidate" == "doris" || "$candidate" == "diros" ]]; then
        echo "doris"
        return 0
    fi
    case "$candidate" in
        mariadb|sphinx|sqlserver|sqlite|duckdb|dameng|kingbase|highgo|vastbase|mongodb|tdengine|clickhouse)
            echo "$candidate"
            return 0
            ;;
    esac
    return 1
}
|
||||
|
||||
# build_driver_name maps a canonical driver name to the spelling used in
# the Go build tag (the "diros" spelling is kept for tag compatibility).
build_driver_name() {
    if [[ "$1" == "doris" ]]; then
        echo "diros"
    else
        echo "$1"
    fi
}
|
||||
|
||||
# platform_dir_name converts a GOOS value into the directory name used
# inside the driver bundle zip.
platform_dir_name() {
    local os_name="$1"
    if [[ "$os_name" == "windows" ]]; then
        echo "Windows"
    elif [[ "$os_name" == "darwin" ]]; then
        echo "MacOS"
    elif [[ "$os_name" == "linux" ]]; then
        echo "Linux"
    else
        echo "Unknown"
    fi
}
|
||||
|
||||
# --- CLI defaults ---
driver_csv=""                         # raw --drivers value (comma separated)
target_platform=""                    # GOOS/GOARCH; empty means "current Go env"
out_root="dist/driver-agents"         # root output directory
bundle_name="GoNavi-DriverAgents.zip" # name of the aggregated zip bundle
strict_mode="false"                   # abort on first failed driver when "true"

# Parse command-line arguments. Option values use "${2:-}" so a missing
# value degrades to an empty string instead of tripping `set -u`.
while [[ $# -gt 0 ]]; do
    case "$1" in
        --drivers)
            driver_csv="${2:-}"
            shift 2
            ;;
        --platform)
            target_platform="${2:-}"
            shift 2
            ;;
        --out-dir)
            out_root="${2:-}"
            shift 2
            ;;
        --bundle-name)
            bundle_name="${2:-}"
            shift 2
            ;;
        --strict)
            strict_mode="true"
            shift
            ;;
        -h|--help)
            usage
            exit 0
            ;;
        *)
            echo "❌ 未知参数:$1"
            usage
            exit 1
            ;;
    esac
done
|
||||
|
||||
# Ensure the Go toolchain is available before doing anything else.
if ! command -v go >/dev/null 2>&1; then
    echo "❌ 未找到 Go,请先安装 Go 并确保 go 在 PATH 中。"
    exit 1
fi

# Default to the host platform when --platform was not given.
if [[ -z "$target_platform" ]]; then
    target_platform="$(go env GOOS)/$(go env GOARCH)"
fi

# The platform must look like GOOS/GOARCH (contain a slash).
if [[ "$target_platform" != */* ]]; then
    echo "❌ --platform 参数格式错误,应为 GOOS/GOARCH,例如 darwin/arm64"
    exit 1
fi

goos="${target_platform%%/*}"    # e.g. darwin
goarch="${target_platform##*/}"  # e.g. arm64
platform_key="${goos}-${goarch}"
platform_dir="$(platform_dir_name "$goos")"

# Resolve the driver list: validate a user-supplied CSV via
# normalize_driver, or fall back to the built-in default set.
declare -a drivers=()
if [[ -n "$driver_csv" ]]; then
    IFS=',' read -r -a raw_drivers <<<"$driver_csv"
    for item in "${raw_drivers[@]}"; do
        normalized="$(normalize_driver "$item")" || {
            echo "❌ 不支持的驱动:$item"
            exit 1
        }
        drivers+=("$normalized")
    done
else
    drivers=("${DEFAULT_DRIVERS[@]}")
fi
|
||||
|
||||
# Per-platform output directory plus a temporary staging area from which
# the bundle zip will be created.
output_dir="${out_root%/}/${platform_key}"
bundle_stage_dir="$(mktemp -d "${TMPDIR:-/tmp}/gonavi-driver-bundle.XXXXXX")"
bundle_platform_dir="$bundle_stage_dir/$platform_dir"

# cleanup removes the staging directory; registered on EXIT so it also
# runs on early termination (set -e failures, explicit exits).
cleanup() {
    rm -rf "$bundle_stage_dir"
}
trap cleanup EXIT

mkdir -p "$output_dir" "$bundle_platform_dir"
# Use an absolute path so later `cd` calls cannot break artifact paths.
output_dir_abs="$(cd "$output_dir" && pwd)"
bundle_zip_path="$output_dir_abs/$bundle_name"
|
||||
|
||||
# Build bookkeeping.
declare -a built_assets=()    # successfully built binary names
declare -a failed_drivers=()  # drivers whose build failed
declare -a skipped_drivers=() # drivers skipped on this platform

echo "🚀 开始构建 optional-driver-agent"
echo "   平台:$goos/$goarch"
echo "   输出目录:$output_dir_abs"
echo "   驱动列表:${drivers[*]}"

for driver in "${drivers[@]}"; do
    # On Windows targets, duckdb is only built for amd64.
    if [[ "$driver" == "duckdb" && "$goos" == "windows" && "$goarch" != "amd64" ]]; then
        echo "⚠️ 跳过 duckdb(仅支持 windows/amd64)"
        skipped_drivers+=("$driver")
        continue
    fi

    # Derive the build tag and the output asset name for this driver.
    build_driver="$(build_driver_name "$driver")"
    tag="gonavi_${build_driver}_driver"
    asset_name="${driver}-driver-agent-${goos}-${goarch}"
    if [[ "$goos" == "windows" ]]; then
        asset_name="${asset_name}.exe"
    fi
    output_path="$output_dir_abs/$asset_name"

    # duckdb is the only driver here that requires CGO.
    cgo_enabled=0
    if [[ "$driver" == "duckdb" ]]; then
        cgo_enabled=1
    fi

    echo "🔧 构建 $driver -> $asset_name (tag=$tag, CGO_ENABLED=$cgo_enabled)"
    # Temporarily disable errexit so a failed build can be recorded and,
    # unless --strict was given, the loop can continue with the next driver.
    set +e
    CGO_ENABLED="$cgo_enabled" GOOS="$goos" GOARCH="$goarch" GOTOOLCHAIN=auto \
        go build -tags "$tag" -trimpath -ldflags "-s -w" -o "$output_path" ./cmd/optional-driver-agent
    build_exit=$?
    set -e

    if [[ $build_exit -ne 0 ]]; then
        echo "❌ 构建失败:$driver"
        failed_drivers+=("$driver")
        if [[ "$strict_mode" == "true" ]]; then
            exit $build_exit
        fi
        continue
    fi

    # Copy the binary into the staging dir so it lands in the bundle zip.
    cp "$output_path" "$bundle_platform_dir/$asset_name"
    built_assets+=("$asset_name")
done
|
||||
|
||||
# At least one driver must have built successfully to produce a bundle.
if [[ ${#built_assets[@]} -eq 0 ]]; then
    echo "❌ 未成功构建任何驱动代理。"
    exit 1
fi

# Create the aggregated bundle zip, preferring `zip` and falling back to
# macOS `ditto` when zip is unavailable. Both run in a subshell so the
# working directory change does not leak.
rm -f "$bundle_zip_path"
if command -v zip >/dev/null 2>&1; then
    (
        cd "$bundle_stage_dir"
        zip -qry "$bundle_zip_path" "$platform_dir"
    )
elif command -v ditto >/dev/null 2>&1; then
    (
        cd "$bundle_stage_dir"
        ditto -c -k --sequesterRsrc --keepParent "$platform_dir" "$bundle_zip_path"
    )
else
    echo "❌ 未找到 zip/ditto,无法生成驱动总包 zip。"
    exit 1
fi

# Summary. Exit code 2 signals a partial failure (some drivers failed).
echo ""
echo "✅ 构建完成"
echo "   单文件输出目录:$output_dir_abs"
echo "   驱动总包:$bundle_zip_path"
echo "   已构建:${built_assets[*]}"
if [[ ${#skipped_drivers[@]} -gt 0 ]]; then
    echo "   已跳过:${skipped_drivers[*]}"
fi
if [[ ${#failed_drivers[@]} -gt 0 ]]; then
    echo "⚠️ 构建失败驱动:${failed_drivers[*]}"
    exit 2
fi
|
||||
351
build-release.sh
351
build-release.sh
@@ -20,6 +20,75 @@ RED='\033[0;31m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
# get_file_size_bytes prints the size of a file in bytes.
# Tries BSD stat (-f%z), then GNU stat (-c%s), then falls back to `wc -c`.
# Prints 0 when the file does not exist.
get_file_size_bytes() {
    local target="$1"
    local size
    if [ ! -f "$target" ]; then
        echo 0
        return
    fi
    # Capture the stat output directly instead of probing first and then
    # calling stat a second time for the value (saves a redundant process
    # spawn and avoids a probe/use race on the file).
    if size=$(stat -f%z "$target" 2>/dev/null); then
        echo "$size"
        return
    fi
    if size=$(stat -c%s "$target" 2>/dev/null); then
        echo "$size"
        return
    fi
    # Portable last resort; strip the whitespace some wc implementations emit.
    wc -c <"$target" | tr -d '[:space:]'
}
|
||||
|
||||
# format_size_mb renders a byte count as a megabyte string, e.g. "12.34MB".
# Defaults to 0 bytes when no argument is given.
format_size_mb() {
    local byte_count="${1:-0}"
    awk -v size="$byte_count" 'BEGIN { printf "%.2fMB", size / 1024 / 1024 }'
}
|
||||
|
||||
# try_compress_binary_with_upx compresses a built executable in place with UPX.
# $1: path to the executable; $2: human-readable label for log messages.
# Exits the whole script when the file is missing, upx is unavailable, or
# compression/verification fails — UPX is treated as mandatory for packaging.
try_compress_binary_with_upx() {
    local exe_path="$1"
    local label="$2"
    if [ ! -f "$exe_path" ]; then
        echo -e "${RED}  ❌ 未找到 ${label} 文件:$exe_path${NC}"
        exit 1
    fi

    # UPX is a hard requirement; print per-OS install hints and abort.
    if ! command -v upx >/dev/null 2>&1; then
        echo -e "${RED}  ❌ 未找到 upx,${label} 必须进行压缩后才能继续打包。${NC}"
        case "$(uname -s)" in
            Darwin)
                echo "     安装命令: brew install upx"
                ;;
            Linux)
                echo "     安装命令: sudo apt-get install -y upx-ucl (或对应发行版包管理器)"
                ;;
        esac
        exit 1
    fi

    local before_bytes after_bytes
    before_bytes=$(get_file_size_bytes "$exe_path")
    echo "  🗜️ 正在使用 UPX 压缩 ${label}..."
    if upx --best --lzma --force "$exe_path" >/dev/null 2>&1; then
        # Verify the compressed binary is still intact before trusting it.
        if ! upx -t "$exe_path" >/dev/null 2>&1; then
            echo -e "${RED}  ❌ UPX 校验失败:${label}${NC}"
            exit 1
        fi
        after_bytes=$(get_file_size_bytes "$exe_path")
        # Report savings only when the file actually shrank.
        if [ "$after_bytes" -lt "$before_bytes" ]; then
            local saved_bytes=$((before_bytes - after_bytes))
            echo "  ✅ UPX 压缩完成: $(format_size_mb "$before_bytes") -> $(format_size_mb "$after_bytes"),减少 $(format_size_mb "$saved_bytes")"
        else
            echo "  ℹ️ UPX 压缩完成: $(format_size_mb "$before_bytes") -> $(format_size_mb "$after_bytes")"
        fi
    else
        echo -e "${RED}  ❌ UPX 压缩失败:${label}${NC}"
        exit 1
    fi
}
|
||||
|
||||
# Optional DMG volume icon; cleared when the icon file is absent so the
# create-dmg invocation can skip --volicon.
MAC_VOLICON_PATH="build/darwin/icon.icns"
if [ ! -f "$MAC_VOLICON_PATH" ]; then
    MAC_VOLICON_PATH=""
fi
|
||||
|
||||
echo -e "${GREEN}🚀 开始构建 $APP_NAME $VERSION...${NC}"
|
||||
|
||||
# 清理并创建输出目录
|
||||
@@ -36,47 +105,101 @@ if [ $? -eq 0 ]; then
|
||||
|
||||
# 移动 .app 到 dist
|
||||
mv "$APP_SRC" "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
APP_BIN_PATH=$(find "$DIST_DIR/$APP_DEST_NAME/Contents/MacOS" -maxdepth 1 -type f -print -quit)
|
||||
if [ -n "$APP_BIN_PATH" ] && [ -f "$APP_BIN_PATH" ]; then
|
||||
echo -e "${YELLOW} ⚠️ macOS arm64 不再执行 UPX 压缩,保留原始主程序。${NC}"
|
||||
else
|
||||
echo -e "${RED} ❌ 未找到 macOS arm64 主程序文件。${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 创建 DMG
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (arm64)..."
|
||||
# 移除已存在的 DMG (以防万一)
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
|
||||
create-dmg \
|
||||
--volname "${APP_NAME} ${VERSION}" \
|
||||
--volicon "build/appicon.icns" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
# 检查是否生成了 rw.* 的临时文件并重命名 (create-dmg 有时会有此行为)
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到临时文件名,正在重命名...${NC}"
|
||||
mv "$RW_FILE" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
# Ad-hoc 代码签名(无 Apple Developer 账号时防止 Gatekeeper 报已损坏)
|
||||
echo " 🔏 正在对 .app 进行 ad-hoc 签名 (arm64)..."
|
||||
codesign --force --deep --sign - "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
# 创建 DMG
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (arm64)..."
|
||||
# 移除已存在的 DMG (以防万一)
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
# create-dmg 的 source 需要是“包含 .app 的目录”,不能直接传 .app 路径。
|
||||
STAGE_DIR=$(mktemp -d "$DIST_DIR/.dmg-stage-${APP_NAME}-${VERSION}-arm64.XXXXXX")
|
||||
if [ -z "$STAGE_DIR" ] || [ ! -d "$STAGE_DIR" ]; then
|
||||
echo -e "${RED} ❌ 创建 DMG 临时目录失败,跳过 DMG 打包。${NC}"
|
||||
else
|
||||
if command -v ditto &> /dev/null; then
|
||||
ditto "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
else
|
||||
cp -R "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
fi
|
||||
|
||||
# --sandbox-safe 会跳过 Finder 的 AppleScript 排版,避免打包过程中弹出/打开挂载窗口(CI/本地静默打包更友好)。
|
||||
CREATE_DMG_ARGS=(--volname "${APP_NAME} ${VERSION}" --format UDZO --sandbox-safe)
|
||||
if [ -n "$MAC_VOLICON_PATH" ]; then
|
||||
CREATE_DMG_ARGS+=(--volicon "$MAC_VOLICON_PATH")
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 macOS 卷图标 (build/darwin/icon.icns),跳过 --volicon。${NC}"
|
||||
fi
|
||||
|
||||
# 删除中间的 .app 文件,保持目录整洁
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
else
|
||||
echo -e "${RED} ❌ DMG 生成失败,请检查 create-dmg 输出。${NC}"
|
||||
create-dmg "${CREATE_DMG_ARGS[@]}" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$STAGE_DIR"
|
||||
|
||||
CREATE_DMG_EXIT_CODE=$?
|
||||
rm -rf "$STAGE_DIR"
|
||||
|
||||
if [ $CREATE_DMG_EXIT_CODE -ne 0 ]; then
|
||||
echo -e "${RED} ❌ create-dmg 执行失败 (exit=$CREATE_DMG_EXIT_CODE),保留 .app 以便排查。${NC}"
|
||||
else
|
||||
# create-dmg 可能会在失败时遗留 rw.*.dmg 中间产物;不要直接当作最终产物使用
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -maxdepth 1 -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 create-dmg 中间产物: $(basename "$RW_FILE"),正在转换为可分发 DMG...${NC}"
|
||||
hdiutil convert "$RW_FILE" -format UDZO -o "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
rm -f "$RW_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# 防御性:即使生成了目标文件,也要确保不是 UDRW(UDRW 在 Finder 下可能表现为“已损坏/无法打开”)
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
DMG_FORMAT=$(hdiutil imageinfo "$DIST_DIR/$DMG_NAME" 2>/dev/null | awk -F': ' '/^Format:/{print $2; exit}')
|
||||
if [ "$DMG_FORMAT" = "UDRW" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 UDRW(可写原始映像),正在转换为 UDZO...${NC}"
|
||||
TMP_UDZO="$DIST_DIR/.tmp.$DMG_NAME"
|
||||
rm -f "$TMP_UDZO"
|
||||
hdiutil convert "$DIST_DIR/$DMG_NAME" -format UDZO -o "$TMP_UDZO" >/dev/null 2>&1 && mv "$TMP_UDZO" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
hdiutil verify "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED} ❌ DMG 校验失败,保留 .app 以便排查。${NC}"
|
||||
else
|
||||
# 删除中间的 .app 文件,保持目录整洁
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具,跳过 DMG 打包,仅保留 .app。${NC}"
|
||||
echo " 安装命令: brew install create-dmg"
|
||||
fi
|
||||
else
|
||||
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo -e "${RED} ❌ DMG 生成失败,请检查 create-dmg 输出。${NC}"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具,跳过 DMG 打包,仅保留 .app。${NC}"
|
||||
echo " 安装命令: brew install create-dmg"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ macOS arm64 构建失败。${NC}"
|
||||
fi
|
||||
|
||||
@@ -89,44 +212,96 @@ if [ $? -eq 0 ]; then
|
||||
DMG_NAME="${APP_NAME}-${VERSION}-mac-amd64.dmg"
|
||||
|
||||
mv "$APP_SRC" "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (amd64)..."
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
|
||||
create-dmg \
|
||||
--volname "${APP_NAME} ${VERSION}" \
|
||||
--volicon "build/appicon.icns" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
# 检查是否生成了 rw.* 的临时文件并重命名
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到临时文件名,正在重命名...${NC}"
|
||||
mv "$RW_FILE" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
else
|
||||
echo -e "${RED} ❌ DMG 生成失败。${NC}"
|
||||
fi
|
||||
APP_BIN_PATH=$(find "$DIST_DIR/$APP_DEST_NAME/Contents/MacOS" -maxdepth 1 -type f -print -quit)
|
||||
if [ -n "$APP_BIN_PATH" ] && [ -f "$APP_BIN_PATH" ]; then
|
||||
echo -e "${YELLOW} ⚠️ macOS amd64 不再执行 UPX 压缩,保留原始主程序。${NC}"
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具。${NC}"
|
||||
echo -e "${RED} ❌ 未找到 macOS amd64 主程序文件。${NC}"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ macOS amd64 构建失败。${NC}"
|
||||
|
||||
# Ad-hoc 代码签名
|
||||
echo " 🔏 正在对 .app 进行 ad-hoc 签名 (amd64)..."
|
||||
codesign --force --deep --sign - "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (amd64)..."
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
# create-dmg 的 source 需要是“包含 .app 的目录”,不能直接传 .app 路径。
|
||||
STAGE_DIR=$(mktemp -d "$DIST_DIR/.dmg-stage-${APP_NAME}-${VERSION}-amd64.XXXXXX")
|
||||
if [ -z "$STAGE_DIR" ] || [ ! -d "$STAGE_DIR" ]; then
|
||||
echo -e "${RED} ❌ 创建 DMG 临时目录失败,跳过 DMG 打包。${NC}"
|
||||
else
|
||||
if command -v ditto &> /dev/null; then
|
||||
ditto "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
else
|
||||
cp -R "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
fi
|
||||
|
||||
# --sandbox-safe 会跳过 Finder 的 AppleScript 排版,避免打包过程中弹出/打开挂载窗口(CI/本地静默打包更友好)。
|
||||
CREATE_DMG_ARGS=(--volname "${APP_NAME} ${VERSION}" --format UDZO --sandbox-safe)
|
||||
if [ -n "$MAC_VOLICON_PATH" ]; then
|
||||
CREATE_DMG_ARGS+=(--volicon "$MAC_VOLICON_PATH")
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 macOS 卷图标 (build/darwin/icon.icns),跳过 --volicon。${NC}"
|
||||
fi
|
||||
|
||||
create-dmg "${CREATE_DMG_ARGS[@]}" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$STAGE_DIR"
|
||||
|
||||
CREATE_DMG_EXIT_CODE=$?
|
||||
rm -rf "$STAGE_DIR"
|
||||
|
||||
if [ $CREATE_DMG_EXIT_CODE -ne 0 ]; then
|
||||
echo -e "${RED} ❌ create-dmg 执行失败 (exit=$CREATE_DMG_EXIT_CODE),保留 .app 以便排查。${NC}"
|
||||
else
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -maxdepth 1 -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 create-dmg 中间产物: $(basename "$RW_FILE"),正在转换为可分发 DMG...${NC}"
|
||||
hdiutil convert "$RW_FILE" -format UDZO -o "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
rm -f "$RW_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
DMG_FORMAT=$(hdiutil imageinfo "$DIST_DIR/$DMG_NAME" 2>/dev/null | awk -F': ' '/^Format:/{print $2; exit}')
|
||||
if [ "$DMG_FORMAT" = "UDRW" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 UDRW(可写原始映像),正在转换为 UDZO...${NC}"
|
||||
TMP_UDZO="$DIST_DIR/.tmp.$DMG_NAME"
|
||||
rm -f "$TMP_UDZO"
|
||||
hdiutil convert "$DIST_DIR/$DMG_NAME" -format UDZO -o "$TMP_UDZO" >/dev/null 2>&1 && mv "$TMP_UDZO" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
hdiutil verify "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED} ❌ DMG 校验失败,保留 .app 以便排查。${NC}"
|
||||
else
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo -e "${RED} ❌ DMG 生成失败。${NC}"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具。${NC}"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ macOS amd64 构建失败。${NC}"
|
||||
fi
|
||||
|
||||
# --- Windows AMD64 构建 ---
|
||||
@@ -134,7 +309,9 @@ echo -e "${GREEN}🪟 正在构建 Windows (amd64)...${NC}"
|
||||
if command -v x86_64-w64-mingw32-gcc &> /dev/null; then
|
||||
wails build -platform windows/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$DIST_DIR/${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
TARGET_EXE="$DIST_DIR/${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$TARGET_EXE"
|
||||
try_compress_binary_with_upx "$TARGET_EXE" "Windows amd64 可执行文件"
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
else
|
||||
echo -e "${RED} ❌ Windows amd64 构建失败。${NC}"
|
||||
@@ -148,7 +325,9 @@ echo -e "${GREEN}🪟 正在构建 Windows (arm64)...${NC}"
|
||||
if command -v aarch64-w64-mingw32-gcc &> /dev/null; then
|
||||
wails build -platform windows/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$DIST_DIR/${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
TARGET_EXE="$DIST_DIR/${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$TARGET_EXE"
|
||||
echo -e "${YELLOW} ⚠️ 当前 UPX 不支持 win64/arm64,跳过 Windows arm64 压缩。${NC}"
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
else
|
||||
echo -e "${RED} ❌ Windows arm64 构建失败。${NC}"
|
||||
@@ -168,8 +347,10 @@ if [ "$CURRENT_OS" = "Linux" ] && [ "$CURRENT_ARCH" = "x86_64" ]; then
|
||||
# 本机 Linux amd64,直接构建
|
||||
wails build -platform linux/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux amd64 可执行文件"
|
||||
# 打包为 tar.gz
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-amd64.tar.gz" "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
@@ -186,8 +367,10 @@ elif command -v x86_64-linux-gnu-gcc &> /dev/null; then
|
||||
export CGO_ENABLED=1
|
||||
wails build -platform linux/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux amd64 可执行文件"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-amd64.tar.gz" "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
@@ -208,8 +391,10 @@ if [ "$CURRENT_OS" = "Linux" ] && [ "$CURRENT_ARCH" = "aarch64" ]; then
|
||||
# 本机 Linux arm64,直接构建
|
||||
wails build -platform linux/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux arm64 可执行文件"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-arm64.tar.gz" "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
@@ -225,8 +410,10 @@ elif command -v aarch64-linux-gnu-gcc &> /dev/null; then
|
||||
export CGO_ENABLED=1
|
||||
wails build -platform linux/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux arm64 可执行文件"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-arm64.tar.gz" "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
|
||||
@@ -2,10 +2,13 @@ package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
@@ -16,6 +19,7 @@ type agentRequest struct {
|
||||
Method string `json:"method"`
|
||||
Config *connection.ConnectionConfig `json:"config,omitempty"`
|
||||
Query string `json:"query,omitempty"`
|
||||
TimeoutMs int64 `json:"timeoutMs,omitempty"`
|
||||
DBName string `json:"dbName,omitempty"`
|
||||
TableName string `json:"tableName,omitempty"`
|
||||
Changes *connection.ChangeSet `json:"changes,omitempty"`
|
||||
@@ -47,6 +51,8 @@ const (
|
||||
agentMethodApplyChanges = "applyChanges"
|
||||
)
|
||||
|
||||
const legacyClickHouseDefaultTimeout = 2 * time.Hour
|
||||
|
||||
var (
|
||||
agentDriverType string
|
||||
agentDatabaseFactory func() db.Database
|
||||
@@ -137,14 +143,14 @@ func handleRequest(inst *db.Database, req agentRequest) agentResponse {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
case agentMethodQuery:
|
||||
data, fields, err := (*inst).Query(req.Query)
|
||||
data, fields, err := queryWithOptionalTimeout(*inst, req.Query, req.TimeoutMs)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
resp.Fields = fields
|
||||
case agentMethodExec:
|
||||
affected, err := (*inst).Exec(req.Query)
|
||||
affected, err := execWithOptionalTimeout(*inst, req.Query, req.TimeoutMs)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
@@ -218,7 +224,11 @@ func handleRequest(inst *db.Database, req agentRequest) agentResponse {
|
||||
}
|
||||
|
||||
func writeResponse(writer *bufio.Writer, resp agentResponse) error {
|
||||
payload, err := json.Marshal(resp)
|
||||
// 对响应数据做统一 JSON 安全归一化:
|
||||
// 将 map[any]any(如 duckdb.Map)递归转换为 map[string]any,避免序列化失败导致代理进程退出。
|
||||
safeResp := resp
|
||||
safeResp.Data = normalizeAgentResponseData(resp.Data)
|
||||
payload, err := json.Marshal(safeResp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -234,3 +244,87 @@ func fail(resp agentResponse, errText string) agentResponse {
|
||||
resp.Error = strings.TrimSpace(errText)
|
||||
return resp
|
||||
}
|
||||
|
||||
// normalizeAgentResponseData recursively converts response payloads into
// JSON-safe values: map keys of any type (e.g. duckdb's map[any]any) are
// stringified into map[string]interface{}, pointers/interfaces are
// dereferenced, and slices/arrays are walked element by element.
// []byte values are returned untouched so binary columns keep their
// base64 JSON encoding; nil inputs (and nil-able kinds) normalize to nil.
func normalizeAgentResponseData(v interface{}) interface{} {
	if v == nil {
		return nil
	}

	rv := reflect.ValueOf(v)
	kind := rv.Kind()

	// Collapse every nil-able kind to an explicit nil up front.
	switch kind {
	case reflect.Pointer, reflect.Interface, reflect.Map, reflect.Slice:
		if rv.IsNil() {
			return nil
		}
	}

	switch kind {
	case reflect.Pointer, reflect.Interface:
		return normalizeAgentResponseData(rv.Elem().Interface())
	case reflect.Map:
		normalized := make(map[string]interface{}, rv.Len())
		for iter := rv.MapRange(); iter.Next(); {
			key := fmt.Sprint(iter.Key().Interface())
			normalized[key] = normalizeAgentResponseData(iter.Value().Interface())
		}
		return normalized
	case reflect.Slice:
		// Keep []byte as-is to preserve the existing base64 JSON encoding
		// of binary columns.
		if rv.Type().Elem().Kind() == reflect.Uint8 {
			return v
		}
		fallthrough
	case reflect.Array:
		normalized := make([]interface{}, rv.Len())
		for i := range normalized {
			normalized[i] = normalizeAgentResponseData(rv.Index(i).Interface())
		}
		return normalized
	default:
		return v
	}
}
|
||||
|
||||
func queryWithOptionalTimeout(inst db.Database, query string, timeoutMs int64) ([]map[string]interface{}, []string, error) {
|
||||
effectiveTimeoutMs := timeoutMs
|
||||
if effectiveTimeoutMs <= 0 && strings.EqualFold(strings.TrimSpace(agentDriverType), "clickhouse") {
|
||||
effectiveTimeoutMs = int64(legacyClickHouseDefaultTimeout / time.Millisecond)
|
||||
}
|
||||
if effectiveTimeoutMs <= 0 {
|
||||
return inst.Query(query)
|
||||
}
|
||||
if q, ok := inst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(effectiveTimeoutMs)*time.Millisecond)
|
||||
defer cancel()
|
||||
return q.QueryContext(ctx, query)
|
||||
}
|
||||
return inst.Query(query)
|
||||
}
|
||||
|
||||
func execWithOptionalTimeout(inst db.Database, query string, timeoutMs int64) (int64, error) {
|
||||
effectiveTimeoutMs := timeoutMs
|
||||
if effectiveTimeoutMs <= 0 && strings.EqualFold(strings.TrimSpace(agentDriverType), "clickhouse") {
|
||||
effectiveTimeoutMs = int64(legacyClickHouseDefaultTimeout / time.Millisecond)
|
||||
}
|
||||
if effectiveTimeoutMs <= 0 {
|
||||
return inst.Exec(query)
|
||||
}
|
||||
if e, ok := inst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(effectiveTimeoutMs)*time.Millisecond)
|
||||
defer cancel()
|
||||
return e.ExecContext(ctx, query)
|
||||
}
|
||||
return inst.Exec(query)
|
||||
}
|
||||
|
||||
172
cmd/optional-driver-agent/main_test.go
Normal file
172
cmd/optional-driver-agent/main_test.go
Normal file
@@ -0,0 +1,172 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type duckMapLike map[any]any
|
||||
|
||||
// TestWriteResponse_NormalizesMapAnyAny verifies that writeResponse
// normalizes map[any]any payloads (like duckdb's map type) into
// map[string]any so JSON encoding succeeds, and that non-string keys are
// stringified in the emitted JSON.
func TestWriteResponse_NormalizesMapAnyAny(t *testing.T) {
	resp := agentResponse{
		ID:      1,
		Success: true,
		Data: []map[string]interface{}{
			{
				"id":   int64(7),
				"meta": duckMapLike{"k": "v", 2: "two"},
			},
		},
	}

	// Serialize through writeResponse into an in-memory buffer.
	var out bytes.Buffer
	writer := bufio.NewWriter(&out)
	if err := writeResponse(writer, resp); err != nil {
		t.Fatalf("writeResponse 返回错误: %v", err)
	}

	// Decode the emitted JSON and inspect the normalized payload.
	var decoded struct {
		Data []map[string]interface{} `json:"data"`
	}
	if err := json.Unmarshal(bytes.TrimSpace(out.Bytes()), &decoded); err != nil {
		t.Fatalf("解码响应失败: %v", err)
	}

	if len(decoded.Data) != 1 {
		t.Fatalf("期望 1 行数据,实际 %d", len(decoded.Data))
	}
	meta, ok := decoded.Data[0]["meta"].(map[string]interface{})
	if !ok {
		t.Fatalf("meta 字段类型异常: %T", decoded.Data[0]["meta"])
	}
	if meta["k"] != "v" {
		t.Fatalf("字符串 key 转换异常: %v", meta["k"])
	}
	// The integer key 2 must appear as the string "2" after normalization.
	if meta["2"] != "two" {
		t.Fatalf("数字 key 未字符串化: %v", meta["2"])
	}
}
|
||||
|
||||
func TestNormalizeAgentResponseData_KeepByteSlice(t *testing.T) {
|
||||
raw := []byte{0x61, 0x62, 0x63}
|
||||
normalized := normalizeAgentResponseData(raw)
|
||||
out, ok := normalized.([]byte)
|
||||
if !ok {
|
||||
t.Fatalf("期望 []byte,实际 %T", normalized)
|
||||
}
|
||||
if !bytes.Equal(out, raw) {
|
||||
t.Fatalf("[]byte 内容被意外改写: %v", out)
|
||||
}
|
||||
}
|
||||
|
||||
// fakeAgentTimeoutDB is a test double recording which query/exec code path
// was taken and whether the supplied context carried a deadline.
type fakeAgentTimeoutDB struct {
	queryCalled        bool // plain Query was invoked (must not happen when a timeout is set)
	queryContextCalled bool // context-aware QueryContext was invoked
	execCalled         bool // plain Exec was invoked (must not happen when a timeout is set)
	execContextCalled  bool // context-aware ExecContext was invoked
	deadlineSet        bool // the supplied context had a deadline attached
}

func (f *fakeAgentTimeoutDB) Connect(config connection.ConnectionConfig) error { return nil }
func (f *fakeAgentTimeoutDB) Close() error                                     { return nil }
func (f *fakeAgentTimeoutDB) Ping() error                                      { return nil }

// Query errors on purpose: with a timeout, the context-aware path must be used.
func (f *fakeAgentTimeoutDB) Query(query string) ([]map[string]interface{}, []string, error) {
	f.queryCalled = true
	return nil, nil, errors.New("query should not be called")
}

// QueryContext records the call and whether ctx carries a deadline, then
// returns a fixed single-row result.
func (f *fakeAgentTimeoutDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
	f.queryContextCalled = true
	if _, ok := ctx.Deadline(); ok {
		f.deadlineSet = true
	}
	return []map[string]interface{}{{"ok": 1}}, []string{"ok"}, nil
}

// Exec errors on purpose: with a timeout, the context-aware path must be used.
func (f *fakeAgentTimeoutDB) Exec(query string) (int64, error) {
	f.execCalled = true
	return 0, errors.New("exec should not be called")
}

// ExecContext records the call and whether ctx carries a deadline, then
// returns a fixed affected-row count.
func (f *fakeAgentTimeoutDB) ExecContext(ctx context.Context, query string) (int64, error) {
	f.execContextCalled = true
	if _, ok := ctx.Deadline(); ok {
		f.deadlineSet = true
	}
	return 3, nil
}

// The remaining methods are no-op stubs needed to satisfy the database
// interface expected by queryWithOptionalTimeout/execWithOptionalTimeout.
func (f *fakeAgentTimeoutDB) GetDatabases() ([]string, error) { return nil, nil }
func (f *fakeAgentTimeoutDB) GetTables(dbName string) ([]string, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetCreateStatement(dbName, tableName string) (string, error) {
	return "", nil
}
func (f *fakeAgentTimeoutDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	return nil, nil
}
|
||||
|
||||
func TestQueryWithOptionalTimeout_UsesQueryContext(t *testing.T) {
|
||||
fake := &fakeAgentTimeoutDB{}
|
||||
data, fields, err := queryWithOptionalTimeout(fake, "SELECT 1", int64((2 * time.Second).Milliseconds()))
|
||||
if err != nil {
|
||||
t.Fatalf("queryWithOptionalTimeout 返回错误: %v", err)
|
||||
}
|
||||
if !fake.queryContextCalled || fake.queryCalled {
|
||||
t.Fatalf("query 调用路径异常,QueryContext=%v Query=%v", fake.queryContextCalled, fake.queryCalled)
|
||||
}
|
||||
if !fake.deadlineSet {
|
||||
t.Fatal("queryWithOptionalTimeout 未设置 deadline")
|
||||
}
|
||||
if len(data) != 1 || len(fields) != 1 || fields[0] != "ok" {
|
||||
t.Fatalf("queryWithOptionalTimeout 返回数据异常: data=%v fields=%v", data, fields)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExecWithOptionalTimeout_UsesExecContext(t *testing.T) {
|
||||
fake := &fakeAgentTimeoutDB{}
|
||||
affected, err := execWithOptionalTimeout(fake, "DELETE FROM t", int64((2 * time.Second).Milliseconds()))
|
||||
if err != nil {
|
||||
t.Fatalf("execWithOptionalTimeout 返回错误: %v", err)
|
||||
}
|
||||
if !fake.execContextCalled || fake.execCalled {
|
||||
t.Fatalf("exec 调用路径异常,ExecContext=%v Exec=%v", fake.execContextCalled, fake.execCalled)
|
||||
}
|
||||
if !fake.deadlineSet {
|
||||
t.Fatal("execWithOptionalTimeout 未设置 deadline")
|
||||
}
|
||||
if affected != 3 {
|
||||
t.Fatalf("受影响行数异常,want=3 got=%d", affected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryWithOptionalTimeout_ClickHouseLegacyModeUsesQueryContext(t *testing.T) {
|
||||
old := agentDriverType
|
||||
agentDriverType = "clickhouse"
|
||||
defer func() { agentDriverType = old }()
|
||||
|
||||
fake := &fakeAgentTimeoutDB{}
|
||||
_, _, err := queryWithOptionalTimeout(fake, "SELECT 1", 0)
|
||||
if err != nil {
|
||||
t.Fatalf("queryWithOptionalTimeout 返回错误: %v", err)
|
||||
}
|
||||
if !fake.queryContextCalled || fake.queryCalled {
|
||||
t.Fatalf("clickhouse legacy query 调用路径异常,QueryContext=%v Query=%v", fake.queryContextCalled, fake.queryCalled)
|
||||
}
|
||||
}
|
||||
12
cmd/optional-driver-agent/provider_clickhouse.go
Normal file
12
cmd/optional-driver-agent/provider_clickhouse.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_clickhouse_driver
|
||||
|
||||
package main
|
||||
|
||||
import "GoNavi-Wails/internal/db"
|
||||
|
||||
func init() {
|
||||
agentDriverType = "clickhouse"
|
||||
agentDatabaseFactory = func() db.Database {
|
||||
return &db.ClickHouseDB{}
|
||||
}
|
||||
}
|
||||
12
cmd/optional-driver-agent/provider_mongodb_v1.go
Normal file
12
cmd/optional-driver-agent/provider_mongodb_v1.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_mongodb_driver_v1
|
||||
|
||||
package main
|
||||
|
||||
import "GoNavi-Wails/internal/db"
|
||||
|
||||
func init() {
|
||||
agentDriverType = "mongodb"
|
||||
agentDatabaseFactory = func() db.Database {
|
||||
return &db.MongoDBV1{}
|
||||
}
|
||||
}
|
||||
@@ -7,11 +7,11 @@
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/mariadb"
|
||||
},
|
||||
"diros": {
|
||||
"doris": {
|
||||
"engine": "go",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/diros"
|
||||
"downloadUrl": "builtin://activate/doris"
|
||||
},
|
||||
"sphinx": {
|
||||
"engine": "go",
|
||||
@@ -33,7 +33,7 @@
|
||||
},
|
||||
"duckdb": {
|
||||
"engine": "go",
|
||||
"version": "2.5.5",
|
||||
"version": "2.5.6",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/duckdb"
|
||||
},
|
||||
@@ -73,6 +73,12 @@
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/tdengine"
|
||||
},
|
||||
"clickhouse": {
|
||||
"engine": "go",
|
||||
"version": "2.43.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/clickhouse"
|
||||
},
|
||||
"postgres": {
|
||||
"engine": "go",
|
||||
"version": "1.11.1",
|
||||
|
||||
@@ -5,6 +5,23 @@
|
||||
<link rel="icon" type="image/svg+xml" href="/logo.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>GoNavi</title>
|
||||
<script>
|
||||
if (typeof window !== 'undefined' && !window.go) {
|
||||
window.go = {
|
||||
app: {
|
||||
App: new Proxy({}, { get: () => async () => ({ success: false }) })
|
||||
}
|
||||
};
|
||||
}
|
||||
if (typeof window !== 'undefined' && !window.runtime) {
|
||||
window.runtime = new Proxy({}, {
|
||||
get: (target, prop) => {
|
||||
if (prop === 'Environment') return async () => ({ platform: 'darwin' });
|
||||
return typeof prop === 'string' && prop.startsWith('WindowIs') ? () => false : () => {};
|
||||
}
|
||||
});
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
|
||||
@@ -37,6 +37,91 @@ body, #root {
|
||||
padding-right: 8px;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-list-holder-inner,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-list-holder-inner .ant-tree-treenode {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper {
|
||||
min-height: 36px;
|
||||
border-radius: 14px;
|
||||
transition: background-color 0.2s ease, border-color 0.2s ease, color 0.2s ease;
|
||||
background: transparent !important;
|
||||
border: none !important;
|
||||
box-shadow: none !important;
|
||||
outline: none !important;
|
||||
flex: 1 1 auto;
|
||||
min-width: 0;
|
||||
width: auto !important;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper:hover,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper:active,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper:focus,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper:focus-visible,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected:hover {
|
||||
background: transparent !important;
|
||||
border-color: transparent !important;
|
||||
box-shadow: none !important;
|
||||
outline: none !important;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-treenode {
|
||||
padding: 2px 0;
|
||||
width: 100%;
|
||||
border-radius: 14px;
|
||||
transition: background-color 0.2s ease, border-color 0.2s ease, color 0.2s ease;
|
||||
border: none;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
z-index: 0;
|
||||
display: flex !important;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-switcher {
|
||||
width: 0 !important;
|
||||
min-width: 0 !important;
|
||||
margin-inline-end: 0 !important;
|
||||
padding: 0 !important;
|
||||
overflow: hidden !important;
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-switcher:hover,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-switcher:active,
|
||||
.redis-viewer-workbench .ant-tree .ant-tree-switcher:focus {
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .redis-tree-expander-button:hover,
|
||||
.redis-viewer-workbench .redis-tree-expander-button:focus-visible {
|
||||
background: transparent !important;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-radio-group .ant-radio-button-wrapper {
|
||||
border-radius: 10px;
|
||||
margin-inline-end: 6px;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-radio-group .ant-radio-button-wrapper:last-child {
|
||||
margin-inline-end: 0;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-table {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.redis-viewer-workbench .ant-table-wrapper .ant-table-thead > tr > th {
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
/* Scrollbar styling for dark mode */
|
||||
body[data-theme='dark'] ::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
@@ -57,6 +142,29 @@ body[data-theme='dark'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: #666;
|
||||
}
|
||||
|
||||
/* Scrollbar styling for light mode (transparent-friendly) */
|
||||
body[data-theme='light'] ::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-thumb {
|
||||
background: rgba(0, 0, 0, 0.18);
|
||||
border-radius: 4px;
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(0, 0, 0, 0.30);
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
}
|
||||
|
||||
/* Ensure body background matches theme to avoid white flashes, but kept transparent for window composition */
|
||||
body {
|
||||
transition: color 0.3s;
|
||||
@@ -67,6 +175,96 @@ body[data-theme='dark'] {
|
||||
在透明窗口环境下会显著加剧 GPU 负载 */
|
||||
}
|
||||
|
||||
/* 暗色 + 透明:提升选中/焦点可读性,避免默认蓝色在半透明背景下发灰 */
|
||||
body[data-theme='dark'] .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected,
|
||||
body[data-theme='dark'] .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected:hover {
|
||||
background: rgba(246, 196, 83, 0.24) !important;
|
||||
color: rgba(255, 236, 179, 0.98) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .redis-viewer-workbench .ant-tree .ant-tree-treenode:hover {
|
||||
background: rgba(255, 255, 255, 0.05) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .redis-viewer-workbench .ant-tree .ant-tree-treenode.ant-tree-treenode-selected,
|
||||
body[data-theme='dark'] .redis-viewer-workbench .ant-tree .ant-tree-treenode.ant-tree-treenode-selected:hover {
|
||||
background: linear-gradient(90deg, rgba(246, 196, 83, 0.22), rgba(246, 196, 83, 0.08)) !important;
|
||||
border: 1px solid rgba(246, 196, 83, 0.24) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox-checked .ant-checkbox-inner {
|
||||
background-color: #f6c453 !important;
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox-indeterminate .ant-checkbox-inner::after {
|
||||
background-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox:hover .ant-checkbox-inner,
|
||||
body[data-theme='dark'] .ant-checkbox-wrapper:hover .ant-checkbox-inner {
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-radio-checked .ant-radio-inner {
|
||||
border-color: #f6c453 !important;
|
||||
background-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-radio-wrapper:hover .ant-radio-inner,
|
||||
body[data-theme='dark'] .ant-radio:hover .ant-radio-inner {
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-switch.ant-switch-checked {
|
||||
background: #d8a93b !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-table-tbody > tr.ant-table-row-selected > td,
|
||||
body[data-theme='dark'] .ant-table-tbody .ant-table-row.ant-table-row-selected > .ant-table-cell {
|
||||
background: rgba(246, 196, 83, 0.18) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-table-tbody > tr.ant-table-row-selected:hover > td,
|
||||
body[data-theme='dark'] .ant-table-tbody .ant-table-row.ant-table-row-selected:hover > .ant-table-cell {
|
||||
background: rgba(246, 196, 83, 0.26) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .redis-viewer-workbench .ant-radio-button-wrapper {
|
||||
background: rgba(255, 255, 255, 0.04);
|
||||
border-color: rgba(255, 255, 255, 0.08);
|
||||
color: rgba(230, 234, 242, 0.9);
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .redis-viewer-workbench .ant-radio-button-wrapper-checked:not(.ant-radio-button-wrapper-disabled) {
|
||||
background: rgba(246, 196, 83, 0.16);
|
||||
border-color: rgba(246, 196, 83, 0.3);
|
||||
color: #f6c453;
|
||||
}
|
||||
|
||||
body[data-theme='light'] .redis-viewer-workbench .ant-tree .ant-tree-treenode:hover {
|
||||
background: rgba(15, 23, 42, 0.04) !important;
|
||||
}
|
||||
|
||||
body[data-theme='light'] .redis-viewer-workbench .ant-tree .ant-tree-treenode.ant-tree-treenode-selected,
|
||||
body[data-theme='light'] .redis-viewer-workbench .ant-tree .ant-tree-treenode.ant-tree-treenode-selected:hover {
|
||||
color: rgba(15, 23, 42, 0.92) !important;
|
||||
background: linear-gradient(90deg, rgba(22, 119, 255, 0.12), rgba(22, 119, 255, 0.04)) !important;
|
||||
border: 1px solid rgba(22, 119, 255, 0.18) !important;
|
||||
}
|
||||
|
||||
body[data-theme='light'] .redis-viewer-workbench .ant-radio-button-wrapper {
|
||||
background: rgba(255, 255, 255, 0.72);
|
||||
border-color: rgba(15, 23, 42, 0.08);
|
||||
color: rgba(51, 65, 85, 0.88);
|
||||
}
|
||||
|
||||
body[data-theme='light'] .redis-viewer-workbench .ant-radio-button-wrapper-checked:not(.ant-radio-button-wrapper-disabled) {
|
||||
background: rgba(22, 119, 255, 0.1);
|
||||
border-color: rgba(22, 119, 255, 0.22);
|
||||
color: #1677ff;
|
||||
}
|
||||
|
||||
/* 连接配置弹窗:滚动仅在弹窗 body 内部,不使用外层 wrap 滚动条 */
|
||||
.connection-modal-wrap {
|
||||
overflow: hidden !important;
|
||||
@@ -92,3 +290,53 @@ body[data-theme='dark'] {
|
||||
background-color: #ff4d4f !important;
|
||||
color: #fff !important;
|
||||
}
|
||||
|
||||
/* 驱动管理:统一关闭 antd sticky 横向条,仅保留自定义独立横向条 */
|
||||
.driver-manager-table .ant-table-sticky-scroll {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* 仅在独立横向条激活时隐藏表格自身横向滚动条,避免出现双横向条 */
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-content,
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-body {
|
||||
overflow-x: auto !important;
|
||||
-ms-overflow-style: none;
|
||||
scrollbar-width: none;
|
||||
}
|
||||
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-content::-webkit-scrollbar:horizontal,
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-body::-webkit-scrollbar:horizontal {
|
||||
height: 0 !important;
|
||||
}
|
||||
|
||||
.driver-manager-table-wrap {
|
||||
width: 100%;
|
||||
max-width: 100%;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.driver-manager-footer {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.driver-manager-footer-actions {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.driver-manager-hscroll {
|
||||
width: 100%;
|
||||
height: 12px;
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
scrollbar-gutter: stable;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.driver-manager-hscroll-inner {
|
||||
height: 1px;
|
||||
}
|
||||
|
||||
1671
frontend/src/App.tsx
1671
frontend/src/App.tsx
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,11 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs } from 'antd';
|
||||
import React, { useState, useEffect, useMemo, useRef } from 'react';
|
||||
import { Modal, Form, Select, Input, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs, theme as antdTheme } from 'antd';
|
||||
import { DatabaseOutlined, RocketOutlined, SwapOutlined, TableOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, DBGetTables, DataSync, DataSyncAnalyze, DataSyncPreview } from '../../wailsjs/go/app/App';
|
||||
import { SavedConnection } from '../types';
|
||||
import { EventsOn } from '../../wailsjs/runtime/runtime';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
|
||||
const { Title, Text } = Typography;
|
||||
const { Step } = Steps;
|
||||
@@ -21,6 +23,12 @@ type TableDiffSummary = {
|
||||
deletes?: number;
|
||||
same?: number;
|
||||
message?: string;
|
||||
targetTableExists?: boolean;
|
||||
plannedAction?: string;
|
||||
warnings?: string[];
|
||||
unsupportedObjects?: string[];
|
||||
indexesToCreate?: number;
|
||||
indexesSkipped?: number;
|
||||
};
|
||||
type TableOps = {
|
||||
insert: boolean;
|
||||
@@ -31,10 +39,135 @@ type TableOps = {
|
||||
selectedDeletePks?: string[];
|
||||
};
|
||||
|
||||
type WorkflowType = 'sync' | 'migration';
|
||||
|
||||
const quoteSqlIdent = (dbType: string, ident: string): string => {
|
||||
const raw = String(ident || '').trim();
|
||||
if (!raw) return raw;
|
||||
const t = String(dbType || '').toLowerCase();
|
||||
if (t === 'mysql' || t === 'mariadb' || t === 'diros' || t === 'sphinx' || t === 'clickhouse' || t === 'tdengine') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
if (t === 'sqlserver') {
|
||||
return `[${raw.replace(/]/g, ']]')}]`;
|
||||
}
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
};
|
||||
|
||||
const quoteSqlTable = (dbType: string, tableName: string): string => {
|
||||
const raw = String(tableName || '').trim();
|
||||
if (!raw) return raw;
|
||||
if (!raw.includes('.')) return quoteSqlIdent(dbType, raw);
|
||||
return raw
|
||||
.split('.')
|
||||
.map((part) => quoteSqlIdent(dbType, part))
|
||||
.join('.');
|
||||
};
|
||||
|
||||
const toSqlLiteral = (value: any, dbType: string): string => {
|
||||
if (value === null || value === undefined) return 'NULL';
|
||||
if (typeof value === 'number') return Number.isFinite(value) ? String(value) : 'NULL';
|
||||
if (typeof value === 'bigint') return value.toString();
|
||||
if (typeof value === 'boolean') {
|
||||
const t = String(dbType || '').toLowerCase();
|
||||
if (t === 'sqlserver') return value ? '1' : '0';
|
||||
return value ? 'TRUE' : 'FALSE';
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return `'${value.toISOString().replace(/'/g, "''")}'`;
|
||||
}
|
||||
if (typeof value === 'object') {
|
||||
try {
|
||||
return `'${JSON.stringify(value).replace(/'/g, "''")}'`;
|
||||
} catch {
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
}
|
||||
}
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
};
|
||||
|
||||
const resolveRedisDbIndex = (raw?: string): number => {
|
||||
const value = Number(String(raw || '').trim());
|
||||
return Number.isInteger(value) && value >= 0 && value <= 15 ? value : 0;
|
||||
};
|
||||
|
||||
const buildSqlPreview = (
|
||||
previewData: any,
|
||||
tableName: string,
|
||||
dbType: string,
|
||||
ops?: TableOps,
|
||||
): { sqlText: string; statementCount: number } => {
|
||||
if (!previewData || !tableName) return { sqlText: '', statementCount: 0 };
|
||||
const tableExpr = quoteSqlTable(dbType, tableName);
|
||||
const pkCol = String(previewData.pkColumn || 'id');
|
||||
const statements: string[] = [];
|
||||
|
||||
const insertRows = Array.isArray(previewData.inserts) ? previewData.inserts : [];
|
||||
const updateRows = Array.isArray(previewData.updates) ? previewData.updates : [];
|
||||
const deleteRows = Array.isArray(previewData.deletes) ? previewData.deletes : [];
|
||||
|
||||
const selectedInsert = new Set((ops?.selectedInsertPks || []).map((v) => String(v)));
|
||||
const selectedUpdate = new Set((ops?.selectedUpdatePks || []).map((v) => String(v)));
|
||||
const selectedDelete = new Set((ops?.selectedDeletePks || []).map((v) => String(v)));
|
||||
|
||||
if (ops?.insert !== false) {
|
||||
insertRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedInsert.size > 0 && !selectedInsert.has(pk)) return;
|
||||
const row = rowWrap?.row || {};
|
||||
const columns = Object.keys(row);
|
||||
if (columns.length === 0) return;
|
||||
const colExpr = columns.map((c) => quoteSqlIdent(dbType, c)).join(', ');
|
||||
const valExpr = columns.map((c) => toSqlLiteral(row[c], dbType)).join(', ');
|
||||
statements.push(`INSERT INTO ${tableExpr} (${colExpr}) VALUES (${valExpr});`);
|
||||
});
|
||||
}
|
||||
|
||||
if (ops?.update !== false) {
|
||||
updateRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedUpdate.size > 0 && !selectedUpdate.has(pk)) return;
|
||||
const source = rowWrap?.source || {};
|
||||
const changedColumns = Array.isArray(rowWrap?.changedColumns)
|
||||
? rowWrap.changedColumns
|
||||
: Object.keys(source).filter((k) => k !== pkCol);
|
||||
const setCols = changedColumns.filter((c: string) => String(c) !== pkCol);
|
||||
if (setCols.length === 0) return;
|
||||
const setExpr = setCols
|
||||
.map((c: string) => `${quoteSqlIdent(dbType, c)} = ${toSqlLiteral(source[c], dbType)}`)
|
||||
.join(', ');
|
||||
statements.push(
|
||||
`UPDATE ${tableExpr} SET ${setExpr} WHERE ${quoteSqlIdent(dbType, pkCol)} = ${toSqlLiteral(pk, dbType)};`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
if (ops?.delete) {
|
||||
deleteRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedDelete.size > 0 && !selectedDelete.has(pk)) return;
|
||||
statements.push(
|
||||
`DELETE FROM ${tableExpr} WHERE ${quoteSqlIdent(dbType, pkCol)} = ${toSqlLiteral(pk, dbType)};`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
sqlText: statements.join('\n'),
|
||||
statementCount: statements.length,
|
||||
};
|
||||
};
|
||||
|
||||
const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open, onClose }) => {
|
||||
const connections = useStore((state) => state.connections);
|
||||
const themeMode = useStore((state) => state.theme);
|
||||
const appearance = useStore((state) => state.appearance);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const { token } = antdTheme.useToken();
|
||||
const darkMode = themeMode === 'dark';
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const effectiveOpacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
|
||||
// Step 1: Config
|
||||
const [sourceConnId, setSourceConnId] = useState<string>('');
|
||||
@@ -50,9 +183,13 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const [selectedTables, setSelectedTables] = useState<string[]>([]);
|
||||
|
||||
// Options
|
||||
const [workflowType, setWorkflowType] = useState<WorkflowType>('sync');
|
||||
const [syncContent, setSyncContent] = useState<'data' | 'schema' | 'both'>('data');
|
||||
const [syncMode, setSyncMode] = useState<string>('insert_update');
|
||||
const [autoAddColumns, setAutoAddColumns] = useState<boolean>(true);
|
||||
const [targetTableStrategy, setTargetTableStrategy] = useState<'existing_only' | 'auto_create_if_missing' | 'smart'>('existing_only');
|
||||
const [createIndexes, setCreateIndexes] = useState<boolean>(false);
|
||||
const [mongoCollectionName, setMongoCollectionName] = useState<string>('');
|
||||
const [showSameTables, setShowSameTables] = useState<boolean>(false);
|
||||
const [analyzing, setAnalyzing] = useState<boolean>(false);
|
||||
const [diffTables, setDiffTables] = useState<TableDiffSummary[]>([]);
|
||||
@@ -128,9 +265,12 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setSourceDb('');
|
||||
setTargetDb('');
|
||||
setSelectedTables([]);
|
||||
setWorkflowType('sync');
|
||||
setSyncContent('data');
|
||||
setSyncMode('insert_update');
|
||||
setAutoAddColumns(true);
|
||||
setTargetTableStrategy('existing_only');
|
||||
setCreateIndexes(false);
|
||||
setShowSameTables(false);
|
||||
setAnalyzing(false);
|
||||
setDiffTables([]);
|
||||
@@ -148,36 +288,66 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
useEffect(() => {
|
||||
if (workflowType === 'migration') {
|
||||
if (syncMode === 'insert_update') {
|
||||
setSyncMode('insert_only');
|
||||
}
|
||||
if (syncContent === 'schema') {
|
||||
setSyncContent('both');
|
||||
}
|
||||
if (targetTableStrategy === 'existing_only') {
|
||||
setTargetTableStrategy('smart');
|
||||
}
|
||||
if (!createIndexes) {
|
||||
setCreateIndexes(true);
|
||||
}
|
||||
} else {
|
||||
if (targetTableStrategy !== 'existing_only') {
|
||||
setTargetTableStrategy('existing_only');
|
||||
}
|
||||
if (createIndexes) {
|
||||
setCreateIndexes(false);
|
||||
}
|
||||
}
|
||||
}, [workflowType]);
|
||||
|
||||
const handleSourceConnChange = async (connId: string) => {
|
||||
setSourceConnId(connId);
|
||||
setSourceDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setSourceDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
const dbRows = Array.isArray(res.data) ? res.data : [];
|
||||
setSourceDbs(dbRows
|
||||
.map((r: any) => r?.Database || r?.database || r?.username)
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== ''));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleTargetConnChange = async (connId: string) => {
|
||||
setTargetConnId(connId);
|
||||
setTargetDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setTargetDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
const dbRows = Array.isArray(res.data) ? res.data : [];
|
||||
setTargetDbs(dbRows
|
||||
.map((r: any) => r?.Database || r?.database || r?.username)
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== ''));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const nextToTables = async () => {
|
||||
@@ -189,14 +359,17 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
try {
|
||||
const conn = connections.find(c => c.id === sourceConnId);
|
||||
if (conn) {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tables = (res.data as any[]).map((row: any) => row.Table || row.table || row.TABLE_NAME || Object.values(row)[0]);
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tableRows = Array.isArray(res.data) ? res.data : [];
|
||||
const tables = tableRows
|
||||
.map((row: any) => row?.Table || row?.table || row?.TABLE_NAME || Object.values(row || {})[0])
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== '');
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
message.error(res.message);
|
||||
}
|
||||
}
|
||||
@@ -236,6 +409,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
content: syncContent,
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
targetTableStrategy,
|
||||
createIndexes,
|
||||
mongoCollectionName: mongoCollectionName.trim(),
|
||||
jobId,
|
||||
};
|
||||
|
||||
@@ -286,6 +462,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
content: "data",
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
targetTableStrategy,
|
||||
createIndexes,
|
||||
mongoCollectionName: mongoCollectionName.trim(),
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -362,6 +541,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
content: syncContent,
|
||||
mode: syncMode,
|
||||
autoAddColumns,
|
||||
targetTableStrategy,
|
||||
createIndexes,
|
||||
mongoCollectionName: mongoCollectionName.trim(),
|
||||
tableOptions,
|
||||
jobId,
|
||||
};
|
||||
@@ -402,10 +584,139 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
);
|
||||
};
|
||||
|
||||
const previewSql = useMemo(() => {
|
||||
if (!previewData || !previewTable) return { sqlText: '', statementCount: 0 };
|
||||
const targetType = String(connections.find(c => c.id === targetConnId)?.config?.type || '');
|
||||
const ops = tableOptions[previewTable] || { insert: true, update: true, delete: false };
|
||||
return buildSqlPreview(previewData, previewTable, targetType, ops);
|
||||
}, [previewData, previewTable, targetConnId, connections, tableOptions]);
|
||||
|
||||
const analysisWarnings = useMemo(() => {
|
||||
const items: string[] = [];
|
||||
diffTables.forEach((table) => {
|
||||
(table.warnings || []).forEach((warning) => items.push(`${table.table}: ${warning}`));
|
||||
(table.unsupportedObjects || []).forEach((warning) => items.push(`${table.table}: ${warning}`));
|
||||
});
|
||||
return Array.from(new Set(items));
|
||||
}, [diffTables]);
|
||||
|
||||
const isMigrationWorkflow = workflowType === 'migration';
|
||||
const sourceConn = useMemo(() => connections.find(c => c.id === sourceConnId), [connections, sourceConnId]);
|
||||
const targetConn = useMemo(() => connections.find(c => c.id === targetConnId), [connections, targetConnId]);
|
||||
const sourceType = String(sourceConn?.config?.type || '').toLowerCase();
|
||||
const targetType = String(targetConn?.config?.type || '').toLowerCase();
|
||||
const isRedisMongoKeyspaceMigration = isMigrationWorkflow && (
|
||||
(sourceType === 'redis' && targetType === 'mongodb') ||
|
||||
(sourceType === 'mongodb' && targetType === 'redis')
|
||||
);
|
||||
const defaultMongoCollectionName = useMemo(() => {
|
||||
if (sourceType === 'redis' && targetType === 'mongodb') {
|
||||
return `redis_db_${resolveRedisDbIndex(sourceDb || sourceConn?.config?.database)}_keys`;
|
||||
}
|
||||
if (sourceType === 'mongodb' && targetType === 'redis') {
|
||||
return selectedTables[0] || `redis_db_${resolveRedisDbIndex(targetDb || targetConn?.config?.database)}_keys`;
|
||||
}
|
||||
return '';
|
||||
}, [sourceType, targetType, sourceDb, targetDb, sourceConn, targetConn, selectedTables]);
|
||||
|
||||
const modalPanelStyle = useMemo(() => ({
|
||||
background: darkMode
|
||||
? 'linear-gradient(180deg, rgba(16,22,34,0.96) 0%, rgba(10,14,24,0.98) 100%)'
|
||||
: 'linear-gradient(180deg, rgba(255,255,255,0.98) 0%, rgba(246,248,252,0.98) 100%)',
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(16,24,40,0.08)',
|
||||
boxShadow: darkMode ? '0 24px 56px rgba(0,0,0,0.36)' : '0 18px 44px rgba(15,23,42,0.14)',
|
||||
backdropFilter: darkMode ? 'blur(18px)' : 'none',
|
||||
}), [darkMode]);
|
||||
|
||||
const shellCardStyle = useMemo<React.CSSProperties>(() => ({
|
||||
borderRadius: 18,
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.08)',
|
||||
background: darkMode ? 'rgba(255,255,255,0.03)' : `rgba(255,255,255,${Math.max(effectiveOpacity, 0.88)})`,
|
||||
boxShadow: darkMode ? '0 12px 32px rgba(0,0,0,0.22)' : '0 10px 24px rgba(15,23,42,0.08)',
|
||||
overflow: 'hidden',
|
||||
}), [darkMode, effectiveOpacity]);
|
||||
|
||||
const heroPanelStyle = useMemo<React.CSSProperties>(() => ({
|
||||
padding: 18,
|
||||
borderRadius: 18,
|
||||
border: darkMode ? '1px solid rgba(255,214,102,0.12)' : '1px solid rgba(24,144,255,0.12)',
|
||||
background: darkMode
|
||||
? 'linear-gradient(135deg, rgba(255,214,102,0.10) 0%, rgba(255,255,255,0.03) 100%)'
|
||||
: 'linear-gradient(135deg, rgba(24,144,255,0.10) 0%, rgba(255,255,255,0.95) 100%)',
|
||||
marginBottom: 18,
|
||||
}), [darkMode]);
|
||||
|
||||
const badgeStyle = useMemo<React.CSSProperties>(() => ({
|
||||
display: 'inline-flex',
|
||||
alignItems: 'center',
|
||||
gap: 6,
|
||||
padding: '6px 10px',
|
||||
borderRadius: 999,
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.10)' : '1px solid rgba(15,23,42,0.08)',
|
||||
background: darkMode ? 'rgba(255,255,255,0.04)' : 'rgba(255,255,255,0.86)',
|
||||
color: darkMode ? 'rgba(255,255,255,0.88)' : '#334155',
|
||||
fontSize: 12,
|
||||
fontWeight: 600,
|
||||
}), [darkMode]);
|
||||
|
||||
const quietPanelStyle = useMemo<React.CSSProperties>(() => ({
|
||||
padding: 14,
|
||||
borderRadius: 16,
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.08)',
|
||||
background: darkMode ? 'rgba(255,255,255,0.025)' : 'rgba(248,250,252,0.92)',
|
||||
}), [darkMode]);
|
||||
|
||||
const modalWorkspaceStyle = useMemo<React.CSSProperties>(() => ({
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
height: '100%',
|
||||
minHeight: 0,
|
||||
}), []);
|
||||
|
||||
const modalScrollableContentStyle = useMemo<React.CSSProperties>(() => ({
|
||||
flex: 1,
|
||||
minHeight: 0,
|
||||
overflowY: 'auto',
|
||||
overflowX: 'hidden',
|
||||
paddingRight: 4,
|
||||
overscrollBehavior: 'contain',
|
||||
}), []);
|
||||
|
||||
const modalFooterBarStyle = useMemo<React.CSSProperties>(() => ({
|
||||
marginTop: 18,
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
gap: 8,
|
||||
paddingTop: 12,
|
||||
borderTop: darkMode ? '1px solid rgba(255,255,255,0.06)' : '1px solid rgba(15,23,42,0.06)',
|
||||
flex: '0 0 auto',
|
||||
}), [darkMode]);
|
||||
|
||||
const renderModalTitle = (title: string, description: string) => (
|
||||
<div style={{ display: 'flex', alignItems: 'flex-start', gap: 12 }}>
|
||||
<div style={{
|
||||
width: 38,
|
||||
height: 38,
|
||||
borderRadius: 14,
|
||||
display: 'grid',
|
||||
placeItems: 'center',
|
||||
background: darkMode ? 'rgba(255,214,102,0.12)' : 'rgba(24,144,255,0.10)',
|
||||
color: darkMode ? '#ffd666' : token.colorPrimary,
|
||||
flexShrink: 0,
|
||||
}}>
|
||||
{isMigrationWorkflow ? <RocketOutlined /> : <SwapOutlined />}
|
||||
</div>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontSize: 16, fontWeight: 700, color: darkMode ? '#f8fafc' : '#0f172a' }}>{title}</div>
|
||||
<div style={{ marginTop: 4, fontSize: 12, lineHeight: 1.6, color: darkMode ? 'rgba(255,255,255,0.56)' : 'rgba(15,23,42,0.58)' }}>{description}</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal
|
||||
title="数据同步"
|
||||
title={renderModalTitle(isMigrationWorkflow ? '跨库迁移工作台' : '数据同步工作台', isMigrationWorkflow ? '按源库 → 目标库完成建表、导入与风险预检。' : '按已有目标表完成差异对比、同步执行与结果确认。')}
|
||||
open={open}
|
||||
onCancel={() => {
|
||||
if (syncing) {
|
||||
@@ -414,23 +725,61 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
}
|
||||
onClose();
|
||||
}}
|
||||
width={800}
|
||||
width={920}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
closable={!syncing}
|
||||
maskClosable={!syncing}
|
||||
styles={{
|
||||
content: modalPanelStyle,
|
||||
header: { background: 'transparent', borderBottom: 'none', paddingBottom: 10 },
|
||||
body: {
|
||||
paddingTop: 8,
|
||||
height: 760,
|
||||
maxHeight: 'calc(100vh - 120px)',
|
||||
overflow: 'hidden',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
},
|
||||
footer: { background: 'transparent', borderTop: 'none', paddingTop: 12 },
|
||||
}}
|
||||
>
|
||||
<div style={modalWorkspaceStyle}>
|
||||
<div style={{ flex: '0 0 auto' }}>
|
||||
<div style={heroPanelStyle}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', gap: 12, alignItems: 'flex-start', flexWrap: 'wrap' }}>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontSize: 18, fontWeight: 700, color: darkMode ? '#f8fafc' : '#0f172a' }}>{isMigrationWorkflow ? '跨数据源迁移' : '数据同步'}</div>
|
||||
<div style={{ marginTop: 6, fontSize: 13, lineHeight: 1.7, color: darkMode ? 'rgba(255,255,255,0.62)' : 'rgba(15,23,42,0.62)' }}>
|
||||
{isMigrationWorkflow
|
||||
? '适合把源表迁移到另一套数据库,可按策略自动建表、导入数据并补建可兼容索引。'
|
||||
: '适合目标表已存在的场景,先做差异分析,再按勾选执行插入、更新或删除。'}
|
||||
</div>
|
||||
</div>
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap', gap: 8 }}>
|
||||
<span style={badgeStyle}>{isMigrationWorkflow ? <RocketOutlined /> : <SwapOutlined />} {isMigrationWorkflow ? '迁移模式' : '同步模式'}</span>
|
||||
<span style={badgeStyle}><DatabaseOutlined /> {sourceConnId ? '已选源连接' : '待选源连接'}</span>
|
||||
<span style={badgeStyle}><TableOutlined /> {selectedTables.length || 0} 张表</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<Steps current={currentStep} style={{ marginBottom: 24 }}>
|
||||
<Step title="配置源与目标" />
|
||||
<Step title="选择表" />
|
||||
<Step title="执行结果" />
|
||||
</Steps>
|
||||
</div>
|
||||
|
||||
<div style={modalScrollableContentStyle}>
|
||||
{/* STEP 1: CONFIG */}
|
||||
{currentStep === 0 && (
|
||||
<div>
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<div style={{ display: 'grid', gridTemplateColumns: 'minmax(0, 1fr) 44px minmax(0, 1fr)', gap: 18, alignItems: 'stretch' }}>
|
||||
<Card
|
||||
title="源数据库"
|
||||
style={shellCardStyle}
|
||||
styles={{ header: { borderBottom: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)', fontWeight: 700 }, body: { padding: 18 } }}
|
||||
>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
@@ -444,8 +793,16 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<div style={{ display: 'grid', placeItems: 'center' }}>
|
||||
<div style={{ ...badgeStyle, width: 44, height: 44, borderRadius: 14, justifyContent: 'center', padding: 0 }}>
|
||||
<SwapOutlined />
|
||||
</div>
|
||||
</div>
|
||||
<Card
|
||||
title="目标数据库"
|
||||
style={shellCardStyle}
|
||||
styles={{ header: { borderBottom: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)', fontWeight: 700 }, body: { padding: 18 } }}
|
||||
>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
@@ -461,27 +818,94 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<Card title="同步选项" style={{ marginTop: 16 }}>
|
||||
<Card
|
||||
title={isMigrationWorkflow ? '迁移选项' : '同步选项'}
|
||||
style={{ ...shellCardStyle, marginTop: 18 }}
|
||||
styles={{ header: { borderBottom: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)', fontWeight: 700 }, body: { padding: 18 } }}
|
||||
>
|
||||
<div style={{ ...quietPanelStyle, marginBottom: 14 }}>
|
||||
<Text style={{ color: darkMode ? 'rgba(255,255,255,0.72)' : 'rgba(15,23,42,0.68)', lineHeight: 1.7 }}>
|
||||
先明确当前要做的是“已有目标表同步”还是“跨库迁移”,页面会按功能类型自动给出更安全的默认策略。
|
||||
</Text>
|
||||
</div>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="同步内容">
|
||||
<Form.Item label="功能类型">
|
||||
<Select value={workflowType} onChange={setWorkflowType}>
|
||||
<Option value="sync">数据同步(基于已有目标表做差异同步)</Option>
|
||||
<Option value="migration">跨库迁移(可自动建表后导入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Alert
|
||||
type={isMigrationWorkflow ? 'info' : 'success'}
|
||||
showIcon
|
||||
style={{ marginBottom: 12 }}
|
||||
message={isMigrationWorkflow
|
||||
? '当前为“跨库迁移”模式:适合将表迁移到另一数据源,可自动建表并导入数据。'
|
||||
: '当前为“数据同步”模式:适合目标表已存在时做增量同步或覆盖导入。'}
|
||||
/>
|
||||
<Form.Item label={isMigrationWorkflow ? '迁移内容' : '同步内容'}>
|
||||
<Select value={syncContent} onChange={setSyncContent}>
|
||||
<Option value="data">仅同步数据</Option>
|
||||
<Option value="schema">仅同步结构</Option>
|
||||
<Option value="both">同步结构 + 数据</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="同步模式">
|
||||
<Form.Item label={isMigrationWorkflow ? '迁移模式' : '同步模式'}>
|
||||
<Select value={syncMode} onChange={setSyncMode} disabled={syncContent === 'schema'}>
|
||||
<Option value="insert_update">增量同步(对比差异,按插入/更新/删除勾选执行)</Option>
|
||||
<Option value="insert_only">仅插入(不对比目标;无主键表将跳过)</Option>
|
||||
<Option value="full_overwrite">全量覆盖(清空目标表后插入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label={isMigrationWorkflow ? '目标表处理策略' : '目标表要求'}>
|
||||
<Select value={targetTableStrategy} onChange={setTargetTableStrategy} disabled={!isMigrationWorkflow}>
|
||||
<Option value="existing_only">仅使用已有目标表</Option>
|
||||
<Option value="auto_create_if_missing">目标表不存在时自动建表后导入</Option>
|
||||
<Option value="smart">智能模式(存在则直接导入,不存在则自动建表)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
{isRedisMongoKeyspaceMigration && (
|
||||
<Form.Item
|
||||
label="Mongo 集合名(可选)"
|
||||
extra={sourceType === 'redis'
|
||||
? '为空时沿用默认集合名;填写后本次 Redis 键空间会统一写入该 Mongo 集合。'
|
||||
: 'MongoDB → Redis 场景下通常直接选择源集合;这里留空即可,未显式选集合时才会回退使用该名称。'}
|
||||
>
|
||||
<Input
|
||||
value={mongoCollectionName}
|
||||
onChange={(e) => setMongoCollectionName(e.target.value)}
|
||||
placeholder={defaultMongoCollectionName || '请输入 Mongo 集合名'}
|
||||
allowClear
|
||||
maxLength={128}
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
<Form.Item>
|
||||
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)}>
|
||||
自动补齐目标表缺失字段(仅 MySQL 目标)
|
||||
自动补齐目标表缺失字段(当前支持 MySQL 目标及 MySQL → Kingbase)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
<Form.Item>
|
||||
<Checkbox checked={createIndexes} onChange={(e) => setCreateIndexes(e.target.checked)} disabled={!isMigrationWorkflow || targetTableStrategy === 'existing_only'}>
|
||||
自动迁移可兼容的普通索引/唯一索引(仅自动建表模式生效)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
{isMigrationWorkflow && targetTableStrategy !== 'existing_only' && (
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="自动建表模式首期仅支持 MySQL → Kingbase;将迁移字段、主键、普通/唯一/联合索引,并显式跳过全文、空间、前缀、函数类索引。"
|
||||
style={{ marginBottom: 12 }}
|
||||
/>
|
||||
)}
|
||||
{!isMigrationWorkflow && (
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="数据同步模式默认基于已有目标表执行;如需跨数据源建表导入,请切换到“跨库迁移”。"
|
||||
style={{ marginBottom: 12 }}
|
||||
/>
|
||||
)}
|
||||
{syncContent !== 'schema' && syncMode === 'full_overwrite' && (
|
||||
<Alert
|
||||
type="warning"
|
||||
@@ -496,26 +920,42 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 2: TABLES */}
|
||||
{currentStep === 1 && (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 12 }}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 14 }}>
|
||||
<div style={quietPanelStyle}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: 10 }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
|
||||
显示相同表
|
||||
</Checkbox>
|
||||
</div>
|
||||
<Transfer
|
||||
</div>
|
||||
<Transfer
|
||||
dataSource={allTables.map(t => ({ key: t, title: t }))}
|
||||
titles={['源表', '已选表']}
|
||||
targetKeys={selectedTables}
|
||||
onChange={(keys) => setSelectedTables(keys as string[])}
|
||||
render={item => item.title}
|
||||
listStyle={{ width: 350, height: 280, marginTop: 0 }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表', notFoundContent: '暂无数据' }}
|
||||
listStyle={{ width: 390, height: 320, marginTop: 0, borderRadius: 14, overflow: 'hidden' }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表…', notFoundContent: '暂无数据' }}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{diffTables.length > 0 && (
|
||||
<div>
|
||||
<Divider orientation="left">对比结果</Divider>
|
||||
<div style={quietPanelStyle}>
|
||||
<Divider orientation="left" style={{ marginTop: 0 }}>对比结果</Divider>
|
||||
{analysisWarnings.length > 0 && (
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="预检发现风险或降级项,请在执行前确认"
|
||||
description={
|
||||
<ul style={{ margin: 0, paddingLeft: 18 }}>
|
||||
{analysisWarnings.slice(0, 8).map((item) => <li key={item}>{item}</li>)}
|
||||
{analysisWarnings.length > 8 && <li>还有 {analysisWarnings.length - 8} 项未展开</li>}
|
||||
</ul>
|
||||
}
|
||||
style={{ marginBottom: 12 }}
|
||||
/>
|
||||
)}
|
||||
<Table
|
||||
size="small"
|
||||
pagination={false}
|
||||
@@ -527,13 +967,29 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const same = Number(t.same || 0);
|
||||
const msg = String(t.message || '').trim();
|
||||
const can = !!t.canSync;
|
||||
const warns = Array.isArray(t.warnings) ? t.warnings.length : 0;
|
||||
const unsupported = Array.isArray(t.unsupportedObjects) ? t.unsupportedObjects.length : 0;
|
||||
if (showSameTables) return true;
|
||||
if (!can) return true;
|
||||
if (msg) return true;
|
||||
if (msg || warns > 0 || unsupported > 0) return true;
|
||||
return ins > 0 || upd > 0 || del > 0 || same === 0;
|
||||
})}
|
||||
columns={[
|
||||
{ title: '表名', dataIndex: 'table', key: 'table', ellipsis: true },
|
||||
{
|
||||
title: '目标表',
|
||||
key: 'targetTableExists',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => r.targetTableExists ? '已存在' : '不存在'
|
||||
},
|
||||
{
|
||||
title: '计划',
|
||||
dataIndex: 'plannedAction',
|
||||
key: 'plannedAction',
|
||||
width: 220,
|
||||
ellipsis: true,
|
||||
render: (v: any) => String(v || '')
|
||||
},
|
||||
{
|
||||
title: '插入',
|
||||
key: 'inserts',
|
||||
@@ -542,11 +998,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.inserts || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.insert}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}
|
||||
>
|
||||
<Checkbox checked={!!ops.insert} disabled={disabled} onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}>
|
||||
{Number(r.inserts || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
@@ -560,11 +1012,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.updates || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.update}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}
|
||||
>
|
||||
<Checkbox checked={!!ops.update} disabled={disabled} onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}>
|
||||
{Number(r.updates || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
@@ -578,18 +1026,28 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.deletes || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.delete}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}
|
||||
>
|
||||
<Checkbox checked={!!ops.delete} disabled={disabled} onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}>
|
||||
{Number(r.deletes || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{ title: '相同', dataIndex: 'same', key: 'same', width: 70, render: (v: any) => Number(v || 0) },
|
||||
{ title: '消息', dataIndex: 'message', key: 'message', ellipsis: true, render: (v: any) => (v ? String(v) : '') },
|
||||
{
|
||||
title: '风险',
|
||||
key: 'warnings',
|
||||
width: 220,
|
||||
render: (_: any, r: any) => {
|
||||
const warns = [...(Array.isArray(r.warnings) ? r.warnings : []), ...(Array.isArray(r.unsupportedObjects) ? r.unsupportedObjects : [])];
|
||||
if (warns.length === 0) return '-';
|
||||
return (
|
||||
<div style={{ color: '#d48806', fontSize: 12, lineHeight: 1.5 }}>
|
||||
{warns.slice(0, 2).map((item: string) => <div key={item}>{item}</div>)}
|
||||
{warns.length > 2 && <div>还有 {warns.length - 2} 项</div>}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '预览',
|
||||
key: 'preview',
|
||||
@@ -613,7 +1071,8 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 3: RESULT */}
|
||||
{currentStep === 2 && (
|
||||
<div>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 14 }}>
|
||||
<div style={quietPanelStyle}>
|
||||
<Alert
|
||||
message={syncing ? "正在同步" : (syncResult?.success ? "同步完成" : "同步失败")}
|
||||
description={
|
||||
@@ -625,7 +1084,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
showIcon
|
||||
/>
|
||||
|
||||
<div style={{ marginTop: 12 }}>
|
||||
<div style={{ marginTop: 14 }}>
|
||||
<Progress
|
||||
percent={syncProgress.percent}
|
||||
status={syncing ? "active" : (syncResult?.success ? "success" : "exception")}
|
||||
@@ -633,7 +1092,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Divider orientation="left">日志</Divider>
|
||||
</div>
|
||||
<div style={quietPanelStyle}>
|
||||
<Divider orientation="left" style={{ marginTop: 0 }}>执行日志</Divider>
|
||||
<div
|
||||
ref={logBoxRef}
|
||||
onScroll={() => {
|
||||
@@ -642,14 +1103,25 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const nearBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
|
||||
autoScrollRef.current = nearBottom;
|
||||
}}
|
||||
style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}
|
||||
style={{
|
||||
background: darkMode ? 'rgba(255,255,255,0.03)' : 'rgba(248,250,252,0.92)',
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)',
|
||||
borderRadius: 14,
|
||||
padding: 12,
|
||||
height: 300,
|
||||
overflowY: 'auto',
|
||||
fontFamily: 'SFMono-Regular, ui-monospace, Menlo, Consolas, monospace'
|
||||
}}
|
||||
>
|
||||
{syncLogs.map((item, i: number) => <div key={i}>{renderSyncLogItem(item)}</div>)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ marginTop: 24, textAlign: 'right' }}>
|
||||
</div>
|
||||
|
||||
<div style={modalFooterBarStyle}>
|
||||
{currentStep === 0 && (
|
||||
<Button type="primary" onClick={nextToTables} loading={loading}>下一步</Button>
|
||||
)}
|
||||
@@ -676,14 +1148,16 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
<Drawer
|
||||
title={`差异预览:${previewTable}`}
|
||||
styles={{ body: { background: darkMode ? 'rgba(9,13,20,0.98)' : '#f8fafc' } }}
|
||||
open={previewOpen}
|
||||
onClose={() => { setPreviewOpen(false); setPreviewTable(''); setPreviewData(null); }}
|
||||
width={900}
|
||||
>
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览..." />}
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览…" />}
|
||||
{!previewLoading && previewData && (
|
||||
<div>
|
||||
<Alert
|
||||
@@ -794,6 +1268,51 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'sql',
|
||||
label: `SQL(${previewSql.statementCount})`,
|
||||
children: (
|
||||
<div>
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="SQL 预览会按当前勾选的插入/更新/删除与行选择范围生成,用于审核确认。"
|
||||
/>
|
||||
<div style={{ marginTop: 8, marginBottom: 8, display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">共 {previewSql.statementCount} 条语句(预览数据最多 200 条/类型)</Text>
|
||||
<Button
|
||||
size="small"
|
||||
disabled={!previewSql.sqlText}
|
||||
onClick={async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(previewSql.sqlText || '');
|
||||
message.success('SQL 已复制');
|
||||
} catch {
|
||||
message.error('复制失败,请手动复制');
|
||||
}
|
||||
}}
|
||||
>
|
||||
复制 SQL
|
||||
</Button>
|
||||
</div>
|
||||
<pre
|
||||
style={{
|
||||
margin: 0,
|
||||
padding: 10,
|
||||
border: '1px solid #f0f0f0',
|
||||
borderRadius: 6,
|
||||
background: '#fafafa',
|
||||
maxHeight: 420,
|
||||
overflow: 'auto',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-word'
|
||||
}}
|
||||
>
|
||||
{previewSql.sqlText || '-- 当前勾选范围下无 SQL 可预览'}
|
||||
</pre>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
|
||||
@@ -1,12 +1,207 @@
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef, useMemo } from 'react';
|
||||
import { message } from 'antd';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { buildOrderBySQL, buildWhereSQL, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
import { buildOrderBySQL, buildPaginatedSelectSQL, buildWhereSQL, quoteIdentPart, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
import { buildMongoCountCommand, buildMongoFilter, buildMongoFindCommand, buildMongoSort } from '../utils/mongodb';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
|
||||
type ViewerPaginationState = {
|
||||
current: number;
|
||||
pageSize: number;
|
||||
total: number;
|
||||
totalKnown: boolean;
|
||||
totalApprox: boolean;
|
||||
totalCountLoading: boolean;
|
||||
totalCountCancelled: boolean;
|
||||
};
|
||||
|
||||
const JS_MAX_SAFE_INTEGER_BIGINT = BigInt(Number.MAX_SAFE_INTEGER);
|
||||
|
||||
const isIntegerText = (text: string): boolean => /^[+-]?\d+$/.test(text);
|
||||
|
||||
const toNonNegativeFiniteNumber = (value: unknown): number | null => {
|
||||
if (typeof value === 'number') {
|
||||
return Number.isFinite(value) && value >= 0 && value <= Number.MAX_SAFE_INTEGER ? value : null;
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return value >= 0n && value <= JS_MAX_SAFE_INTEGER_BIGINT ? Number(value) : null;
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
const text = value.trim();
|
||||
if (!text) return null;
|
||||
if (isIntegerText(text)) {
|
||||
try {
|
||||
const parsedBigInt = BigInt(text);
|
||||
if (parsedBigInt < 0n || parsedBigInt > JS_MAX_SAFE_INTEGER_BIGINT) {
|
||||
return null;
|
||||
}
|
||||
return Number(parsedBigInt);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
const parsed = Number(text);
|
||||
return Number.isFinite(parsed) && parsed >= 0 && parsed <= Number.MAX_SAFE_INTEGER ? parsed : null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const parseTotalFromCountRow = (row: any): number | null => {
|
||||
if (!row || typeof row !== 'object') return null;
|
||||
const entries = Object.entries(row as Record<string, unknown>);
|
||||
if (entries.length === 0) return null;
|
||||
|
||||
for (const [key, raw] of entries) {
|
||||
const normalized = String(key || '').trim().toLowerCase();
|
||||
if (normalized === 'total' || normalized === 'count' || normalized.includes('count')) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [, raw] of entries) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
const parseDuckDBApproxTotalRow = (row: any): number | null => {
|
||||
if (!row || typeof row !== 'object') return null;
|
||||
const entries = Object.entries(row as Record<string, unknown>);
|
||||
if (entries.length === 0) return null;
|
||||
|
||||
const preferredKeys = ['approx_total', 'estimated_size', 'estimated_rows', 'row_count', 'count', 'total'];
|
||||
for (const preferred of preferredKeys) {
|
||||
for (const [key, raw] of entries) {
|
||||
if (String(key || '').trim().toLowerCase() !== preferred) continue;
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [key, raw] of entries) {
|
||||
const normalized = String(key || '').trim().toLowerCase();
|
||||
if (normalized.includes('estimate') || normalized.includes('row') || normalized.includes('count') || normalized.includes('total')) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const normalizeDuckDBIdentifier = (raw: string): string => {
|
||||
const text = String(raw || '').trim();
|
||||
if (text.length >= 2) {
|
||||
const first = text[0];
|
||||
const last = text[text.length - 1];
|
||||
if ((first === '"' && last === '"') || (first === '`' && last === '`')) {
|
||||
return text.slice(1, -1).trim();
|
||||
}
|
||||
}
|
||||
return text;
|
||||
};
|
||||
|
||||
const resolveDuckDBSchemaAndTable = (dbName: string, tableName: string) => {
|
||||
const rawTable = String(tableName || '').trim();
|
||||
if (!rawTable) return { schemaName: 'main', pureTableName: '' };
|
||||
|
||||
const parts = rawTable.split('.');
|
||||
if (parts.length >= 2) {
|
||||
const pureTableName = normalizeDuckDBIdentifier(parts[parts.length - 1]);
|
||||
const schemaName = normalizeDuckDBIdentifier(parts[parts.length - 2]);
|
||||
if (schemaName && pureTableName) {
|
||||
return { schemaName, pureTableName };
|
||||
}
|
||||
}
|
||||
|
||||
const fallbackSchema = normalizeDuckDBIdentifier(String(dbName || '').trim()) || 'main';
|
||||
return { schemaName: fallbackSchema, pureTableName: normalizeDuckDBIdentifier(rawTable) };
|
||||
};
|
||||
|
||||
const escapeSQLLiteral = (value: string): string => String(value || '').replace(/'/g, "''");
|
||||
|
||||
const isDuckDBUnsupportedTypeError = (msg: string): boolean => /unsupported\s*type:\s*duckdb\./i.test(String(msg || ''));
|
||||
|
||||
const isDuckDBComplexColumnType = (columnType?: string): boolean => {
|
||||
const raw = String(columnType || '').trim().toLowerCase();
|
||||
if (!raw) return false;
|
||||
return raw.includes('map') || raw.includes('struct') || raw.includes('union') || raw.includes('array') || raw.includes('list');
|
||||
};
|
||||
|
||||
const reverseOrderBySQL = (orderBySQL: string): string => {
|
||||
const raw = String(orderBySQL || '').trim();
|
||||
if (!raw) return '';
|
||||
const body = raw.replace(/^order\s+by\s+/i, '').trim();
|
||||
if (!body) return '';
|
||||
|
||||
const parts = body
|
||||
.split(',')
|
||||
.map((part) => part.trim())
|
||||
.filter(Boolean)
|
||||
.map((part) => {
|
||||
if (/\s+asc$/i.test(part)) return part.replace(/\s+asc$/i, ' DESC');
|
||||
if (/\s+desc$/i.test(part)) return part.replace(/\s+desc$/i, ' ASC');
|
||||
return `${part} DESC`;
|
||||
});
|
||||
if (parts.length === 0) return '';
|
||||
return ` ORDER BY ${parts.join(', ')}`;
|
||||
};
|
||||
|
||||
type ViewerFilterSnapshot = {
|
||||
showFilter: boolean;
|
||||
conditions: FilterCondition[];
|
||||
currentPage: number;
|
||||
pageSize: number;
|
||||
sortInfo: { columnKey: string, order: string } | null;
|
||||
scrollTop: number;
|
||||
scrollLeft: number;
|
||||
};
|
||||
|
||||
type ViewerScrollSnapshot = {
|
||||
top: number;
|
||||
left: number;
|
||||
};
|
||||
|
||||
const viewerFilterSnapshotsByTab = new Map<string, ViewerFilterSnapshot>();
|
||||
|
||||
const normalizeViewerFilterConditions = (conditions: FilterCondition[] | undefined): FilterCondition[] => {
|
||||
if (!Array.isArray(conditions)) return [];
|
||||
return conditions.map((cond) => ({
|
||||
id: Number.isFinite(Number(cond?.id)) ? Number(cond?.id) : undefined,
|
||||
enabled: cond?.enabled !== false,
|
||||
logic: String(cond?.logic || '').trim().toUpperCase() === 'OR' ? 'OR' : 'AND',
|
||||
column: String(cond?.column || ''),
|
||||
op: String(cond?.op || '='),
|
||||
value: String(cond?.value ?? ''),
|
||||
value2: String(cond?.value2 ?? ''),
|
||||
}));
|
||||
};
|
||||
|
||||
const getViewerFilterSnapshot = (tabId: string): ViewerFilterSnapshot => {
|
||||
const cached = viewerFilterSnapshotsByTab.get(String(tabId || '').trim());
|
||||
if (!cached) {
|
||||
return { showFilter: false, conditions: [], currentPage: 1, pageSize: 100, sortInfo: null, scrollTop: 0, scrollLeft: 0 };
|
||||
}
|
||||
return {
|
||||
showFilter: cached.showFilter === true,
|
||||
conditions: normalizeViewerFilterConditions(cached.conditions),
|
||||
currentPage: Number.isFinite(Number(cached.currentPage)) && Number(cached.currentPage) > 0 ? Number(cached.currentPage) : 1,
|
||||
pageSize: Number.isFinite(Number(cached.pageSize)) && Number(cached.pageSize) > 0 ? Number(cached.pageSize) : 100,
|
||||
sortInfo: cached.sortInfo && cached.sortInfo.columnKey && (cached.sortInfo.order === 'ascend' || cached.sortInfo.order === 'descend')
|
||||
? { columnKey: String(cached.sortInfo.columnKey), order: cached.sortInfo.order }
|
||||
: null,
|
||||
scrollTop: Number.isFinite(Number(cached.scrollTop)) ? Number(cached.scrollTop) : 0,
|
||||
scrollLeft: Number.isFinite(Number(cached.scrollLeft)) ? Number(cached.scrollLeft) : 0,
|
||||
};
|
||||
};
|
||||
|
||||
const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const initialViewerSnapshot = useMemo(() => getViewerFilterSnapshot(tab.id), [tab.id]);
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
const [columnNames, setColumnNames] = useState<string[]>([]);
|
||||
const [pkColumns, setPkColumns] = useState<string[]>([]);
|
||||
@@ -16,29 +211,184 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const fetchSeqRef = useRef(0);
|
||||
const countSeqRef = useRef(0);
|
||||
const countKeyRef = useRef<string>('');
|
||||
const duckdbApproxSeqRef = useRef(0);
|
||||
const duckdbApproxKeyRef = useRef<string>('');
|
||||
const manualCountSeqRef = useRef(0);
|
||||
const manualCountKeyRef = useRef<string>('');
|
||||
const pkSeqRef = useRef(0);
|
||||
const pkKeyRef = useRef<string>('');
|
||||
const latestConfigRef = useRef<any>(null);
|
||||
const latestDbTypeRef = useRef<string>('');
|
||||
const latestDbNameRef = useRef<string>('');
|
||||
const latestCountSqlRef = useRef<string>('');
|
||||
const latestCountKeyRef = useRef<string>('');
|
||||
const scrollSnapshotRef = useRef<ViewerScrollSnapshot>({
|
||||
top: initialViewerSnapshot.scrollTop,
|
||||
left: initialViewerSnapshot.scrollLeft,
|
||||
});
|
||||
const initialLoadRef = useRef(false);
|
||||
|
||||
const [pagination, setPagination] = useState({
|
||||
current: 1,
|
||||
pageSize: 100,
|
||||
const [pagination, setPagination] = useState<ViewerPaginationState>({
|
||||
current: initialViewerSnapshot.currentPage,
|
||||
pageSize: initialViewerSnapshot.pageSize,
|
||||
total: 0,
|
||||
totalKnown: false
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
});
|
||||
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(null);
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(initialViewerSnapshot.sortInfo);
|
||||
|
||||
const [showFilter, setShowFilter] = useState(false);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>([]);
|
||||
const currentConnType = (connections.find(c => c.id === tab.connectionId)?.config?.type || '').toLowerCase();
|
||||
const forceReadOnly = currentConnType === 'tdengine';
|
||||
const [showFilter, setShowFilter] = useState<boolean>(initialViewerSnapshot.showFilter);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>(initialViewerSnapshot.conditions);
|
||||
const duckdbSafeSelectCacheRef = useRef<Record<string, string>>({});
|
||||
const currentConnConfig = connections.find(c => c.id === tab.connectionId)?.config;
|
||||
const currentConnCaps = getDataSourceCapabilities(currentConnConfig);
|
||||
const currentConnType = currentConnCaps.type;
|
||||
const forceReadOnly = currentConnCaps.forceReadOnlyQueryResult;
|
||||
const persistViewerSnapshot = useCallback((tabId: string, overrides?: Partial<ViewerFilterSnapshot>) => {
|
||||
const normalizedTabId = String(tabId || '').trim();
|
||||
if (!normalizedTabId) return;
|
||||
viewerFilterSnapshotsByTab.set(normalizedTabId, {
|
||||
showFilter,
|
||||
conditions: normalizeViewerFilterConditions(filterConditions),
|
||||
currentPage: pagination.current,
|
||||
pageSize: pagination.pageSize,
|
||||
sortInfo,
|
||||
scrollTop: scrollSnapshotRef.current.top,
|
||||
scrollLeft: scrollSnapshotRef.current.left,
|
||||
...overrides,
|
||||
});
|
||||
}, [showFilter, filterConditions, pagination.current, pagination.pageSize, sortInfo]);
|
||||
|
||||
useEffect(() => {
|
||||
const snapshot = getViewerFilterSnapshot(tab.id);
|
||||
setShowFilter(snapshot.showFilter);
|
||||
setFilterConditions(snapshot.conditions);
|
||||
setSortInfo(snapshot.sortInfo);
|
||||
scrollSnapshotRef.current = { top: snapshot.scrollTop, left: snapshot.scrollLeft };
|
||||
initialLoadRef.current = false;
|
||||
}, [tab.id]);
|
||||
|
||||
useEffect(() => {
|
||||
persistViewerSnapshot(tab.id);
|
||||
}, [tab.id, persistViewerSnapshot]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
persistViewerSnapshot(tab.id);
|
||||
};
|
||||
}, [tab.id, persistViewerSnapshot]);
|
||||
|
||||
useEffect(() => {
|
||||
const snapshot = getViewerFilterSnapshot(tab.id);
|
||||
setPkColumns([]);
|
||||
pkKeyRef.current = '';
|
||||
countKeyRef.current = '';
|
||||
setPagination(prev => ({ ...prev, current: 1, total: 0, totalKnown: false }));
|
||||
}, [tab.connectionId, tab.dbName, tab.tableName]);
|
||||
duckdbApproxKeyRef.current = '';
|
||||
manualCountKeyRef.current = '';
|
||||
duckdbSafeSelectCacheRef.current = {};
|
||||
latestConfigRef.current = null;
|
||||
latestDbTypeRef.current = '';
|
||||
latestDbNameRef.current = '';
|
||||
latestCountSqlRef.current = '';
|
||||
latestCountKeyRef.current = '';
|
||||
scrollSnapshotRef.current = { top: snapshot.scrollTop, left: snapshot.scrollLeft };
|
||||
initialLoadRef.current = false;
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
current: snapshot.currentPage,
|
||||
pageSize: snapshot.pageSize,
|
||||
total: 0,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
}, [tab.id, tab.connectionId, tab.dbName, tab.tableName]);
|
||||
|
||||
const handleTableScrollSnapshotChange = useCallback((snapshot: ViewerScrollSnapshot) => {
|
||||
scrollSnapshotRef.current = snapshot;
|
||||
persistViewerSnapshot(tab.id, {
|
||||
scrollTop: snapshot.top,
|
||||
scrollLeft: snapshot.left,
|
||||
});
|
||||
}, [tab.id, persistViewerSnapshot]);
|
||||
|
||||
const handleDuckDBManualCount = useCallback(async () => {
|
||||
if (latestDbTypeRef.current !== 'duckdb') {
|
||||
return;
|
||||
}
|
||||
const config = latestConfigRef.current;
|
||||
const dbName = latestDbNameRef.current;
|
||||
const countSql = latestCountSqlRef.current;
|
||||
const countKey = latestCountKeyRef.current;
|
||||
|
||||
if (!config || !countSql || !countKey) {
|
||||
message.warning('当前结果集尚未就绪,请先执行一次加载');
|
||||
return;
|
||||
}
|
||||
|
||||
manualCountKeyRef.current = countKey;
|
||||
const countSeq = ++manualCountSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: true, totalCountCancelled: false }));
|
||||
const countConfig: any = { ...(config as any), timeout: 120 };
|
||||
|
||||
try {
|
||||
const resCount = await DBQuery(countConfig as any, dbName, countSql);
|
||||
const countDuration = Date.now() - countStart;
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-duckdb-manual-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount?.success ? 'success' : 'error',
|
||||
duration: countDuration,
|
||||
message: resCount?.success ? '' : String(resCount?.message || '统计失败'),
|
||||
dbName
|
||||
});
|
||||
|
||||
if (manualCountSeqRef.current !== countSeq) return;
|
||||
if (manualCountKeyRef.current !== countKey) return;
|
||||
|
||||
if (!resCount?.success) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error(String(resCount?.message || '统计总数失败'));
|
||||
return;
|
||||
}
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
return;
|
||||
}
|
||||
|
||||
const total = parseTotalFromCountRow(resCount.data[0]);
|
||||
if (total === null) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error('统计结果解析失败');
|
||||
return;
|
||||
}
|
||||
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
total,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
} catch (e: any) {
|
||||
if (manualCountSeqRef.current !== countSeq) return;
|
||||
if (manualCountKeyRef.current !== countKey) return;
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error(`统计总数失败: ${String(e?.message || e)}`);
|
||||
}
|
||||
}, [addSqlLog]);
|
||||
|
||||
const handleDuckDBCancelManualCount = useCallback(() => {
|
||||
manualCountSeqRef.current++;
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false, totalCountCancelled: true }));
|
||||
}, []);
|
||||
|
||||
const fetchData = useCallback(async (page = pagination.current, size = pagination.pageSize) => {
|
||||
const seq = ++fetchSeqRef.current;
|
||||
@@ -65,40 +415,141 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const dbName = tab.dbName || '';
|
||||
const tableName = tab.tableName || '';
|
||||
const isMongoDB = dbTypeLower === 'mongodb';
|
||||
let mongoFilter: Record<string, unknown> | undefined;
|
||||
if (isMongoDB) {
|
||||
try {
|
||||
mongoFilter = buildMongoFilter(filterConditions);
|
||||
} catch (e: any) {
|
||||
message.error(`Mongo 筛选条件无效:${String(e?.message || e || '解析失败')}`);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const offset = (page - 1) * size;
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
const whereSQL = isMongoDB
|
||||
? JSON.stringify(mongoFilter || {})
|
||||
: buildWhereSQL(dbType, filterConditions);
|
||||
const countSql = isMongoDB
|
||||
? buildMongoCountCommand(tableName, mongoFilter || {})
|
||||
: `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
const orderBySQL = isMongoDB ? '' : buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const totalRows = Number(pagination.total);
|
||||
const hasFiniteTotal = Number.isFinite(totalRows) && totalRows >= 0;
|
||||
const totalKnown = pagination.totalKnown && hasFiniteTotal;
|
||||
const totalPages = hasFiniteTotal ? Math.max(1, Math.ceil(totalRows / size)) : 0;
|
||||
const currentPage = totalPages > 0 ? Math.min(Math.max(1, page), totalPages) : Math.max(1, page);
|
||||
const offset = (currentPage - 1) * size;
|
||||
const isClickHouse = !isMongoDB && dbTypeLower === 'clickhouse';
|
||||
const reverseOrderSQL = isClickHouse ? reverseOrderBySQL(orderBySQL) : '';
|
||||
let useClickHouseReversePagination = false;
|
||||
let clickHouseReverseLimit = 0;
|
||||
let clickHouseReverseHasMore = false;
|
||||
let sql = '';
|
||||
if (isMongoDB) {
|
||||
const mongoSort = buildMongoSort(sortInfo, pkColumns);
|
||||
sql = buildMongoFindCommand({
|
||||
collection: tableName,
|
||||
filter: mongoFilter || {},
|
||||
sort: mongoSort,
|
||||
limit: size + 1,
|
||||
skip: offset,
|
||||
});
|
||||
} else {
|
||||
const baseSql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql = `${baseSql}${orderBySQL}`;
|
||||
// ClickHouse 深分页在超大 OFFSET 下容易超时。对于总数已知且存在 ORDER BY 的场景,
|
||||
// 当“尾部偏移”小于“头部偏移”时,改为反向 ORDER BY + 小 OFFSET,并在前端翻转结果。
|
||||
if (isClickHouse && totalKnown && offset > 0 && reverseOrderSQL) {
|
||||
const pageRowCount = Math.max(0, Math.min(size, totalRows - offset));
|
||||
if (pageRowCount > 0) {
|
||||
const tailOffset = Math.max(0, totalRows - (offset + pageRowCount));
|
||||
if (tailOffset < offset) {
|
||||
sql = buildPaginatedSelectSQL(dbType, baseSql, reverseOrderSQL, pageRowCount, tailOffset);
|
||||
useClickHouseReversePagination = true;
|
||||
clickHouseReverseLimit = pageRowCount;
|
||||
clickHouseReverseHasMore = currentPage < totalPages;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!useClickHouseReversePagination) {
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql = buildPaginatedSelectSQL(dbType, baseSql, orderBySQL, size + 1, offset);
|
||||
}
|
||||
}
|
||||
|
||||
const requestStartTime = Date.now();
|
||||
let executedSql = sql;
|
||||
try {
|
||||
const executeDataQuery = async (querySql: string, attemptLabel: string) => {
|
||||
const startTime = Date.now();
|
||||
const result = await DBQuery(config as any, dbName, querySql);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: result.success ? 'success' : 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: result.success ? '' : `${attemptLabel}: ${result.message}`,
|
||||
affectedRows: Array.isArray(result.data) ? result.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
return result;
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, querySql);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: result.success ? 'success' : 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: result.success ? '' : `${attemptLabel}: ${result.message}`,
|
||||
affectedRows: Array.isArray(result.data) ? result.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
return result;
|
||||
} catch (e: any) {
|
||||
const errMessage = String(e?.message || e || 'query failed');
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: `${attemptLabel}: ${errMessage}`,
|
||||
dbName
|
||||
});
|
||||
return { success: false, message: errMessage, data: [], fields: [] };
|
||||
}
|
||||
};
|
||||
|
||||
const hasSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
const isSortMemoryErr = (msg: string) => /error\s*1038|out of sort memory/i.test(String(msg || ''));
|
||||
let resData = await executeDataQuery(sql, '主查询');
|
||||
|
||||
if (!resData.success && dbTypeLower === 'duckdb' && isDuckDBUnsupportedTypeError(String(resData.message || ''))) {
|
||||
const cacheKey = `${tab.connectionId}|${dbName}|${tableName}`;
|
||||
let safeSelect = duckdbSafeSelectCacheRef.current[cacheKey] || '';
|
||||
if (!safeSelect) {
|
||||
try {
|
||||
const resCols = await DBGetColumns(config as any, dbName, tableName);
|
||||
if (resCols?.success && Array.isArray(resCols.data)) {
|
||||
const columnDefs = resCols.data as ColumnDefinition[];
|
||||
const selectParts = columnDefs.map((col) => {
|
||||
const colName = String(col?.name || '').trim();
|
||||
if (!colName) return '';
|
||||
const quotedCol = quoteIdentPart(dbType, colName);
|
||||
if (isDuckDBComplexColumnType(col?.type)) {
|
||||
return `CAST(${quotedCol} AS VARCHAR) AS ${quotedCol}`;
|
||||
}
|
||||
return quotedCol;
|
||||
}).filter(Boolean);
|
||||
if (selectParts.length > 0) {
|
||||
safeSelect = selectParts.join(', ');
|
||||
duckdbSafeSelectCacheRef.current[cacheKey] = safeSelect;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// ignore and keep original error path
|
||||
}
|
||||
}
|
||||
|
||||
if (safeSelect) {
|
||||
let fallbackSql = `SELECT ${safeSelect} FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
fallbackSql = buildPaginatedSelectSQL(dbType, fallbackSql, buildOrderBySQL(dbType, sortInfo, pkColumns), size + 1, offset);
|
||||
executedSql = fallbackSql;
|
||||
resData = await executeDataQuery(fallbackSql, '复杂类型降级重试');
|
||||
}
|
||||
}
|
||||
|
||||
if (!resData.success && isMySQLFamily && hasSort && isSortMemoryErr(resData.message)) {
|
||||
const retrySql32MB = withSortBufferTuningSQL(dbType, sql, 32 * 1024 * 1024);
|
||||
if (retrySql32MB !== sql) {
|
||||
@@ -141,7 +592,12 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
let resultData = resData.data as any[];
|
||||
if (!Array.isArray(resultData)) resultData = [];
|
||||
|
||||
const hasMore = resultData.length > size;
|
||||
if (useClickHouseReversePagination) {
|
||||
// 反向查询后恢复为原排序方向,保证用户看到的仍是“最后一页正序数据”。
|
||||
resultData = resultData.slice(0, clickHouseReverseLimit).reverse();
|
||||
}
|
||||
|
||||
const hasMore = useClickHouseReversePagination ? clickHouseReverseHasMore : resultData.length > size;
|
||||
if (hasMore) resultData = resultData.slice(0, size);
|
||||
|
||||
let fieldNames = resData.fields || [];
|
||||
@@ -156,26 +612,71 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setData(resultData);
|
||||
const countKey = `${tab.connectionId}|${dbName}|${tableName}|${whereSQL}`;
|
||||
const derivedTotalKnown = !hasMore;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : page * size + 1;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : currentPage * size + 1;
|
||||
const isDuckDB = dbTypeLower === 'duckdb';
|
||||
const minExpectedTotal = hasMore ? offset + resultData.length + 1 : offset + resultData.length;
|
||||
if (derivedTotalKnown) countKeyRef.current = countKey;
|
||||
latestConfigRef.current = config;
|
||||
latestDbTypeRef.current = dbTypeLower;
|
||||
latestDbNameRef.current = dbName;
|
||||
latestCountSqlRef.current = countSql;
|
||||
latestCountKeyRef.current = countKey;
|
||||
|
||||
setPagination(prev => {
|
||||
if (derivedTotalKnown) {
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: true };
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
total: derivedTotal,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
}
|
||||
if (prev.totalKnown && countKeyRef.current === countKey) {
|
||||
return { ...prev, current: page, pageSize: size };
|
||||
if (!isDuckDB) {
|
||||
return { ...prev, current: currentPage, pageSize: size };
|
||||
}
|
||||
// 当当前页存在“下一页”信号时,已知总数至少应大于当前页末尾。
|
||||
// 若旧总数不满足该条件(例如历史统计值为 0),降级为未知总数并回退到 derivedTotal。
|
||||
if (Number.isFinite(prev.total) && prev.total >= minExpectedTotal) {
|
||||
return { ...prev, current: currentPage, pageSize: size };
|
||||
}
|
||||
}
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: false };
|
||||
const keepManualCounting = prev.totalCountLoading && manualCountKeyRef.current === countKey;
|
||||
if (isDuckDB && prev.totalApprox && duckdbApproxKeyRef.current === countKey && Number.isFinite(prev.total) && prev.total >= minExpectedTotal) {
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
totalKnown: false,
|
||||
totalApprox: true,
|
||||
totalCountLoading: keepManualCounting,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
total: derivedTotal,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: keepManualCounting,
|
||||
totalCountCancelled: keepManualCounting ? false : prev.totalCountCancelled,
|
||||
};
|
||||
});
|
||||
|
||||
if (!derivedTotalKnown) {
|
||||
const shouldRunAsyncCount = !derivedTotalKnown && !isDuckDB;
|
||||
if (shouldRunAsyncCount) {
|
||||
if (countKeyRef.current !== countKey) {
|
||||
countKeyRef.current = countKey;
|
||||
const countSeq = ++countSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
// 大表 COUNT(*) 可能非常慢,且在部分运行时环境下会影响后续操作响应;
|
||||
// 这里为统计请求设置更短的超时,避免“后台统计”长期占用资源。
|
||||
// DuckDB 大文件场景下该统计会显著拖慢翻页,已禁用后台 COUNT。
|
||||
const countConfig: any = { ...(config as any), timeout: 5 };
|
||||
|
||||
DBQuery(countConfig, dbName, countSql)
|
||||
@@ -198,10 +699,17 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (!resCount.success) return;
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) return;
|
||||
|
||||
const total = Number(resCount.data[0]?.['total']);
|
||||
if (!Number.isFinite(total) || total < 0) return;
|
||||
const total = parseTotalFromCountRow(resCount.data[0]);
|
||||
if (total === null) return;
|
||||
|
||||
setPagination(prev => ({ ...prev, total, totalKnown: true }));
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
total,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
})
|
||||
.catch(() => {
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
@@ -210,6 +718,50 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (isDuckDB && !derivedTotalKnown && whereSQL.trim() === '' && duckdbApproxKeyRef.current !== countKey) {
|
||||
duckdbApproxKeyRef.current = countKey;
|
||||
const approxSeq = ++duckdbApproxSeqRef.current;
|
||||
const { schemaName, pureTableName } = resolveDuckDBSchemaAndTable(dbName, tableName);
|
||||
const escapedSchema = escapeSQLLiteral(schemaName);
|
||||
const escapedTable = escapeSQLLiteral(pureTableName);
|
||||
const approxConfig: any = { ...(config as any), timeout: 3 };
|
||||
const approxSqlCandidates = [
|
||||
`SELECT estimated_size AS approx_total FROM duckdb_tables() WHERE schema_name='${escapedSchema}' AND table_name='${escapedTable}' LIMIT 1`,
|
||||
`SELECT estimated_size AS approx_total FROM duckdb_tables() WHERE table_name='${escapedTable}' ORDER BY CASE WHEN schema_name='${escapedSchema}' THEN 0 ELSE 1 END LIMIT 1`,
|
||||
];
|
||||
|
||||
(async () => {
|
||||
for (const approxSql of approxSqlCandidates) {
|
||||
try {
|
||||
const approxRes = await DBQuery(approxConfig as any, dbName, approxSql);
|
||||
if (duckdbApproxSeqRef.current !== approxSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
if (!approxRes?.success || !Array.isArray(approxRes.data) || approxRes.data.length === 0) continue;
|
||||
|
||||
const approxTotal = parseDuckDBApproxTotalRow(approxRes.data[0]);
|
||||
if (approxTotal === null) continue;
|
||||
if (!Number.isFinite(approxTotal) || approxTotal < minExpectedTotal) continue;
|
||||
|
||||
setPagination(prev => {
|
||||
if (countKeyRef.current !== countKey) return prev;
|
||||
if (prev.totalKnown) return prev;
|
||||
return {
|
||||
...prev,
|
||||
total: approxTotal,
|
||||
totalKnown: false,
|
||||
totalApprox: true,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
});
|
||||
return;
|
||||
} catch {
|
||||
if (duckdbApproxSeqRef.current !== approxSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
}
|
||||
}
|
||||
})();
|
||||
}
|
||||
} else {
|
||||
message.error(String(resData.message || '查询失败'));
|
||||
}
|
||||
@@ -227,7 +779,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
}
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns]);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns, pagination.total, pagination.totalKnown]);
|
||||
// 依赖 pkColumns:在无手动排序时可回退到主键稳定排序。
|
||||
// 主键信息只会在首次加载后更新一次,避免循环查询。
|
||||
|
||||
@@ -248,9 +800,32 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const handleToggleFilter = useCallback(() => setShowFilter(prev => !prev), []);
|
||||
const handleApplyFilter = useCallback((conditions: FilterCondition[]) => setFilterConditions(conditions), []);
|
||||
|
||||
const exportSqlWithFilter = useMemo(() => {
|
||||
const tableName = String(tab.tableName || '').trim();
|
||||
const dbType = String(currentConnConfig?.type || '').trim();
|
||||
if (!tableName || !dbType) return '';
|
||||
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
if (!whereSQL) return '';
|
||||
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const normalizedType = dbType.toLowerCase();
|
||||
const hasExplicitSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
if (hasExplicitSort && (normalizedType === 'mysql' || normalizedType === 'mariadb')) {
|
||||
sql = withSortBufferTuningSQL(normalizedType, sql, 32 * 1024 * 1024);
|
||||
}
|
||||
return sql;
|
||||
}, [tab.tableName, currentConnConfig?.type, filterConditions, sortInfo, pkColumns]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
if (!initialLoadRef.current) {
|
||||
initialLoadRef.current = true;
|
||||
fetchData(pagination.current, pagination.pageSize);
|
||||
return;
|
||||
}
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab.id, tab.connectionId, tab.dbName, tab.tableName, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
|
||||
return (
|
||||
<div style={{ flex: '1 1 auto', minHeight: 0, minWidth: 0, height: '100%', width: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column' }}>
|
||||
@@ -259,6 +834,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
columnNames={columnNames}
|
||||
loading={loading}
|
||||
tableName={tab.tableName}
|
||||
exportScope="table"
|
||||
dbName={tab.dbName}
|
||||
connectionId={tab.connectionId}
|
||||
pkColumns={pkColumns}
|
||||
@@ -266,11 +842,17 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
onSort={handleSort}
|
||||
onPageChange={handlePageChange}
|
||||
pagination={pagination}
|
||||
onRequestTotalCount={currentConnType === 'duckdb' ? handleDuckDBManualCount : undefined}
|
||||
onCancelTotalCount={currentConnType === 'duckdb' ? handleDuckDBCancelManualCount : undefined}
|
||||
showFilter={showFilter}
|
||||
onToggleFilter={handleToggleFilter}
|
||||
onApplyFilter={handleApplyFilter}
|
||||
appliedFilterConditions={filterConditions}
|
||||
readOnly={forceReadOnly}
|
||||
sortInfoExternal={sortInfo}
|
||||
exportSqlWithFilter={exportSqlWithFilter || undefined}
|
||||
scrollSnapshot={scrollSnapshotRef.current}
|
||||
onScrollSnapshotChange={handleTableScrollSnapshotChange}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,8 +1,8 @@
|
||||
import React, { useRef, useEffect } from 'react';
|
||||
import { Table, Tag, Button, Tooltip } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, CaretRightOutlined, BugOutlined } from '@ant-design/icons';
|
||||
import { Table, Tag, Button, Tooltip, Empty } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, BugOutlined, ClockCircleOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
|
||||
interface LogPanelProps {
|
||||
height: number;
|
||||
@@ -16,7 +16,8 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
const theme = useStore(state => state.theme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const opacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const opacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
|
||||
// Background Helper
|
||||
const getBg = (darkHex: string) => {
|
||||
@@ -27,24 +28,40 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
const b = parseInt(hex.substring(4, 6), 16);
|
||||
return `rgba(${r}, ${g}, ${b}, ${opacity})`;
|
||||
};
|
||||
const bgMain = getBg('#1f1f1f');
|
||||
const bgToolbar = getBg('#2a2a2a');
|
||||
const logScrollbarThumb = darkMode ? 'rgba(255, 255, 255, 0.34)' : 'rgba(0, 0, 0, 0.26)';
|
||||
const logScrollbarThumbHover = darkMode ? 'rgba(255, 255, 255, 0.5)' : 'rgba(0, 0, 0, 0.36)';
|
||||
const bgMain = getBg('#1d1d1d');
|
||||
const shellOpacity = darkMode ? Math.max(0.18, opacity * 0.82) : Math.max(0.28, opacity * 0.92);
|
||||
const shellOpacityStrong = darkMode ? Math.max(0.22, opacity * 0.9) : Math.max(0.34, opacity * 0.96);
|
||||
const panelDividerColor = darkMode
|
||||
? `rgba(255,255,255,${Math.max(0.04, opacity * 0.10)})`
|
||||
: `rgba(0,0,0,${Math.max(0.04, opacity * 0.08)})`;
|
||||
const panelMutedTextColor = darkMode ? 'rgba(255,255,255,0.62)' : 'rgba(0,0,0,0.58)';
|
||||
const panelShellBg = darkMode
|
||||
? `linear-gradient(180deg, rgba(15,20,30,${shellOpacity}) 0%, rgba(9,13,22,${shellOpacityStrong}) 100%)`
|
||||
: `linear-gradient(180deg, rgba(255,255,255,${shellOpacityStrong}) 0%, rgba(246,248,252,${shellOpacity}) 100%)`;
|
||||
const panelAccentColor = darkMode ? '#ffd666' : '#1677ff';
|
||||
const panelShadow = darkMode
|
||||
? `0 12px 28px rgba(0,0,0,${Math.max(0.05, opacity * 0.18)})`
|
||||
: `0 12px 24px rgba(15,23,42,${Math.max(0.02, opacity * 0.08)})`;
|
||||
const logScrollbarThumb = darkMode
|
||||
? `rgba(255, 255, 255, ${Math.max(0.18, opacity * 0.34)})`
|
||||
: `rgba(0, 0, 0, ${Math.max(0.12, opacity * 0.26)})`;
|
||||
const logScrollbarThumbHover = darkMode
|
||||
? `rgba(255, 255, 255, ${Math.max(0.28, opacity * 0.48)})`
|
||||
: `rgba(0, 0, 0, ${Math.max(0.18, opacity * 0.36)})`;
|
||||
|
||||
const columns = [
|
||||
{
|
||||
title: 'Time',
|
||||
dataIndex: 'timestamp',
|
||||
width: 80,
|
||||
render: (ts: number) => <span style={{ color: '#888', fontSize: '12px' }}>{new Date(ts).toLocaleTimeString()}</span>
|
||||
render: (ts: number) => <span style={{ color: panelMutedTextColor, fontSize: '12px' }}>{new Date(ts).toLocaleTimeString()}</span>
|
||||
},
|
||||
{
|
||||
title: 'Status',
|
||||
dataIndex: 'status',
|
||||
width: 70,
|
||||
render: (status: string) => (
|
||||
<Tag color={status === 'success' ? 'success' : 'error'} style={{ marginRight: 0 }}>
|
||||
<Tag color={status === 'success' ? 'success' : 'error'} style={{ marginRight: 0, borderRadius: 999, paddingInline: 8, fontSize: 11, fontWeight: 700 }}>
|
||||
{status === 'success' ? 'OK' : 'ERR'}
|
||||
</Tag>
|
||||
)
|
||||
@@ -59,10 +76,10 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
title: 'SQL / Message',
|
||||
dataIndex: 'sql',
|
||||
render: (text: string, record: any) => (
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.2' }}>
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.45' }}>
|
||||
<div style={{ color: darkMode ? '#a6e22e' : '#005cc5' }}>{text}</div>
|
||||
{record.message && <div style={{ color: '#ff4d4f', marginTop: 2 }}>{record.message}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: '#888', marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: panelMutedTextColor, marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -71,12 +88,18 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
return (
|
||||
<div style={{
|
||||
height,
|
||||
borderTop: 'none',
|
||||
background: bgMain,
|
||||
margin: 0,
|
||||
border: `1px solid ${panelDividerColor}`,
|
||||
borderRadius: 14,
|
||||
background: panelShellBg,
|
||||
WebkitBackdropFilter: opacity < 0.999 ? 'blur(14px)' : 'none',
|
||||
boxShadow: panelShadow,
|
||||
backdropFilter: darkMode && opacity < 0.999 ? 'blur(18px)' : 'none',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
position: 'relative',
|
||||
zIndex: 100 // Ensure above other content
|
||||
overflow: 'hidden',
|
||||
zIndex: 100
|
||||
}}>
|
||||
{/* Resize Handle */}
|
||||
<div
|
||||
@@ -94,38 +117,53 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
|
||||
{/* Toolbar */}
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
borderBottom: 'none',
|
||||
padding: '10px 14px',
|
||||
borderBottom: `1px solid ${panelDividerColor}`,
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
height: 32
|
||||
gap: 12,
|
||||
minHeight: 48
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8, fontWeight: 'bold', fontSize: '12px' }}>
|
||||
<BugOutlined /> SQL 执行日志
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 10, minWidth: 0 }}>
|
||||
<div style={{ width: 30, height: 30, borderRadius: 10, display: 'grid', placeItems: 'center', background: darkMode ? `rgba(255,214,102,${Math.max(0.10, Math.min(0.18, opacity * 0.18))})` : `rgba(24,144,255,${Math.max(0.08, Math.min(0.16, opacity * 0.16))})`, color: panelAccentColor, flexShrink: 0 }}>
|
||||
<BugOutlined />
|
||||
</div>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontWeight: 700, fontSize: 13, color: darkMode ? '#f5f7ff' : '#162033' }}>SQL 执行日志</div>
|
||||
<div style={{ fontSize: 12, color: panelMutedTextColor }}>记录执行状态、耗时与错误信息,便于快速回溯。</div>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
|
||||
<Tooltip title="清空日志">
|
||||
<Button type="text" size="small" icon={<ClearOutlined />} onClick={clearSqlLogs} />
|
||||
<Button type="text" size="small" icon={<ClearOutlined />} onClick={clearSqlLogs} style={{ color: panelMutedTextColor }} />
|
||||
</Tooltip>
|
||||
<Tooltip title="关闭面板">
|
||||
<Button type="text" size="small" icon={<CloseOutlined />} onClick={onClose} />
|
||||
<Button type="text" size="small" icon={<CloseOutlined />} onClick={onClose} style={{ color: panelMutedTextColor }} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* List */}
|
||||
<div className="log-panel-scroll" style={{ flex: 1, overflow: 'auto' }}>
|
||||
<Table
|
||||
className="log-panel-table"
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
showHeader={false}
|
||||
// scroll={{ y: height - 32 }} // Let flex handle it
|
||||
/>
|
||||
<div className="log-panel-scroll" style={{ flex: 1, overflow: 'auto', padding: '8px 10px 10px' }}>
|
||||
{sqlLogs.length === 0 ? (
|
||||
<div style={{ height: '100%', minHeight: 160, display: 'grid', placeItems: 'center' }}>
|
||||
<Empty
|
||||
image={Empty.PRESENTED_IMAGE_SIMPLE}
|
||||
description={<span style={{ color: panelMutedTextColor }}>暂无 SQL 执行日志</span>}
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
<Table
|
||||
className="log-panel-table"
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
showHeader={false}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<style>{`
|
||||
.log-panel-scroll {
|
||||
@@ -155,6 +193,16 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
background: transparent !important;
|
||||
}
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
padding: 8px 10px !important;
|
||||
border-bottom: 1px solid ${panelDividerColor} !important;
|
||||
}
|
||||
.log-panel-table .ant-table-tbody > tr:last-child > td {
|
||||
border-bottom: none !important;
|
||||
}
|
||||
.log-panel-table .ant-table-row:hover > td {
|
||||
background: ${darkMode ? 'rgba(255,255,255,0.03)' : 'rgba(16,24,40,0.03)'} !important;
|
||||
}
|
||||
`}</style>
|
||||
</div>
|
||||
);
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,11 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import React, { useMemo, useRef, useState } from 'react';
|
||||
import { Tabs, Dropdown } from 'antd';
|
||||
import type { MenuProps } from 'antd';
|
||||
import type { MenuProps, TabsProps } from 'antd';
|
||||
import { DndContext, PointerSensor, closestCenter, useSensor, useSensors } from '@dnd-kit/core';
|
||||
import type { DragStartEvent, DragEndEvent } from '@dnd-kit/core';
|
||||
import { SortableContext, useSortable, horizontalListSortingStrategy } from '@dnd-kit/sortable';
|
||||
import { CSS } from '@dnd-kit/utilities';
|
||||
import { restrictToHorizontalAxis } from '@dnd-kit/modifiers';
|
||||
import { useStore } from '../store';
|
||||
import DataViewer from './DataViewer';
|
||||
import QueryEditor from './QueryEditor';
|
||||
@@ -29,9 +34,58 @@ const buildTabDisplayTitle = (tab: TabData, connectionName: string | undefined):
|
||||
return `[${prefix}] ${tab.title}`;
|
||||
};
|
||||
|
||||
type SortableTabLabelProps = {
|
||||
displayTitle: string;
|
||||
menuItems: MenuProps['items'];
|
||||
};
|
||||
|
||||
const SortableTabLabel: React.FC<SortableTabLabelProps> = ({
|
||||
displayTitle,
|
||||
menuItems,
|
||||
}) => {
|
||||
return (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span
|
||||
className="tab-dnd-label"
|
||||
onContextMenu={(e) => e.preventDefault()}
|
||||
title="拖拽调整标签顺序"
|
||||
>
|
||||
{displayTitle}
|
||||
</span>
|
||||
</Dropdown>
|
||||
);
|
||||
};
|
||||
|
||||
type DraggableTabNodeProps = {
|
||||
node: React.ReactElement;
|
||||
};
|
||||
|
||||
const DraggableTabNode: React.FC<DraggableTabNodeProps> = ({ node }) => {
|
||||
const tabId = String(node.key || '').trim();
|
||||
const { attributes, listeners, setNodeRef, transform, transition, isDragging } = useSortable({ id: tabId });
|
||||
const style: React.CSSProperties = {
|
||||
...(node.props.style || {}),
|
||||
transform: CSS.Transform.toString(transform),
|
||||
transition: transition || 'transform 180ms cubic-bezier(0.22, 1, 0.36, 1)',
|
||||
opacity: isDragging ? 0.88 : 1,
|
||||
cursor: isDragging ? 'grabbing' : 'grab',
|
||||
touchAction: 'none',
|
||||
zIndex: isDragging ? 2 : node.props.style?.zIndex,
|
||||
};
|
||||
|
||||
return React.cloneElement(node, {
|
||||
ref: setNodeRef,
|
||||
style,
|
||||
...attributes,
|
||||
...listeners,
|
||||
className: `${node.props.className || ''} tab-dnd-node${isDragging ? ' is-dragging' : ''}`,
|
||||
});
|
||||
};
|
||||
|
||||
const TabManager: React.FC = () => {
|
||||
const tabs = useStore(state => state.tabs);
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
const setActiveTab = useStore(state => state.setActiveTab);
|
||||
const closeTab = useStore(state => state.closeTab);
|
||||
@@ -39,6 +93,15 @@ const TabManager: React.FC = () => {
|
||||
const closeTabsToLeft = useStore(state => state.closeTabsToLeft);
|
||||
const closeTabsToRight = useStore(state => state.closeTabsToRight);
|
||||
const closeAllTabs = useStore(state => state.closeAllTabs);
|
||||
const moveTab = useStore(state => state.moveTab);
|
||||
const tabsNavBorderColor = theme === 'dark' ? 'rgba(255, 255, 255, 0.09)' : 'rgba(0, 0, 0, 0.08)';
|
||||
const [draggingTabId, setDraggingTabId] = useState<string | null>(null);
|
||||
const suppressClickUntilRef = useRef<number>(0);
|
||||
const sensors = useSensors(
|
||||
useSensor(PointerSensor, {
|
||||
activationConstraint: { distance: 8 },
|
||||
})
|
||||
);
|
||||
|
||||
const onChange = (newActiveKey: string) => {
|
||||
setActiveTab(newActiveKey);
|
||||
@@ -50,6 +113,34 @@ const TabManager: React.FC = () => {
|
||||
}
|
||||
};
|
||||
|
||||
const handleDragStart = (event: DragStartEvent) => {
|
||||
const sourceId = String(event.active.id || '').trim();
|
||||
setDraggingTabId(sourceId || null);
|
||||
};
|
||||
|
||||
const handleDragEnd = (event: DragEndEvent) => {
|
||||
const sourceId = String(event.active.id || '').trim();
|
||||
const targetId = String(event.over?.id || '').trim();
|
||||
setDraggingTabId(null);
|
||||
if (!sourceId || !targetId || sourceId === targetId) {
|
||||
return;
|
||||
}
|
||||
suppressClickUntilRef.current = Date.now() + 120;
|
||||
moveTab(sourceId, targetId);
|
||||
};
|
||||
|
||||
const handleDragCancel = () => {
|
||||
setDraggingTabId(null);
|
||||
};
|
||||
|
||||
const tabIds = useMemo(() => tabs.map((tab) => tab.id), [tabs]);
|
||||
|
||||
const renderTabBar: TabsProps['renderTabBar'] = (tabBarProps, DefaultTabBar) => (
|
||||
<DefaultTabBar {...tabBarProps}>
|
||||
{(node) => <DraggableTabNode key={node.key} node={node} />}
|
||||
</DefaultTabBar>
|
||||
);
|
||||
|
||||
const items = useMemo(() => tabs.map((tab, index) => {
|
||||
const connectionName = connections.find((conn) => conn.id === tab.connectionId)?.name;
|
||||
const displayTitle = buildTabDisplayTitle(tab, connectionName);
|
||||
@@ -100,9 +191,10 @@ const TabManager: React.FC = () => {
|
||||
|
||||
return {
|
||||
label: (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span onContextMenu={(e) => e.preventDefault()}>{displayTitle}</span>
|
||||
</Dropdown>
|
||||
<SortableTabLabel
|
||||
displayTitle={displayTitle}
|
||||
menuItems={menuItems}
|
||||
/>
|
||||
),
|
||||
key: tab.id,
|
||||
children: content,
|
||||
@@ -156,18 +248,64 @@ const TabManager: React.FC = () => {
|
||||
display: none !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-nav::before {
|
||||
border-bottom: none !important;
|
||||
border-bottom: 1px solid ${tabsNavBorderColor} !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-tab {
|
||||
transition: transform 180ms cubic-bezier(0.22, 1, 0.36, 1), background-color 120ms ease;
|
||||
}
|
||||
.main-tabs .tab-dnd-label {
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
max-width: 100%;
|
||||
}
|
||||
.main-tabs .tab-dnd-node.is-dragging,
|
||||
.main-tabs .tab-dnd-node.is-dragging .tab-dnd-label {
|
||||
cursor: grabbing !important;
|
||||
}
|
||||
body[data-theme='dark'] .main-tabs .ant-tabs-tab-btn:focus-visible {
|
||||
outline: none !important;
|
||||
border-radius: 6px;
|
||||
box-shadow: 0 0 0 2px rgba(255, 214, 102, 0.72);
|
||||
background: rgba(255, 214, 102, 0.16);
|
||||
}
|
||||
body[data-theme='light'] .main-tabs .ant-tabs-tab-btn:focus-visible {
|
||||
outline: none !important;
|
||||
border-radius: 6px;
|
||||
box-shadow: 0 0 0 2px rgba(9, 109, 217, 0.32);
|
||||
background: rgba(9, 109, 217, 0.08);
|
||||
}
|
||||
body[data-theme='dark'] .main-tabs .ant-tabs-tab.ant-tabs-tab-active {
|
||||
background: rgba(255, 214, 102, 0.12) !important;
|
||||
border-color: rgba(255, 214, 102, 0.4) !important;
|
||||
}
|
||||
`}</style>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
onChange={onChange}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
hideAdd
|
||||
/>
|
||||
<DndContext
|
||||
sensors={sensors}
|
||||
collisionDetection={closestCenter}
|
||||
modifiers={[restrictToHorizontalAxis]}
|
||||
onDragStart={handleDragStart}
|
||||
onDragEnd={handleDragEnd}
|
||||
onDragCancel={handleDragCancel}
|
||||
>
|
||||
<SortableContext items={tabIds} strategy={horizontalListSortingStrategy}>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
destroyInactiveTabPane={false}
|
||||
onChange={(newActiveKey) => {
|
||||
if (Date.now() < suppressClickUntilRef.current) return;
|
||||
onChange(newActiveKey);
|
||||
}}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
hideAdd
|
||||
renderTabBar={renderTabBar}
|
||||
/>
|
||||
</SortableContext>
|
||||
</DndContext>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -259,10 +259,20 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
const resizeGuideColor = darkMode ? '#f6c453' : '#1890ff';
|
||||
const readOnly = !!tab.readOnly;
|
||||
const panelRadius = 10;
|
||||
const panelFrameColor = darkMode ? 'rgba(0, 0, 0, 0.18)' : 'rgba(0, 0, 0, 0.12)';
|
||||
const panelToolbarBorder = darkMode ? 'rgba(255, 255, 255, 0.12)' : 'rgba(0, 0, 0, 0.10)';
|
||||
const panelToolbarBg = darkMode ? 'rgba(20, 20, 20, 0.35)' : 'rgba(255, 255, 255, 0.72)';
|
||||
const panelBodyBg = darkMode ? 'rgba(0, 0, 0, 0.24)' : 'rgba(255, 255, 255, 0.82)';
|
||||
const focusRowBg = darkMode ? 'rgba(246, 196, 83, 0.22)' : 'rgba(24, 144, 255, 0.12)';
|
||||
|
||||
const [tableHeight, setTableHeight] = useState(500);
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const pendingFocusColumnKeyRef = useRef<string | null>(null);
|
||||
const focusHighlightTimerRef = useRef<number | null>(null);
|
||||
const [focusColumnKey, setFocusColumnKey] = useState('');
|
||||
|
||||
const openCommentEditor = useCallback((record: EditableColumn) => {
|
||||
if (!record?._key) return;
|
||||
@@ -345,6 +355,61 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setSelectedColumnRowKeys(prev => prev.filter(key => columns.some(c => c._key === key)));
|
||||
}, [columns]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (focusHighlightTimerRef.current !== null) {
|
||||
window.clearTimeout(focusHighlightTimerRef.current);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
const focusColumnRow = useCallback((targetKey: string): boolean => {
|
||||
if (activeKey !== 'columns') return false;
|
||||
const tableBody = containerRef.current?.querySelector('.ant-table-body') as HTMLElement | null;
|
||||
if (!tableBody) return false;
|
||||
const row = tableBody.querySelector(`tr[data-row-key="${targetKey}"]`) as HTMLTableRowElement | null;
|
||||
if (!row) return false;
|
||||
|
||||
row.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
|
||||
setFocusColumnKey(targetKey);
|
||||
if (focusHighlightTimerRef.current !== null) {
|
||||
window.clearTimeout(focusHighlightTimerRef.current);
|
||||
}
|
||||
focusHighlightTimerRef.current = window.setTimeout(() => {
|
||||
setFocusColumnKey(prev => (prev === targetKey ? '' : prev));
|
||||
}, 1600);
|
||||
|
||||
if (!readOnly) {
|
||||
const firstInput = row.querySelector('input') as HTMLInputElement | null;
|
||||
if (firstInput) {
|
||||
firstInput.focus();
|
||||
firstInput.select();
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}, [activeKey, readOnly]);
|
||||
|
||||
useEffect(() => {
|
||||
const pendingKey = pendingFocusColumnKeyRef.current;
|
||||
if (!pendingKey || activeKey !== 'columns') return;
|
||||
|
||||
let cancelled = false;
|
||||
const tryFocus = () => {
|
||||
if (cancelled) return;
|
||||
if (focusColumnRow(pendingKey)) {
|
||||
pendingFocusColumnKeyRef.current = null;
|
||||
}
|
||||
};
|
||||
|
||||
const timerA = window.setTimeout(tryFocus, 0);
|
||||
const timerB = window.setTimeout(tryFocus, 96);
|
||||
return () => {
|
||||
cancelled = true;
|
||||
window.clearTimeout(timerA);
|
||||
window.clearTimeout(timerB);
|
||||
};
|
||||
}, [activeKey, columns, focusColumnRow]);
|
||||
|
||||
// Initial Columns Definition
|
||||
useEffect(() => {
|
||||
const initialCols = [
|
||||
@@ -885,21 +950,46 @@ ${selectedTrigger.statement}`;
|
||||
}));
|
||||
};
|
||||
|
||||
const handleAddColumn = () => {
|
||||
const newCol: EditableColumn = {
|
||||
name: isNewTable ? 'new_column' : `new_col_${columns.length + 1}`,
|
||||
type: 'varchar(255)',
|
||||
nullable: 'YES',
|
||||
key: '',
|
||||
extra: '',
|
||||
comment: '',
|
||||
default: '',
|
||||
_key: `new-${Date.now()}`,
|
||||
isNew: true,
|
||||
isAutoIncrement: false
|
||||
};
|
||||
setColumns([...columns, newCol]);
|
||||
};
|
||||
const createNewColumn = useCallback((indexHint: number): EditableColumn => ({
|
||||
name: isNewTable ? 'new_column' : `new_col_${indexHint}`,
|
||||
type: 'varchar(255)',
|
||||
nullable: 'YES',
|
||||
key: '',
|
||||
extra: '',
|
||||
comment: '',
|
||||
default: '',
|
||||
_key: `new-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
|
||||
isNew: true,
|
||||
isAutoIncrement: false
|
||||
}), [isNewTable]);
|
||||
|
||||
const handleAddColumn = useCallback((insertAfterKey?: string) => {
|
||||
const newCol = createNewColumn(columns.length + 1);
|
||||
setColumns(prev => {
|
||||
const next = [...prev];
|
||||
if (insertAfterKey) {
|
||||
const insertIndex = next.findIndex(col => col._key === insertAfterKey);
|
||||
if (insertIndex >= 0) {
|
||||
next.splice(insertIndex + 1, 0, newCol);
|
||||
return next;
|
||||
}
|
||||
}
|
||||
next.push(newCol);
|
||||
return next;
|
||||
});
|
||||
setSelectedColumnRowKeys([newCol._key]);
|
||||
pendingFocusColumnKeyRef.current = newCol._key;
|
||||
}, [columns.length, createNewColumn]);
|
||||
|
||||
const handleAddColumnAfterSelected = useCallback(() => {
|
||||
const selectedSet = new Set(selectedColumnRowKeys);
|
||||
const anchor = columns.find(col => selectedSet.has(col._key));
|
||||
if (!anchor) {
|
||||
message.warning('请先选择一个字段,再执行插入。');
|
||||
return;
|
||||
}
|
||||
handleAddColumn(anchor._key);
|
||||
}, [columns, handleAddColumn, selectedColumnRowKeys]);
|
||||
|
||||
const handleDeleteColumn = (key: string) => {
|
||||
setColumns(prev => prev.filter(c => c._key !== key));
|
||||
@@ -1919,22 +2009,35 @@ END;`;
|
||||
}));
|
||||
|
||||
const columnsTabContent = (
|
||||
<div ref={containerRef} className="table-designer-wrapper" style={{ height: '100%', overflow: 'hidden', position: 'relative' }}>
|
||||
<div
|
||||
ref={containerRef}
|
||||
className="table-designer-wrapper"
|
||||
style={{
|
||||
height: '100%',
|
||||
overflow: 'hidden',
|
||||
position: 'relative',
|
||||
background: panelBodyBg
|
||||
}}
|
||||
>
|
||||
<style>{`
|
||||
.table-designer-wrapper .ant-table-body {
|
||||
max-height: ${tableHeight}px !important;
|
||||
}
|
||||
}
|
||||
.table-designer-wrapper .table-designer-focus-row > .ant-table-cell {
|
||||
background: ${focusRowBg} !important;
|
||||
}
|
||||
`}</style>
|
||||
{readOnly ? (
|
||||
<Table
|
||||
dataSource={columns}
|
||||
columns={resizableColumns}
|
||||
rowKey="_key"
|
||||
rowClassName={(record: EditableColumn) => record._key === focusColumnKey ? 'table-designer-focus-row' : ''}
|
||||
size="small"
|
||||
pagination={false}
|
||||
loading={loading}
|
||||
scroll={{ y: tableHeight }}
|
||||
bordered
|
||||
bordered={false}
|
||||
components={{
|
||||
header: {
|
||||
cell: ResizableTitle,
|
||||
@@ -1952,11 +2055,12 @@ END;`;
|
||||
onChange: (nextSelectedRowKeys) => setSelectedColumnRowKeys(nextSelectedRowKeys as string[]),
|
||||
}}
|
||||
rowKey="_key"
|
||||
rowClassName={(record: EditableColumn) => record._key === focusColumnKey ? 'table-designer-focus-row' : ''}
|
||||
size="small"
|
||||
pagination={false}
|
||||
loading={loading}
|
||||
scroll={{ y: tableHeight }}
|
||||
bordered
|
||||
bordered={false}
|
||||
components={{
|
||||
body: { row: SortableRow },
|
||||
header: { cell: ResizableTitle }
|
||||
@@ -1973,7 +2077,7 @@ END;`;
|
||||
bottom: 0,
|
||||
left: 0,
|
||||
width: '2px',
|
||||
background: '#1890ff',
|
||||
background: resizeGuideColor,
|
||||
zIndex: 9999,
|
||||
display: 'none',
|
||||
pointerEvents: 'none',
|
||||
@@ -1984,8 +2088,63 @@ END;`;
|
||||
);
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{ padding: '8px', borderBottom: '1px solid #eee', display: 'flex', gap: '8px', alignItems: 'center' }}>
|
||||
<div className="table-designer-shell" style={{ display: 'flex', flexDirection: 'column', height: '100%', minHeight: 0, padding: '6px 0' }}>
|
||||
<style>{`
|
||||
.table-designer-shell .ant-table,
|
||||
.table-designer-shell .ant-table-wrapper,
|
||||
.table-designer-shell .ant-table-container {
|
||||
background: transparent !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-wrapper,
|
||||
.table-designer-shell .ant-table-container {
|
||||
border: none !important;
|
||||
overflow: hidden !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-thead > tr > th {
|
||||
background: transparent !important;
|
||||
border-bottom: 1px solid ${darkMode ? 'rgba(255,255,255,0.06)' : 'rgba(0,0,0,0.06)'} !important;
|
||||
border-inline-end: 1px solid transparent !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-tbody > tr > td,
|
||||
.table-designer-shell .ant-table-tbody .ant-table-row > .ant-table-cell {
|
||||
background: transparent !important;
|
||||
border-bottom: 1px solid ${darkMode ? 'rgba(255,255,255,0.05)' : 'rgba(0,0,0,0.05)'} !important;
|
||||
border-inline-end: 1px solid transparent !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-thead > tr > th::before {
|
||||
display: none !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-tbody > tr:hover > td,
|
||||
.table-designer-shell .ant-table-tbody .ant-table-row:hover > .ant-table-cell {
|
||||
background: ${darkMode ? 'rgba(255,255,255,0.06)' : 'rgba(0,0,0,0.02)'} !important;
|
||||
}
|
||||
.table-designer-shell .ant-tabs-nav {
|
||||
margin-bottom: 8px !important;
|
||||
}
|
||||
.table-designer-shell .ant-tabs-nav::before {
|
||||
border-bottom-color: ${darkMode ? 'rgba(255,255,255,0.08)' : 'rgba(0,0,0,0.08)'} !important;
|
||||
}
|
||||
.table-designer-shell .ant-tabs-content-holder,
|
||||
.table-designer-shell .ant-tabs-content,
|
||||
.table-designer-shell .ant-tabs-tabpane {
|
||||
height: 100%;
|
||||
}
|
||||
`}</style>
|
||||
<div
|
||||
style={{
|
||||
padding: '10px 12px 8px 12px',
|
||||
borderBottom: `1px solid ${panelToolbarBorder}`,
|
||||
borderTopLeftRadius: panelRadius,
|
||||
borderTopRightRadius: panelRadius,
|
||||
borderLeft: `1px solid ${panelFrameColor}`,
|
||||
borderRight: `1px solid ${panelFrameColor}`,
|
||||
borderTop: `1px solid ${panelFrameColor}`,
|
||||
background: panelToolbarBg,
|
||||
display: 'flex',
|
||||
gap: '8px',
|
||||
alignItems: 'center'
|
||||
}}
|
||||
>
|
||||
{isNewTable && (
|
||||
<>
|
||||
<Input
|
||||
@@ -2013,14 +2172,25 @@ END;`;
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{!readOnly && <Button icon={<SaveOutlined />} type="primary" onClick={generateDDL}>保存</Button>}
|
||||
{!isNewTable && <Button icon={<ReloadOutlined />} onClick={fetchData}>刷新</Button>}
|
||||
{!readOnly && <Button size="small" icon={<SaveOutlined />} type="primary" onClick={generateDDL}>保存</Button>}
|
||||
{!isNewTable && <Button size="small" icon={<ReloadOutlined />} onClick={fetchData}>刷新</Button>}
|
||||
{!isNewTable && !readOnly && supportsTableCommentOps() && (
|
||||
<Button icon={<EditOutlined />} onClick={openTableCommentModal}>表备注</Button>
|
||||
<Button size="small" icon={<EditOutlined />} onClick={openTableCommentModal}>表备注</Button>
|
||||
)}
|
||||
{!readOnly && <Button icon={<PlusOutlined />} onClick={handleAddColumn}>添加字段</Button>}
|
||||
{!readOnly && <Button size="small" icon={<PlusOutlined />} onClick={() => handleAddColumn()}>添加字段</Button>}
|
||||
{!readOnly && (
|
||||
<Button
|
||||
size="small"
|
||||
icon={<PlusOutlined />}
|
||||
onClick={handleAddColumnAfterSelected}
|
||||
disabled={selectedColumnRowKeys.length === 0}
|
||||
>
|
||||
在选中字段后添加
|
||||
</Button>
|
||||
)}
|
||||
{!readOnly && (
|
||||
<Button
|
||||
size="small"
|
||||
icon={<CopyOutlined />}
|
||||
onClick={openCopySelectedColumnsModal}
|
||||
disabled={selectedColumns.length === 0}
|
||||
@@ -2033,7 +2203,17 @@ END;`;
|
||||
<Tabs
|
||||
activeKey={activeKey}
|
||||
onChange={setActiveKey}
|
||||
style={{ flex: 1, padding: '0 10px' }}
|
||||
style={{
|
||||
flex: 1,
|
||||
minHeight: 0,
|
||||
padding: '8px 10px 10px 10px',
|
||||
borderBottomLeftRadius: panelRadius,
|
||||
borderBottomRightRadius: panelRadius,
|
||||
borderLeft: `1px solid ${panelFrameColor}`,
|
||||
borderRight: `1px solid ${panelFrameColor}`,
|
||||
borderBottom: `1px solid ${panelFrameColor}`,
|
||||
background: panelBodyBg
|
||||
}}
|
||||
items={[
|
||||
{
|
||||
key: 'columns',
|
||||
@@ -2275,7 +2455,7 @@ END;`;
|
||||
label: 'DDL',
|
||||
icon: <FileTextOutlined />,
|
||||
children: (
|
||||
<div style={{ height: 'calc(100vh - 200px)', border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<div style={{ height: 'calc(100vh - 200px)', border: `1px solid ${panelFrameColor}`, borderRadius: panelRadius, background: panelBodyBg }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
language="sql"
|
||||
@@ -2311,7 +2491,7 @@ END;`;
|
||||
okText="应用"
|
||||
cancelText="取消"
|
||||
width={640}
|
||||
destroyOnClose
|
||||
destroyOnHidden
|
||||
>
|
||||
<Input.TextArea
|
||||
value={commentEditorValue}
|
||||
@@ -2516,7 +2696,7 @@ END;`;
|
||||
<span><strong>时机:</strong> {selectedTrigger.timing}</span>
|
||||
<span><strong>事件:</strong> {selectedTrigger.event}</span>
|
||||
</div>
|
||||
<div style={{ border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<div style={{ border: `1px solid ${panelFrameColor}`, borderRadius: panelRadius, background: panelBodyBg }}>
|
||||
<Editor
|
||||
height="350px"
|
||||
language="sql"
|
||||
@@ -2552,7 +2732,7 @@ END;`;
|
||||
<span>修改触发器时会先删除原触发器,再创建新触发器。</span>
|
||||
)}
|
||||
</div>
|
||||
<div style={{ border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<div style={{ border: `1px solid ${panelFrameColor}`, borderRadius: panelRadius, background: panelBodyBg }}>
|
||||
<Editor
|
||||
height="350px"
|
||||
language="sql"
|
||||
|
||||
39
frontend/src/components/dataGridLayout.test.ts
Normal file
39
frontend/src/components/dataGridLayout.test.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { calculateTableBodyBottomPadding } from './dataGridLayout';
|
||||
|
||||
const assertEqual = (actual: unknown, expected: unknown, message: string) => {
|
||||
if (actual !== expected) {
|
||||
throw new Error(`${message}\nactual: ${String(actual)}\nexpected: ${String(expected)}`);
|
||||
}
|
||||
};
|
||||
|
||||
assertEqual(
|
||||
calculateTableBodyBottomPadding({
|
||||
hasHorizontalOverflow: false,
|
||||
floatingScrollbarHeight: 10,
|
||||
floatingScrollbarGap: 6,
|
||||
}),
|
||||
0,
|
||||
'无横向滚动条时不应增加底部间距'
|
||||
);
|
||||
|
||||
assertEqual(
|
||||
calculateTableBodyBottomPadding({
|
||||
hasHorizontalOverflow: true,
|
||||
floatingScrollbarHeight: 10,
|
||||
floatingScrollbarGap: 6,
|
||||
}),
|
||||
28,
|
||||
'默认悬浮滚动条应预留滚动条高度、间距和额外安全区'
|
||||
);
|
||||
|
||||
assertEqual(
|
||||
calculateTableBodyBottomPadding({
|
||||
hasHorizontalOverflow: true,
|
||||
floatingScrollbarHeight: 14,
|
||||
floatingScrollbarGap: 4,
|
||||
}),
|
||||
30,
|
||||
'较粗滚动条场景下应同步放大底部安全区'
|
||||
);
|
||||
|
||||
console.log('dataGridLayout tests passed');
|
||||
23
frontend/src/components/dataGridLayout.ts
Normal file
23
frontend/src/components/dataGridLayout.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
export interface TableBodyBottomPaddingOptions {
|
||||
hasHorizontalOverflow: boolean;
|
||||
floatingScrollbarHeight: number;
|
||||
floatingScrollbarGap: number;
|
||||
}
|
||||
|
||||
const MIN_SCROLLBAR_CLEARANCE = 8;
|
||||
const FLOATING_SCROLLBAR_VISUAL_EXTRA = 4;
|
||||
|
||||
export const calculateTableBodyBottomPadding = ({
|
||||
hasHorizontalOverflow,
|
||||
floatingScrollbarHeight,
|
||||
floatingScrollbarGap,
|
||||
}: TableBodyBottomPaddingOptions): number => {
|
||||
if (!hasHorizontalOverflow) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const safeScrollbarHeight = Math.max(0, Math.ceil(floatingScrollbarHeight));
|
||||
const safeScrollbarGap = Math.max(0, Math.ceil(floatingScrollbarGap));
|
||||
|
||||
return safeScrollbarHeight + FLOATING_SCROLLBAR_VISUAL_EXTRA + safeScrollbarGap + MIN_SCROLLBAR_CLEARANCE;
|
||||
};
|
||||
105
frontend/src/components/redisViewerTree.test.ts
Normal file
105
frontend/src/components/redisViewerTree.test.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { RedisKeyInfo } from '../types';
|
||||
import {
|
||||
applyRenamedRedisKeyState,
|
||||
applyTreeNodeCheck,
|
||||
buildCheckedTreeNodeState,
|
||||
buildRedisKeyTree,
|
||||
isGroupFullyChecked,
|
||||
} from './redisViewerTree';
|
||||
|
||||
const assert = (condition: unknown, message: string) => {
|
||||
if (!condition) {
|
||||
throw new Error(message);
|
||||
}
|
||||
};
|
||||
|
||||
const assertEqual = (actual: unknown, expected: unknown, message: string) => {
|
||||
const actualText = JSON.stringify(actual);
|
||||
const expectedText = JSON.stringify(expected);
|
||||
if (actualText !== expectedText) {
|
||||
throw new Error(`${message}\nactual: ${actualText}\nexpected: ${expectedText}`);
|
||||
}
|
||||
};
|
||||
|
||||
const sampleKeys: RedisKeyInfo[] = [
|
||||
{ key: 'app:user:1', type: 'string', ttl: -1 },
|
||||
{ key: 'app:user:2', type: 'string', ttl: -1 },
|
||||
{ key: 'app:order:1', type: 'hash', ttl: 120 },
|
||||
{ key: 'misc', type: 'set', ttl: -1 },
|
||||
];
|
||||
|
||||
const tree = buildRedisKeyTree(sampleKeys, true);
|
||||
const appGroup = tree.treeData.find((node) => node.key === 'group:app');
|
||||
const userGroup = appGroup?.children?.find((node) => node.key === 'group:app:user');
|
||||
|
||||
assert(appGroup, '应生成 group:app 节点');
|
||||
assert(userGroup, '应生成 group:app:user 节点');
|
||||
assertEqual(
|
||||
appGroup?.descendantRawKeys,
|
||||
['app:order:1', 'app:user:1', 'app:user:2'],
|
||||
'app 分组应收集全部后代 key'
|
||||
);
|
||||
|
||||
const selectedAfterGroupCheck = applyTreeNodeCheck([], appGroup!, true);
|
||||
assertEqual(
|
||||
selectedAfterGroupCheck,
|
||||
['app:order:1', 'app:user:1', 'app:user:2'],
|
||||
'勾选分组应递归选中全部后代 key'
|
||||
);
|
||||
|
||||
const checkedState = buildCheckedTreeNodeState(selectedAfterGroupCheck, tree);
|
||||
assertEqual(
|
||||
checkedState.checked,
|
||||
['key:app:order:1', 'group:app:order', 'key:app:user:1', 'key:app:user:2', 'group:app:user', 'group:app'],
|
||||
'全部后代已选中时,父分组和叶子都应进入 checked'
|
||||
);
|
||||
assertEqual(checkedState.halfChecked, [], '全部后代已选中时不应有 halfChecked');
|
||||
assertEqual(isGroupFullyChecked(appGroup!, selectedAfterGroupCheck), true, '全部后代已选中时,分组应视为 fully checked');
|
||||
|
||||
const selectedAfterGroupUncheck = applyTreeNodeCheck(selectedAfterGroupCheck, appGroup!, false);
|
||||
assertEqual(selectedAfterGroupUncheck, [], '取消勾选分组应移除全部后代 key');
|
||||
assertEqual(isGroupFullyChecked(appGroup!, selectedAfterGroupUncheck), false, '取消后分组不应再是 fully checked');
|
||||
|
||||
const partialState = buildCheckedTreeNodeState(['app:user:1'], tree);
|
||||
assertEqual(
|
||||
partialState.halfChecked,
|
||||
['group:app:user', 'group:app'],
|
||||
'仅部分后代选中时,相关分组应进入 halfChecked'
|
||||
);
|
||||
assertEqual(isGroupFullyChecked(appGroup!, ['app:user:1']), false, '部分选中时分组不应是 fully checked');
|
||||
|
||||
const renamedState = applyRenamedRedisKeyState(
|
||||
{
|
||||
keys: sampleKeys,
|
||||
selectedKey: 'app:user:2',
|
||||
selectedKeys: ['app:user:1', 'app:user:2', 'misc'],
|
||||
},
|
||||
'app:user:2',
|
||||
'app:user:200'
|
||||
);
|
||||
|
||||
assertEqual(
|
||||
renamedState.keys.map((item) => item.key),
|
||||
['app:user:1', 'app:user:200', 'app:order:1', 'misc'],
|
||||
'重命名后 keys 列表应替换旧 key'
|
||||
);
|
||||
assertEqual(renamedState.selectedKey, 'app:user:200', '当前详情选中的 key 应切换为新 key');
|
||||
assertEqual(
|
||||
renamedState.selectedKeys,
|
||||
['app:user:1', 'app:user:200', 'misc'],
|
||||
'批量选中集合中的旧 key 应映射为新 key'
|
||||
);
|
||||
|
||||
const unrelatedRenameState = applyRenamedRedisKeyState(
|
||||
{
|
||||
keys: sampleKeys,
|
||||
selectedKey: 'misc',
|
||||
selectedKeys: ['app:user:1'],
|
||||
},
|
||||
'app:order:1',
|
||||
'app:order:9'
|
||||
);
|
||||
assertEqual(unrelatedRenameState.selectedKey, 'misc', '非当前详情 key 的重命名不应影响 selectedKey');
|
||||
assertEqual(unrelatedRenameState.selectedKeys, ['app:user:1'], '非已勾选 key 的重命名不应污染选中集合');
|
||||
|
||||
console.log('redisViewerTree tests passed');
|
||||
260
frontend/src/components/redisViewerTree.ts
Normal file
260
frontend/src/components/redisViewerTree.ts
Normal file
@@ -0,0 +1,260 @@
|
||||
import type { DataNode } from 'antd/es/tree';
|
||||
import type { RedisKeyInfo } from '../types';
|
||||
|
||||
// Redis keys are grouped into a tree by splitting on ':' (e.g. "app:user:1").
const KEY_GROUP_DELIMITER = ':';
// Display label substituted when a key segment between delimiters is empty.
const EMPTY_SEGMENT_LABEL = '(empty)';

// A leaf of the intermediate grouping tree: one concrete Redis key plus the
// display label derived from its last segment.
type RedisKeyTreeLeaf = {
  keyInfo: RedisKeyInfo;
  label: string;
};

// Internal grouping node built while partitioning keys by prefix.
type RedisKeyTreeGroup = {
  name: string; // last path segment of this group
  path: string; // full delimiter-joined path from the root
  children: Map<string, RedisKeyTreeGroup>; // sub-groups keyed by segment name
  leaves: RedisKeyTreeLeaf[]; // keys terminating at this level
  leafCount: number; // total keys in this subtree (filled by calculateGroupLeafCount)
};

// antd DataNode extended with Redis-specific metadata used for rendering
// and for group-level check/uncheck behaviour.
export type RedisTreeDataNode = DataNode & {
  nodeType: 'group' | 'leaf';
  groupName?: string;
  groupLeafCount?: number;
  leafLabel?: string;
  rawKey?: string;
  keyType?: string;
  ttl?: number;
  descendantRawKeys?: string[]; // raw keys of every leaf under a group node
};

// Result of buildRedisKeyTree: the renderable tree plus the key of every
// group node (useful e.g. for expand-all behaviour).
export type RedisKeyTreeResult = {
  treeData: RedisTreeDataNode[];
  groupKeys: string[];
};

// Checked / half-checked node keys for antd Tree's controlled check state.
export type RedisTreeCheckedState = {
  checked: string[];
  halfChecked: string[];
};

// Input state consumed by applyRenamedRedisKeyState.
export type RenamedRedisKeyStateInput = {
  keys: RedisKeyInfo[];
  selectedKey: string | null;
  selectedKeys: string[];
};

// Output state produced by applyRenamedRedisKeyState (same shape as input).
export type RenamedRedisKeyStateResult = {
  keys: RedisKeyInfo[];
  selectedKey: string | null;
  selectedKeys: string[];
};
|
||||
|
||||
const normalizeKeySegment = (segment: string): string => {
|
||||
return segment === '' ? EMPTY_SEGMENT_LABEL : segment;
|
||||
};
|
||||
|
||||
const createTreeGroup = (name: string, path: string): RedisKeyTreeGroup => {
|
||||
return { name, path, children: new Map(), leaves: [], leafCount: 0 };
|
||||
};
|
||||
|
||||
const calculateGroupLeafCount = (group: RedisKeyTreeGroup): number => {
|
||||
let count = group.leaves.length;
|
||||
group.children.forEach((child) => {
|
||||
count += calculateGroupLeafCount(child);
|
||||
});
|
||||
group.leafCount = count;
|
||||
return count;
|
||||
};
|
||||
|
||||
export const buildLeafNodeKey = (rawKey: string): string => `key:${rawKey}`;
|
||||
|
||||
export const parseRawKeyFromNodeKey = (nodeKey: React.Key): string | null => {
|
||||
const keyText = String(nodeKey);
|
||||
if (!keyText.startsWith('key:')) {
|
||||
return null;
|
||||
}
|
||||
return keyText.slice(4);
|
||||
};
|
||||
|
||||
/**
 * Build an antd-compatible tree from a flat list of Redis keys by grouping
 * on the ':' delimiter. Returns both the tree and the key of every group
 * node created. Group nodes are sorted by name; leaf ordering follows the
 * input unless sortLeafNodes is true.
 */
export const buildRedisKeyTree = (
  keys: RedisKeyInfo[],
  sortLeafNodes: boolean
): RedisKeyTreeResult => {
  const root = createTreeGroup('__root__', '__root__');

  // Phase 1: partition keys into a prefix tree.
  keys.forEach((keyInfo) => {
    const segments = keyInfo.key.split(KEY_GROUP_DELIMITER);
    if (segments.length <= 1) {
      // Undelimited keys become leaves directly under the root.
      root.leaves.push({ keyInfo, label: keyInfo.key });
      return;
    }

    const groupSegments = segments.slice(0, -1);
    const leafLabel = normalizeKeySegment(segments[segments.length - 1]);
    let current = root;
    const pathParts: string[] = [];

    // Walk (creating on demand) one group level per prefix segment.
    groupSegments.forEach((segment) => {
      const normalized = normalizeKeySegment(segment);
      pathParts.push(normalized);
      const groupPath = pathParts.join(KEY_GROUP_DELIMITER);
      let child = current.children.get(normalized);
      if (!child) {
        child = createTreeGroup(normalized, groupPath);
        current.children.set(normalized, child);
      }
      current = child;
    });

    current.leaves.push({ keyInfo, label: leafLabel });
  });

  // Phase 2: cache per-subtree leaf counts, then materialize antd nodes.
  calculateGroupLeafCount(root);
  const groupKeys: string[] = [];

  const toTreeNodes = (group: RedisKeyTreeGroup): RedisTreeDataNode[] => {
    // Groups are always name-sorted; leaves only when the caller asks.
    const childGroups = Array.from(group.children.values()).sort((a, b) => a.name.localeCompare(b.name));
    const childLeaves = sortLeafNodes
      ? [...group.leaves].sort((a, b) => a.keyInfo.key.localeCompare(b.keyInfo.key))
      : group.leaves;

    const groupNodes: RedisTreeDataNode[] = childGroups.map((child) => {
      const children = toTreeNodes(child);
      // Collect every raw key below this group so check/uncheck can
      // operate on the whole subtree in one step.
      const descendantRawKeys = children.flatMap((node) => {
        if (node.nodeType === 'leaf') {
          return node.rawKey ? [node.rawKey] : [];
        }
        return node.descendantRawKeys || [];
      });
      const groupNodeKey = `group:${child.path}`;
      groupKeys.push(groupNodeKey);
      return {
        key: groupNodeKey,
        title: child.name,
        nodeType: 'group',
        groupName: child.name,
        groupLeafCount: child.leafCount,
        selectable: false,
        descendantRawKeys,
        children,
      };
    });

    const leafNodes: RedisTreeDataNode[] = childLeaves.map((leaf) => {
      return {
        key: buildLeafNodeKey(leaf.keyInfo.key),
        isLeaf: true,
        title: leaf.label,
        nodeType: 'leaf',
        leafLabel: leaf.label,
        rawKey: leaf.keyInfo.key,
        keyType: leaf.keyInfo.type,
        ttl: leaf.keyInfo.ttl,
      };
    });

    // Group nodes render before leaf nodes at each level.
    return [...groupNodes, ...leafNodes];
  };

  return {
    treeData: toTreeNodes(root),
    groupKeys,
  };
};
|
||||
|
||||
export const applyTreeNodeCheck = (
|
||||
selectedKeys: string[],
|
||||
node: RedisTreeDataNode,
|
||||
checked: boolean
|
||||
): string[] => {
|
||||
if (node.nodeType === 'leaf') {
|
||||
if (!node.rawKey) {
|
||||
return selectedKeys;
|
||||
}
|
||||
if (checked) {
|
||||
return Array.from(new Set([...selectedKeys, node.rawKey]));
|
||||
}
|
||||
return selectedKeys.filter((item) => item !== node.rawKey);
|
||||
}
|
||||
|
||||
const descendantRawKeys = node.descendantRawKeys || [];
|
||||
if (descendantRawKeys.length === 0) {
|
||||
return selectedKeys;
|
||||
}
|
||||
if (checked) {
|
||||
return Array.from(new Set([...selectedKeys, ...descendantRawKeys]));
|
||||
}
|
||||
const removeSet = new Set(descendantRawKeys);
|
||||
return selectedKeys.filter((item) => !removeSet.has(item));
|
||||
};
|
||||
|
||||
const walkGroupStates = (
|
||||
nodes: RedisTreeDataNode[],
|
||||
selectedKeySet: Set<string>,
|
||||
checked: string[],
|
||||
halfChecked: string[]
|
||||
) => {
|
||||
nodes.forEach((node) => {
|
||||
if (node.nodeType === 'leaf') {
|
||||
if (node.rawKey && selectedKeySet.has(node.rawKey)) {
|
||||
checked.push(String(node.key));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
walkGroupStates((node.children || []) as RedisTreeDataNode[], selectedKeySet, checked, halfChecked);
|
||||
const descendantRawKeys = node.descendantRawKeys || [];
|
||||
if (descendantRawKeys.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const selectedCount = descendantRawKeys.filter((rawKey) => selectedKeySet.has(rawKey)).length;
|
||||
if (selectedCount === descendantRawKeys.length) {
|
||||
checked.push(String(node.key));
|
||||
return;
|
||||
}
|
||||
if (selectedCount > 0) {
|
||||
halfChecked.push(String(node.key));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const buildCheckedTreeNodeState = (
|
||||
selectedKeys: string[],
|
||||
keyTree: RedisKeyTreeResult
|
||||
): RedisTreeCheckedState => {
|
||||
const selectedKeySet = new Set(selectedKeys);
|
||||
const checked: string[] = [];
|
||||
const halfChecked: string[] = [];
|
||||
|
||||
walkGroupStates(keyTree.treeData, selectedKeySet, checked, halfChecked);
|
||||
return { checked, halfChecked };
|
||||
};
|
||||
|
||||
export const isGroupFullyChecked = (
|
||||
node: RedisTreeDataNode,
|
||||
selectedKeys: string[]
|
||||
): boolean => {
|
||||
if (node.nodeType !== 'group') {
|
||||
return false;
|
||||
}
|
||||
const descendantRawKeys = node.descendantRawKeys || [];
|
||||
if (descendantRawKeys.length === 0) {
|
||||
return false;
|
||||
}
|
||||
const selectedKeySet = new Set(selectedKeys);
|
||||
return descendantRawKeys.every((rawKey) => selectedKeySet.has(rawKey));
|
||||
};
|
||||
|
||||
export const applyRenamedRedisKeyState = (
|
||||
state: RenamedRedisKeyStateInput,
|
||||
oldKey: string,
|
||||
newKey: string
|
||||
): RenamedRedisKeyStateResult => {
|
||||
return {
|
||||
keys: state.keys.map((item) => (item.key === oldKey ? { ...item, key: newKey } : item)),
|
||||
selectedKey: state.selectedKey === oldKey ? newKey : state.selectedKey,
|
||||
selectedKeys: state.selectedKeys.map((item) => (item === oldKey ? newKey : item)),
|
||||
};
|
||||
};
|
||||
50
frontend/src/components/redisViewerWorkbenchTheme.test.ts
Normal file
50
frontend/src/components/redisViewerWorkbenchTheme.test.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import { buildRedisWorkbenchTheme } from './redisViewerWorkbenchTheme';
|
||||
|
||||
const assertEqual = (actual: unknown, expected: unknown, message: string) => {
|
||||
if (actual !== expected) {
|
||||
throw new Error(`${message}\nactual: ${String(actual)}\nexpected: ${String(expected)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const assertNotEqual = (actual: unknown, expected: unknown, message: string) => {
|
||||
if (actual === expected) {
|
||||
throw new Error(`${message}\nactual: ${String(actual)}\nnotExpected: ${String(expected)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const assertMatch = (value: string, pattern: RegExp, message: string) => {
|
||||
if (!pattern.test(value)) {
|
||||
throw new Error(`${message}\nactual: ${value}\npattern: ${String(pattern)}`);
|
||||
}
|
||||
};
|
||||
|
||||
// Dark variant with translucency enabled (opacity < 1, blur > 0): check
// the structural shape of the tokens and the neutral dark palette.
const darkTheme = buildRedisWorkbenchTheme({
  darkMode: true,
  opacity: 0.72,
  blur: 14,
});

assertEqual(darkTheme.isDark, true, 'dark 主题标记应为 true');
assertMatch(darkTheme.panelBg, /^rgba\(/, 'dark 主题面板背景应为 rgba');
assertMatch(darkTheme.toolbarPrimaryBg, /^linear-gradient\(/, '工具栏主按钮应使用渐变背景');
assertNotEqual(darkTheme.actionDangerBg, darkTheme.actionSecondaryBg, '危险态按钮背景不应与普通按钮相同');
assertNotEqual(darkTheme.treeSelectedBg, darkTheme.treeHoverBg, '树节点选中态与悬浮态不应相同');
assertMatch(darkTheme.appBg, /rgba\(15, 15, 17,/, 'dark 背景应保持中性黑基底');
assertMatch(darkTheme.panelBg, /rgba\(24, 24, 28,/, 'dark 面板背景应保持中性黑灰');
assertMatch(darkTheme.panelBgStrong, /rgba\(31, 31, 36,/, 'dark 强面板背景应保持中性黑灰');
assertEqual(darkTheme.backdropFilter, 'blur(14px)', 'blur 参数应映射为 backdropFilter');

// Light variant, fully opaque and with no blur: blur=0 must disable the
// backdrop filter entirely.
const lightTheme = buildRedisWorkbenchTheme({
  darkMode: false,
  opacity: 1,
  blur: 0,
});

assertEqual(lightTheme.isDark, false, 'light 主题标记应为 false');
assertMatch(lightTheme.panelBg, /^rgba\(/, 'light 主题面板背景应为 rgba');
assertMatch(lightTheme.contentEmptyBg, /^linear-gradient\(/, 'light 空状态背景应为渐变');
assertNotEqual(lightTheme.textPrimary, lightTheme.textSecondary, '主次文本颜色应区分');
assertNotEqual(lightTheme.statusTagBg, lightTheme.statusTagMutedBg, '状态 tag 应区分普通与弱化样式');
assertEqual(lightTheme.backdropFilter, 'none', 'blur=0 时 backdropFilter 应为 none');

console.log('redisViewerWorkbenchTheme tests passed');
|
||||
129
frontend/src/components/redisViewerWorkbenchTheme.ts
Normal file
129
frontend/src/components/redisViewerWorkbenchTheme.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
// Inputs driving theme generation: dark/light switch plus the user's
// window translucency settings (opacity and backdrop blur).
type RedisWorkbenchThemeInput = {
  darkMode: boolean;
  opacity: number;
  blur: number;
};

// Full set of CSS tokens consumed by the Redis workbench UI. All values are
// ready-to-use CSS strings (colors, gradients, borders, shadows, filters).
type RedisWorkbenchTheme = {
  isDark: boolean;
  appBg: string;
  panelBg: string;
  panelBgStrong: string;
  panelBgSubtle: string;
  panelBorder: string;
  panelInset: string;
  toolbarPrimaryBg: string;
  contentEmptyBg: string;
  textPrimary: string;
  textSecondary: string;
  textMuted: string;
  accent: string;
  accentSoft: string;
  accentBorder: string;
  actionSecondaryBg: string;
  actionSecondaryBorder: string;
  actionDangerBg: string;
  actionDangerBorder: string;
  actionDangerText: string;
  statusTagBg: string;
  statusTagBorder: string;
  statusTagMutedBg: string;
  statusTagMutedBorder: string;
  treeHoverBg: string;
  treeSelectedBg: string;
  treeSelectedBorder: string;
  divider: string;
  shadow: string;
  backdropFilter: string;
};

// Clamp value into [min, max].
const clamp = (value: number, min: number, max: number) => Math.min(max, Math.max(min, value));
|
||||
|
||||
/**
 * Build the full CSS token set for the Redis workbench from the theme mode
 * and the user's window translucency settings. Opacity is clamped to
 * [0.1, 1]; blur is rounded to a non-negative integer pixel count. Any
 * translucency (opacity below ~1 or blur above 0) switches panel alphas to
 * a much lighter "glass" range in both color schemes.
 */
export const buildRedisWorkbenchTheme = ({
  darkMode,
  opacity,
  blur,
}: RedisWorkbenchThemeInput): RedisWorkbenchTheme => {
  const normalizedOpacity = clamp(opacity, 0.1, 1);
  const normalizedBlur = Math.max(0, Math.round(blur));
  // Treat near-1 opacity as opaque to avoid float-comparison artifacts.
  const isTranslucent = normalizedOpacity < 0.999 || normalizedBlur > 0;

  if (darkMode) {
    // Alphas scale with opacity but are clamped into narrow bands so the
    // dark glass effect stays legible at any user setting.
    const appTopAlpha = isTranslucent ? Math.max(0.08, Math.min(0.22, normalizedOpacity * 0.16)) : 0.92;
    const appBottomAlpha = isTranslucent ? Math.max(0.12, Math.min(0.28, normalizedOpacity * 0.22)) : 0.96;
    const panelAlpha = isTranslucent ? Math.max(0.06, Math.min(0.16, normalizedOpacity * 0.1)) : 0.34;
    const strongAlpha = isTranslucent ? Math.max(0.1, Math.min(0.22, normalizedOpacity * 0.16)) : 0.42;
    const subtleAlpha = isTranslucent ? Math.max(0.03, Math.min(0.08, normalizedOpacity * 0.05)) : 0.08;
    return {
      isDark: true,
      appBg: `linear-gradient(180deg, rgba(15, 15, 17, ${appTopAlpha}) 0%, rgba(11, 11, 13, ${appBottomAlpha}) 100%)`,
      panelBg: `rgba(24, 24, 28, ${panelAlpha})`,
      panelBgStrong: `rgba(31, 31, 36, ${strongAlpha})`,
      panelBgSubtle: `rgba(255, 255, 255, ${subtleAlpha})`,
      panelBorder: `1px solid rgba(255, 255, 255, ${isTranslucent ? Math.max(0.12, Math.min(0.24, normalizedOpacity * 0.2)) : 0.08})`,
      panelInset: `inset 0 1px 0 rgba(255,255,255,${isTranslucent ? Math.max(0.05, Math.min(0.12, normalizedOpacity * 0.1)) : 0.04})`,
      toolbarPrimaryBg: `linear-gradient(135deg, rgba(246,196,83,0.22) 0%, rgba(246,196,83,0.12) 100%)`,
      contentEmptyBg: `linear-gradient(180deg, rgba(255,255,255,0.03) 0%, rgba(255,255,255,0.015) 100%)`,
      textPrimary: 'rgba(245, 247, 251, 0.96)',
      textSecondary: 'rgba(218, 224, 235, 0.82)',
      textMuted: 'rgba(168, 177, 194, 0.72)',
      accent: '#f6c453',
      accentSoft: 'rgba(246, 196, 83, 0.18)',
      accentBorder: 'rgba(246, 196, 83, 0.3)',
      actionSecondaryBg: 'rgba(255, 255, 255, 0.04)',
      actionSecondaryBorder: 'rgba(255, 255, 255, 0.09)',
      actionDangerBg: 'rgba(255, 95, 95, 0.12)',
      actionDangerBorder: 'rgba(255, 95, 95, 0.28)',
      actionDangerText: '#ff8f8f',
      statusTagBg: 'rgba(25, 106, 255, 0.16)',
      statusTagBorder: 'rgba(25, 106, 255, 0.28)',
      statusTagMutedBg: 'rgba(255, 255, 255, 0.04)',
      statusTagMutedBorder: 'rgba(255, 255, 255, 0.08)',
      treeHoverBg: 'rgba(255, 255, 255, 0.045)',
      treeSelectedBg: 'linear-gradient(90deg, rgba(246,196,83,0.2) 0%, rgba(246,196,83,0.08) 100%)',
      treeSelectedBorder: 'rgba(246, 196, 83, 0.24)',
      divider: 'rgba(255, 255, 255, 0.07)',
      shadow: '0 20px 48px rgba(0, 0, 0, 0.26)',
      backdropFilter: normalizedBlur > 0 ? `blur(${normalizedBlur}px)` : 'none',
    };
  }

  // Light scheme: higher base alphas, blue accent instead of gold.
  const appTopAlpha = isTranslucent ? Math.max(0.16, Math.min(0.36, normalizedOpacity * 0.24)) : 0.98;
  const appBottomAlpha = isTranslucent ? Math.max(0.22, Math.min(0.44, normalizedOpacity * 0.32)) : 0.96;
  const panelAlpha = isTranslucent ? Math.max(0.18, Math.min(0.4, normalizedOpacity * 0.26)) : 0.94;
  const strongAlpha = isTranslucent ? Math.max(0.26, Math.min(0.52, normalizedOpacity * 0.34)) : 0.98;
  return {
    isDark: false,
    appBg: `linear-gradient(180deg, rgba(248, 250, 252, ${appTopAlpha}) 0%, rgba(242, 245, 248, ${appBottomAlpha}) 100%)`,
    panelBg: `rgba(255, 255, 255, ${panelAlpha})`,
    panelBgStrong: `rgba(255, 255, 255, ${strongAlpha})`,
    panelBgSubtle: 'rgba(15, 23, 42, 0.03)',
    panelBorder: `1px solid rgba(15, 23, 42, ${isTranslucent ? Math.max(0.1, Math.min(0.18, normalizedOpacity * 0.12)) : 0.08})`,
    panelInset: `inset 0 1px 0 rgba(255,255,255,${isTranslucent ? 0.38 : 0.72})`,
    toolbarPrimaryBg: 'linear-gradient(135deg, rgba(22,119,255,0.12) 0%, rgba(22,119,255,0.06) 100%)',
    contentEmptyBg: 'linear-gradient(180deg, rgba(15,23,42,0.02) 0%, rgba(15,23,42,0.01) 100%)',
    textPrimary: 'rgba(15, 23, 42, 0.92)',
    textSecondary: 'rgba(51, 65, 85, 0.82)',
    textMuted: 'rgba(100, 116, 139, 0.76)',
    accent: '#1677ff',
    accentSoft: 'rgba(22, 119, 255, 0.12)',
    accentBorder: 'rgba(22, 119, 255, 0.22)',
    actionSecondaryBg: 'rgba(255, 255, 255, 0.72)',
    actionSecondaryBorder: 'rgba(15, 23, 42, 0.08)',
    actionDangerBg: 'rgba(255, 77, 79, 0.08)',
    actionDangerBorder: 'rgba(255, 77, 79, 0.24)',
    actionDangerText: '#cf1322',
    statusTagBg: 'rgba(22, 119, 255, 0.1)',
    statusTagBorder: 'rgba(22, 119, 255, 0.16)',
    statusTagMutedBg: 'rgba(15, 23, 42, 0.04)',
    statusTagMutedBorder: 'rgba(15, 23, 42, 0.08)',
    treeHoverBg: 'rgba(15, 23, 42, 0.035)',
    treeSelectedBg: 'linear-gradient(90deg, rgba(22,119,255,0.12) 0%, rgba(22,119,255,0.05) 100%)',
    treeSelectedBorder: 'rgba(22, 119, 255, 0.18)',
    divider: 'rgba(15, 23, 42, 0.08)',
    shadow: '0 22px 52px rgba(15, 23, 42, 0.08)',
    backdropFilter: normalizedBlur > 0 ? `blur(${normalizedBlur}px)` : 'none',
  };
};
|
||||
|
||||
export type { RedisWorkbenchTheme, RedisWorkbenchThemeInput };
|
||||
@@ -9,6 +9,36 @@ import { loader } from '@monaco-editor/react'
|
||||
import * as monaco from 'monaco-editor'
|
||||
loader.config({ monaco })
|
||||
|
||||
// Browser-dev fallback: when the app runs outside the Wails runtime (plain
// `vite dev` in a browser) `window.go` is absent, so install a stub backend
// whose methods resolve to inert values. This keeps the UI renderable
// without the Go host. NOTE(review): the method list appears to mirror the
// generated Wails bindings — confirm and keep in sync when backend methods
// change.
if (typeof window !== 'undefined' && !(window as any).go) {
  (window as any).go = {
    app: {
      App: {
        CheckUpdate: async () => ({ success: false }),
        DownloadUpdate: async () => ({ success: false }),
        GetSavedConnections: async () => [],
        SaveConnection: async () => null,
        DeleteConnection: async () => null,
        OpenConnection: async () => null,
        CloseConnection: async () => null,
        GetDatabases: async () => [],
        GetTables: async () => [],
        GetTableData: async () => ({ columns: [], rows: [], total: 0 }),
        GetTableColumns: async () => [],
        ExecuteQuery: async () => ({ columns: [], rows: [], time: 0 }),
        GetSavedQueries: async () => [],
        SaveQuery: async () => null,
        DeleteQuery: async () => null,
        GetAppInfo: async () => ({}),
        CheckForUpdates: async () => ({ success: false }),
        OpenDownloadedUpdateDirectory: async () => ({ success: false }),
        InstallUpdateAndRestart: async () => ({ success: false }),
        ImportConfigFile: async () => ({ success: false }),
        ExportData: async () => ({ success: false }),
      }
    }
  };
}
|
||||
|
||||
// 全局注册透明主题,避免每个 Editor 组件 beforeMount 中重复定义
|
||||
monaco.editor.defineTheme('transparent-dark', {
|
||||
base: 'vs-dark', inherit: true, rules: [],
|
||||
|
||||
@@ -1,8 +1,22 @@
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import { ConnectionConfig, SavedConnection, TabData, SavedQuery } from './types';
|
||||
import { ConnectionConfig, ProxyConfig, SavedConnection, TabData, SavedQuery, ConnectionTag } from './types';
|
||||
import {
|
||||
ShortcutAction,
|
||||
ShortcutBinding,
|
||||
ShortcutOptions,
|
||||
DEFAULT_SHORTCUT_OPTIONS,
|
||||
cloneShortcutOptions,
|
||||
sanitizeShortcutOptions,
|
||||
} from './utils/shortcuts';
|
||||
|
||||
const DEFAULT_APPEARANCE = { opacity: 1.0, blur: 0 };
|
||||
const DEFAULT_APPEARANCE = { enabled: true, opacity: 1.0, blur: 0 };
|
||||
const DEFAULT_UI_SCALE = 1.0;
|
||||
const MIN_UI_SCALE = 0.8;
|
||||
const MAX_UI_SCALE = 1.25;
|
||||
const DEFAULT_FONT_SIZE = 14;
|
||||
const MIN_FONT_SIZE = 12;
|
||||
const MAX_FONT_SIZE = 20;
|
||||
const DEFAULT_STARTUP_FULLSCREEN = false;
|
||||
const LEGACY_DEFAULT_OPACITY = 0.95;
|
||||
const OPACITY_EPSILON = 1e-6;
|
||||
@@ -11,12 +25,23 @@ const MAX_HOST_ENTRY_LENGTH = 512;
|
||||
const MAX_HOST_ENTRIES = 64;
|
||||
const DEFAULT_TIMEOUT_SECONDS = 30;
|
||||
const MAX_TIMEOUT_SECONDS = 3600;
|
||||
const PERSIST_VERSION = 6;
|
||||
const DEFAULT_CONNECTION_TYPE = 'mysql';
|
||||
const DEFAULT_GLOBAL_PROXY: GlobalProxyConfig = {
|
||||
enabled: false,
|
||||
type: 'socks5',
|
||||
host: '',
|
||||
port: 1080,
|
||||
user: '',
|
||||
password: '',
|
||||
};
|
||||
const SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'doris',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'clickhouse',
|
||||
'postgres',
|
||||
'redis',
|
||||
'tdengine',
|
||||
@@ -31,18 +56,38 @@ const SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'duckdb',
|
||||
'custom',
|
||||
]);
|
||||
const SSL_SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'dameng',
|
||||
'clickhouse',
|
||||
'postgres',
|
||||
'sqlserver',
|
||||
'oracle',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'mongodb',
|
||||
'redis',
|
||||
'tdengine',
|
||||
]);
|
||||
|
||||
const getDefaultPortByType = (type: string): number => {
|
||||
switch (type) {
|
||||
case 'mysql':
|
||||
case 'mariadb':
|
||||
return 3306;
|
||||
case 'doris':
|
||||
case 'diros':
|
||||
return 9030;
|
||||
case 'duckdb':
|
||||
return 0;
|
||||
case 'sphinx':
|
||||
return 9306;
|
||||
case 'clickhouse':
|
||||
return 9000;
|
||||
case 'postgres':
|
||||
case 'vastbase':
|
||||
return 5432;
|
||||
@@ -93,6 +138,13 @@ const normalizeIntegerInRange = (value: unknown, fallbackValue: number, min: num
|
||||
return normalized;
|
||||
};
|
||||
|
||||
const normalizeFloatInRange = (value: unknown, fallbackValue: number, min: number, max: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackValue;
|
||||
if (parsed < min || parsed > max) return fallbackValue;
|
||||
return parsed;
|
||||
};
|
||||
|
||||
const isValidHostEntry = (entry: string): boolean => {
|
||||
if (!entry) return false;
|
||||
if (entry.length > MAX_HOST_ENTRY_LENGTH) return false;
|
||||
@@ -138,6 +190,9 @@ const sanitizeAddressList = (value: unknown): string[] => {
|
||||
|
||||
const normalizeConnectionType = (value: unknown): string => {
|
||||
const type = toTrimmedString(value).toLowerCase();
|
||||
if (type === 'doris') {
|
||||
return 'diros';
|
||||
}
|
||||
return SUPPORTED_CONNECTION_TYPES.has(type) ? type : DEFAULT_CONNECTION_TYPE;
|
||||
};
|
||||
|
||||
@@ -147,6 +202,16 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
const defaultPort = getDefaultPortByType(type);
|
||||
const savePassword = typeof raw.savePassword === 'boolean' ? raw.savePassword : true;
|
||||
const mongoSrv = !!raw.mongoSrv;
|
||||
const sslCapable = SSL_SUPPORTED_CONNECTION_TYPES.has(type);
|
||||
const sslModeRaw = toTrimmedString(raw.sslMode, 'preferred').toLowerCase();
|
||||
const sslMode: 'preferred' | 'required' | 'skip-verify' | 'disable' =
|
||||
sslModeRaw === 'required'
|
||||
? 'required'
|
||||
: sslModeRaw === 'skip-verify'
|
||||
? 'skip-verify'
|
||||
: sslModeRaw === 'disable'
|
||||
? 'disable'
|
||||
: 'preferred';
|
||||
|
||||
const sshRaw = (raw.ssh && typeof raw.ssh === 'object') ? raw.ssh as Record<string, unknown> : {};
|
||||
const ssh = {
|
||||
@@ -166,6 +231,18 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
user: toTrimmedString(proxyRaw.user),
|
||||
password: toTrimmedString(proxyRaw.password),
|
||||
};
|
||||
const httpTunnelRaw = (raw.httpTunnel && typeof raw.httpTunnel === 'object')
|
||||
? raw.httpTunnel as Record<string, unknown>
|
||||
: ((raw.HTTPTunnel && typeof raw.HTTPTunnel === 'object') ? raw.HTTPTunnel as Record<string, unknown> : {});
|
||||
const httpTunnel = {
|
||||
host: toTrimmedString(httpTunnelRaw.host ?? raw.httpTunnelHost),
|
||||
port: normalizePort(httpTunnelRaw.port ?? raw.httpTunnelPort, 8080),
|
||||
user: toTrimmedString(httpTunnelRaw.user ?? raw.httpTunnelUser),
|
||||
password: toTrimmedString(httpTunnelRaw.password ?? raw.httpTunnelPassword),
|
||||
};
|
||||
const supportsNetworkTunnel = type !== 'sqlite' && type !== 'duckdb';
|
||||
const useHttpTunnel = supportsNetworkTunnel && (raw.useHttpTunnel === true || raw.UseHTTPTunnel === true);
|
||||
const useProxy = supportsNetworkTunnel && !!raw.useProxy && !useHttpTunnel;
|
||||
|
||||
const safeConfig: ConnectionConfig & Record<string, unknown> = {
|
||||
...raw,
|
||||
@@ -176,13 +253,19 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
password: savePassword ? toTrimmedString(raw.password) : '',
|
||||
savePassword,
|
||||
database: toTrimmedString(raw.database),
|
||||
useSSL: sslCapable ? !!raw.useSSL : false,
|
||||
sslMode: sslCapable ? sslMode : 'disable',
|
||||
sslCertPath: sslCapable ? toTrimmedString(raw.sslCertPath) : '',
|
||||
sslKeyPath: sslCapable ? toTrimmedString(raw.sslKeyPath) : '',
|
||||
useSSH: !!raw.useSSH,
|
||||
ssh,
|
||||
useProxy: !!raw.useProxy,
|
||||
useProxy,
|
||||
proxy,
|
||||
useHttpTunnel,
|
||||
httpTunnel,
|
||||
uri: toTrimmedString(raw.uri).slice(0, MAX_URI_LENGTH),
|
||||
hosts: sanitizeAddressList(raw.hosts),
|
||||
topology: raw.topology === 'replica' ? 'replica' : 'single',
|
||||
topology: raw.topology === 'replica' ? 'replica' : (raw.topology === 'cluster' ? 'cluster' : 'single'),
|
||||
mysqlReplicaUser: toTrimmedString(raw.mysqlReplicaUser),
|
||||
mysqlReplicaPassword: savePassword ? toTrimmedString(raw.mysqlReplicaPassword) : '',
|
||||
replicaSet: toTrimmedString(raw.replicaSet),
|
||||
@@ -229,7 +312,8 @@ const sanitizeSavedConnection = (value: unknown, index: number): SavedConnection
|
||||
const raw = value as Record<string, unknown>;
|
||||
const config = sanitizeConnectionConfig(resolveConnectionConfigPayload(raw));
|
||||
const id = toTrimmedString(raw.id, `conn-${index + 1}`) || `conn-${index + 1}`;
|
||||
const fallbackName = config.host ? `${config.type}-${config.host}` : `连接-${index + 1}`;
|
||||
const displayType = config.type === 'diros' ? 'doris' : config.type;
|
||||
const fallbackName = config.host ? `${displayType}-${config.host}` : `连接-${index + 1}`;
|
||||
const name = toTrimmedString(raw.name, fallbackName) || fallbackName;
|
||||
const includeDatabases = sanitizeStringArray(raw.includeDatabases, 256);
|
||||
const includeRedisDatabases = sanitizeNumberArray(raw.includeRedisDatabases, 0, 15);
|
||||
@@ -262,6 +346,27 @@ const sanitizeConnections = (value: unknown): SavedConnection[] => {
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeConnectionTags = (value: unknown): ConnectionTag[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: ConnectionTag[] = [];
|
||||
const idSet = new Set<string>();
|
||||
|
||||
value.forEach((entry, index) => {
|
||||
if (!entry || typeof entry !== 'object') return;
|
||||
const raw = entry as Record<string, unknown>;
|
||||
const id = toTrimmedString(raw.id, `tag-${index + 1}`) || `tag-${index + 1}`;
|
||||
if (idSet.has(id)) return;
|
||||
idSet.add(id);
|
||||
|
||||
const name = toTrimmedString(raw.name, `标签-${index + 1}`) || `标签-${index + 1}`;
|
||||
const connectionIds = sanitizeStringArray(raw.connectionIds, 256);
|
||||
|
||||
result.push({ id, name, connectionIds });
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const isLegacyDefaultAppearance = (appearance: Partial<{ opacity: number; blur: number }> | undefined): boolean => {
|
||||
if (!appearance) {
|
||||
return true;
|
||||
@@ -288,25 +393,44 @@ export interface QueryOptions {
|
||||
showColumnType: boolean;
|
||||
}
|
||||
|
||||
export interface GlobalProxyConfig extends ProxyConfig {
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
interface AppState {
|
||||
connections: SavedConnection[];
|
||||
connectionTags: ConnectionTag[];
|
||||
tabs: TabData[];
|
||||
activeTabId: string | null;
|
||||
activeContext: { connectionId: string; dbName: string } | null;
|
||||
savedQueries: SavedQuery[];
|
||||
theme: 'light' | 'dark';
|
||||
appearance: { opacity: number; blur: number };
|
||||
appearance: { enabled: boolean; opacity: number; blur: number };
|
||||
uiScale: number;
|
||||
fontSize: number;
|
||||
startupFullscreen: boolean;
|
||||
globalProxy: GlobalProxyConfig;
|
||||
sqlFormatOptions: { keywordCase: 'upper' | 'lower' };
|
||||
queryOptions: QueryOptions;
|
||||
shortcutOptions: ShortcutOptions;
|
||||
sqlLogs: SqlLog[];
|
||||
tableAccessCount: Record<string, number>;
|
||||
tableSortPreference: Record<string, 'name' | 'frequency'>;
|
||||
tableColumnOrders: Record<string, string[]>;
|
||||
enableColumnOrderMemory: boolean;
|
||||
tableHiddenColumns: Record<string, string[]>;
|
||||
enableHiddenColumnMemory: boolean;
|
||||
|
||||
addConnection: (conn: SavedConnection) => void;
|
||||
updateConnection: (conn: SavedConnection) => void;
|
||||
removeConnection: (id: string) => void;
|
||||
|
||||
addConnectionTag: (tag: ConnectionTag) => void;
|
||||
updateConnectionTag: (tag: ConnectionTag) => void;
|
||||
removeConnectionTag: (id: string) => void;
|
||||
moveConnectionToTag: (connectionId: string, targetTagId: string | null) => void;
|
||||
reorderTags: (tagIds: string[]) => void;
|
||||
|
||||
addTab: (tab: TabData) => void;
|
||||
closeTab: (id: string) => void;
|
||||
closeOtherTabs: (id: string) => void;
|
||||
@@ -314,6 +438,7 @@ interface AppState {
|
||||
closeTabsToRight: (id: string) => void;
|
||||
closeTabsByConnection: (connectionId: string) => void;
|
||||
closeTabsByDatabase: (connectionId: string, dbName: string) => void;
|
||||
moveTab: (sourceId: string, targetId: string) => void;
|
||||
closeAllTabs: () => void;
|
||||
setActiveTab: (id: string) => void;
|
||||
setActiveContext: (context: { connectionId: string; dbName: string } | null) => void;
|
||||
@@ -322,16 +447,28 @@ interface AppState {
|
||||
deleteQuery: (id: string) => void;
|
||||
|
||||
setTheme: (theme: 'light' | 'dark') => void;
|
||||
setAppearance: (appearance: Partial<{ opacity: number; blur: number }>) => void;
|
||||
setAppearance: (appearance: Partial<{ enabled: boolean; opacity: number; blur: number }>) => void;
|
||||
setUiScale: (scale: number) => void;
|
||||
setFontSize: (size: number) => void;
|
||||
setStartupFullscreen: (enabled: boolean) => void;
|
||||
setGlobalProxy: (proxy: Partial<GlobalProxyConfig>) => void;
|
||||
setSqlFormatOptions: (options: { keywordCase: 'upper' | 'lower' }) => void;
|
||||
setQueryOptions: (options: Partial<QueryOptions>) => void;
|
||||
updateShortcut: (action: ShortcutAction, binding: Partial<ShortcutBinding>) => void;
|
||||
resetShortcutOptions: () => void;
|
||||
|
||||
addSqlLog: (log: SqlLog) => void;
|
||||
clearSqlLogs: () => void;
|
||||
|
||||
recordTableAccess: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
setTableSortPreference: (connectionId: string, dbName: string, sortBy: 'name' | 'frequency') => void;
|
||||
setTableColumnOrder: (connectionId: string, dbName: string, tableName: string, order: string[]) => void;
|
||||
setEnableColumnOrderMemory: (enabled: boolean) => void;
|
||||
clearTableColumnOrder: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
|
||||
setTableHiddenColumns: (connectionId: string, dbName: string, tableName: string, hiddenColumns: string[]) => void;
|
||||
setEnableHiddenColumnMemory: (enabled: boolean) => void;
|
||||
clearTableHiddenColumns: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
}
|
||||
|
||||
const sanitizeSavedQueries = (value: unknown): SavedQuery[] => {
|
||||
@@ -395,14 +532,37 @@ const sanitizeTableSortPreference = (value: unknown): Record<string, 'name' | 'f
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTableColumnOrders = (value: unknown): Record<string, string[]> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, string[]> = {};
|
||||
Object.entries(raw).forEach(([key, orderArray]) => {
|
||||
if (Array.isArray(orderArray)) {
|
||||
result[key] = orderArray.map(col => String(col));
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTableHiddenColumns = (value: unknown): Record<string, string[]> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, string[]> = {};
|
||||
Object.entries(raw).forEach(([key, hiddenArray]) => {
|
||||
if (Array.isArray(hiddenArray)) {
|
||||
result[key] = hiddenArray.map(col => String(col));
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeAppearance = (
|
||||
appearance: Partial<{ opacity: number; blur: number }> | undefined,
|
||||
appearance: Partial<{ enabled: boolean; opacity: number; blur: number }> | undefined,
|
||||
version: number
|
||||
): { opacity: number; blur: number } => {
|
||||
): { enabled: boolean; opacity: number; blur: number } => {
|
||||
if (!appearance || typeof appearance !== 'object') {
|
||||
return { ...DEFAULT_APPEARANCE };
|
||||
}
|
||||
const nextAppearance = {
|
||||
enabled: typeof appearance.enabled === 'boolean' ? appearance.enabled : DEFAULT_APPEARANCE.enabled,
|
||||
opacity: typeof appearance.opacity === 'number' ? appearance.opacity : DEFAULT_APPEARANCE.opacity,
|
||||
blur: typeof appearance.blur === 'number' ? appearance.blur : DEFAULT_APPEARANCE.blur,
|
||||
};
|
||||
@@ -416,6 +576,29 @@ const sanitizeStartupFullscreen = (value: unknown): boolean => {
|
||||
return value === true;
|
||||
};
|
||||
|
||||
const sanitizeUiScale = (value: unknown): number => {
|
||||
return normalizeFloatInRange(value, DEFAULT_UI_SCALE, MIN_UI_SCALE, MAX_UI_SCALE);
|
||||
};
|
||||
|
||||
const sanitizeFontSize = (value: unknown): number => {
|
||||
return normalizeIntegerInRange(value, DEFAULT_FONT_SIZE, MIN_FONT_SIZE, MAX_FONT_SIZE);
|
||||
};
|
||||
|
||||
const sanitizeGlobalProxy = (value: unknown): GlobalProxyConfig => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const typeRaw = toTrimmedString(raw.type, DEFAULT_GLOBAL_PROXY.type).toLowerCase();
|
||||
const type: 'socks5' | 'http' = typeRaw === 'http' ? 'http' : 'socks5';
|
||||
const fallbackPort = type === 'http' ? 8080 : 1080;
|
||||
return {
|
||||
enabled: raw.enabled === true,
|
||||
type,
|
||||
host: toTrimmedString(raw.host),
|
||||
port: normalizePort(raw.port, fallbackPort),
|
||||
user: toTrimmedString(raw.user),
|
||||
password: toTrimmedString(raw.password),
|
||||
};
|
||||
};
|
||||
|
||||
const unwrapPersistedAppState = (persistedState: unknown): Record<string, unknown> => {
|
||||
if (!persistedState || typeof persistedState !== 'object') {
|
||||
return {};
|
||||
@@ -431,24 +614,72 @@ export const useStore = create<AppState>()(
|
||||
persist(
|
||||
(set) => ({
|
||||
connections: [],
|
||||
connectionTags: [],
|
||||
tabs: [],
|
||||
activeTabId: null,
|
||||
activeContext: null,
|
||||
savedQueries: [],
|
||||
theme: 'light',
|
||||
appearance: { ...DEFAULT_APPEARANCE },
|
||||
uiScale: DEFAULT_UI_SCALE,
|
||||
fontSize: DEFAULT_FONT_SIZE,
|
||||
startupFullscreen: DEFAULT_STARTUP_FULLSCREEN,
|
||||
globalProxy: { ...DEFAULT_GLOBAL_PROXY },
|
||||
sqlFormatOptions: { keywordCase: 'upper' },
|
||||
queryOptions: { maxRows: 5000, showColumnComment: true, showColumnType: true },
|
||||
shortcutOptions: cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS),
|
||||
sqlLogs: [],
|
||||
tableAccessCount: {},
|
||||
tableSortPreference: {},
|
||||
tableColumnOrders: {},
|
||||
enableColumnOrderMemory: true,
|
||||
tableHiddenColumns: {},
|
||||
enableHiddenColumnMemory: true,
|
||||
|
||||
addConnection: (conn) => set((state) => ({ connections: [...state.connections, conn] })),
|
||||
updateConnection: (conn) => set((state) => ({
|
||||
connections: state.connections.map(c => c.id === conn.id ? conn : c)
|
||||
})),
|
||||
removeConnection: (id) => set((state) => ({ connections: state.connections.filter(c => c.id !== id) })),
|
||||
removeConnection: (id) => set((state) => ({
|
||||
connections: state.connections.filter(c => c.id !== id),
|
||||
connectionTags: state.connectionTags.map(tag => ({
|
||||
...tag,
|
||||
connectionIds: tag.connectionIds.filter(cid => cid !== id)
|
||||
}))
|
||||
})),
|
||||
|
||||
addConnectionTag: (tag) => set((state) => ({ connectionTags: [...state.connectionTags, tag] })),
|
||||
updateConnectionTag: (tag) => set((state) => ({
|
||||
connectionTags: state.connectionTags.map(t => t.id === tag.id ? tag : t)
|
||||
})),
|
||||
removeConnectionTag: (id) => set((state) => ({
|
||||
connectionTags: state.connectionTags.filter(t => t.id !== id)
|
||||
})),
|
||||
moveConnectionToTag: (connectionId, targetTagId) => set((state) => {
|
||||
const newTags = state.connectionTags.map(tag => {
|
||||
//先从所有tag中移除该connection
|
||||
const filteredIds = tag.connectionIds.filter(id => id !== connectionId);
|
||||
if (tag.id === targetTagId) {
|
||||
return { ...tag, connectionIds: [...filteredIds, connectionId] };
|
||||
}
|
||||
return { ...tag, connectionIds: filteredIds };
|
||||
});
|
||||
return { connectionTags: newTags };
|
||||
}),
|
||||
reorderTags: (tagIds) => set((state) => {
|
||||
const tagMap = new Map(state.connectionTags.map(t => [t.id, t]));
|
||||
const newTags: ConnectionTag[] = [];
|
||||
tagIds.forEach(id => {
|
||||
const tag = tagMap.get(id);
|
||||
if (tag) {
|
||||
newTags.push(tag);
|
||||
tagMap.delete(id);
|
||||
}
|
||||
});
|
||||
// 追加未指定的tag(如果有的话)
|
||||
newTags.push(...Array.from(tagMap.values()));
|
||||
return { connectionTags: newTags };
|
||||
}),
|
||||
|
||||
addTab: (tab) => set((state) => {
|
||||
const index = state.tabs.findIndex(t => t.id === tab.id);
|
||||
@@ -531,6 +762,23 @@ export const useStore = create<AppState>()(
|
||||
};
|
||||
}),
|
||||
|
||||
moveTab: (sourceId, targetId) => set((state) => {
|
||||
const fromId = String(sourceId || '').trim();
|
||||
const toId = String(targetId || '').trim();
|
||||
if (!fromId || !toId || fromId === toId) {
|
||||
return state;
|
||||
}
|
||||
const fromIndex = state.tabs.findIndex((tab) => tab.id === fromId);
|
||||
const toIndex = state.tabs.findIndex((tab) => tab.id === toId);
|
||||
if (fromIndex < 0 || toIndex < 0 || fromIndex === toIndex) {
|
||||
return state;
|
||||
}
|
||||
const nextTabs = [...state.tabs];
|
||||
const [movingTab] = nextTabs.splice(fromIndex, 1);
|
||||
nextTabs.splice(toIndex, 0, movingTab);
|
||||
return { tabs: nextTabs };
|
||||
}),
|
||||
|
||||
closeAllTabs: () => set(() => ({ tabs: [], activeTabId: null })),
|
||||
|
||||
setActiveTab: (id) => set({ activeTabId: id }),
|
||||
@@ -549,9 +797,22 @@ export const useStore = create<AppState>()(
|
||||
|
||||
setTheme: (theme) => set({ theme }),
|
||||
setAppearance: (appearance) => set((state) => ({ appearance: { ...state.appearance, ...appearance } })),
|
||||
setUiScale: (scale) => set({ uiScale: sanitizeUiScale(scale) }),
|
||||
setFontSize: (size) => set({ fontSize: sanitizeFontSize(size) }),
|
||||
setStartupFullscreen: (enabled) => set({ startupFullscreen: !!enabled }),
|
||||
setGlobalProxy: (proxy) => set((state) => ({ globalProxy: sanitizeGlobalProxy({ ...state.globalProxy, ...proxy }) })),
|
||||
setSqlFormatOptions: (options) => set({ sqlFormatOptions: options }),
|
||||
setQueryOptions: (options) => set((state) => ({ queryOptions: { ...state.queryOptions, ...options } })),
|
||||
updateShortcut: (action, binding) => set((state) => ({
|
||||
shortcutOptions: {
|
||||
...state.shortcutOptions,
|
||||
[action]: {
|
||||
...state.shortcutOptions[action],
|
||||
...binding,
|
||||
},
|
||||
},
|
||||
})),
|
||||
resetShortcutOptions: () => set({ shortcutOptions: cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS) }),
|
||||
|
||||
addSqlLog: (log) => set((state) => ({ sqlLogs: [log, ...state.sqlLogs].slice(0, 1000) })), // Keep last 1000 logs
|
||||
clearSqlLogs: () => set({ sqlLogs: [] }),
|
||||
@@ -576,22 +837,76 @@ export const useStore = create<AppState>()(
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
setTableColumnOrder: (connectionId, dbName, tableName, order) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
return {
|
||||
tableColumnOrders: {
|
||||
...state.tableColumnOrders,
|
||||
[key]: order
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
clearTableColumnOrder: (connectionId, dbName, tableName) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
const newOrders = { ...state.tableColumnOrders };
|
||||
delete newOrders[key];
|
||||
return { tableColumnOrders: newOrders };
|
||||
}),
|
||||
|
||||
setEnableColumnOrderMemory: (enabled) => set({ enableColumnOrderMemory: !!enabled }),
|
||||
|
||||
setTableHiddenColumns: (connectionId, dbName, tableName, hiddenColumns) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
return {
|
||||
tableHiddenColumns: {
|
||||
...state.tableHiddenColumns,
|
||||
[key]: hiddenColumns
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
clearTableHiddenColumns: (connectionId, dbName, tableName) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
const newHidden = { ...state.tableHiddenColumns };
|
||||
delete newHidden[key];
|
||||
return { tableHiddenColumns: newHidden };
|
||||
}),
|
||||
|
||||
setEnableHiddenColumnMemory: (enabled) => set({ enableHiddenColumnMemory: !!enabled }),
|
||||
}),
|
||||
{
|
||||
name: 'lite-db-storage', // name of the item in the storage (must be unique)
|
||||
version: 3,
|
||||
version: PERSIST_VERSION,
|
||||
migrate: (persistedState: unknown, version: number) => {
|
||||
const state = unwrapPersistedAppState(persistedState) as Partial<AppState>;
|
||||
const nextState: Partial<AppState> = { ...state };
|
||||
nextState.connections = sanitizeConnections(state.connections);
|
||||
if (version < 5) {
|
||||
nextState.connectionTags = sanitizeConnectionTags(state.connectionTags);
|
||||
} else {
|
||||
nextState.connectionTags = sanitizeConnectionTags(state.connectionTags);
|
||||
}
|
||||
nextState.savedQueries = sanitizeSavedQueries(state.savedQueries);
|
||||
nextState.theme = sanitizeTheme(state.theme);
|
||||
nextState.appearance = sanitizeAppearance(state.appearance, version);
|
||||
nextState.uiScale = sanitizeUiScale(state.uiScale);
|
||||
nextState.fontSize = sanitizeFontSize(state.fontSize);
|
||||
nextState.startupFullscreen = sanitizeStartupFullscreen(state.startupFullscreen);
|
||||
nextState.globalProxy = sanitizeGlobalProxy(state.globalProxy);
|
||||
nextState.sqlFormatOptions = sanitizeSqlFormatOptions(state.sqlFormatOptions);
|
||||
nextState.queryOptions = sanitizeQueryOptions(state.queryOptions);
|
||||
nextState.shortcutOptions = sanitizeShortcutOptions(state.shortcutOptions);
|
||||
nextState.tableAccessCount = sanitizeTableAccessCount(state.tableAccessCount);
|
||||
nextState.tableSortPreference = sanitizeTableSortPreference(state.tableSortPreference);
|
||||
// 新增的列排序记忆状态不需要做版本特殊兼容,直接做基本的类型保护
|
||||
const safeOrders = sanitizeTableColumnOrders(state.tableColumnOrders);
|
||||
nextState.tableColumnOrders = safeOrders;
|
||||
nextState.enableColumnOrderMemory = state.enableColumnOrderMemory !== false;
|
||||
const safeHidden = sanitizeTableHiddenColumns(state.tableHiddenColumns);
|
||||
nextState.tableHiddenColumns = safeHidden;
|
||||
nextState.enableHiddenColumnMemory = state.enableHiddenColumnMemory !== false;
|
||||
return nextState as AppState;
|
||||
},
|
||||
merge: (persistedState, currentState) => {
|
||||
@@ -600,26 +915,45 @@ export const useStore = create<AppState>()(
|
||||
...currentState,
|
||||
...state,
|
||||
connections: sanitizeConnections(state.connections),
|
||||
connectionTags: sanitizeConnectionTags(state.connectionTags),
|
||||
savedQueries: sanitizeSavedQueries(state.savedQueries),
|
||||
theme: sanitizeTheme(state.theme),
|
||||
appearance: sanitizeAppearance(state.appearance, 3),
|
||||
appearance: sanitizeAppearance(state.appearance, PERSIST_VERSION),
|
||||
uiScale: sanitizeUiScale(state.uiScale),
|
||||
fontSize: sanitizeFontSize(state.fontSize),
|
||||
startupFullscreen: sanitizeStartupFullscreen(state.startupFullscreen),
|
||||
globalProxy: sanitizeGlobalProxy(state.globalProxy),
|
||||
tableSortPreference: sanitizeTableSortPreference(state.tableSortPreference),
|
||||
tableColumnOrders: sanitizeTableColumnOrders(state.tableColumnOrders),
|
||||
enableColumnOrderMemory: state.enableColumnOrderMemory !== false,
|
||||
tableHiddenColumns: sanitizeTableHiddenColumns(state.tableHiddenColumns),
|
||||
enableHiddenColumnMemory: state.enableHiddenColumnMemory !== false,
|
||||
|
||||
sqlFormatOptions: sanitizeSqlFormatOptions(state.sqlFormatOptions),
|
||||
queryOptions: sanitizeQueryOptions(state.queryOptions),
|
||||
shortcutOptions: sanitizeShortcutOptions(state.shortcutOptions),
|
||||
tableAccessCount: sanitizeTableAccessCount(state.tableAccessCount),
|
||||
tableSortPreference: sanitizeTableSortPreference(state.tableSortPreference),
|
||||
};
|
||||
},
|
||||
partialize: (state) => ({
|
||||
connections: state.connections,
|
||||
connectionTags: state.connectionTags,
|
||||
savedQueries: state.savedQueries,
|
||||
theme: state.theme,
|
||||
appearance: state.appearance,
|
||||
uiScale: state.uiScale,
|
||||
fontSize: state.fontSize,
|
||||
startupFullscreen: state.startupFullscreen,
|
||||
globalProxy: state.globalProxy,
|
||||
sqlFormatOptions: state.sqlFormatOptions,
|
||||
queryOptions: state.queryOptions,
|
||||
shortcutOptions: state.shortcutOptions,
|
||||
tableAccessCount: state.tableAccessCount,
|
||||
tableSortPreference: state.tableSortPreference
|
||||
tableSortPreference: state.tableSortPreference,
|
||||
tableColumnOrders: state.tableColumnOrders,
|
||||
enableColumnOrderMemory: state.enableColumnOrderMemory,
|
||||
tableHiddenColumns: state.tableHiddenColumns,
|
||||
enableHiddenColumnMemory: state.enableHiddenColumnMemory
|
||||
}), // Don't persist logs
|
||||
}
|
||||
)
|
||||
|
||||
@@ -14,6 +14,13 @@ export interface ProxyConfig {
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface HTTPTunnelConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface ConnectionConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
@@ -22,17 +29,23 @@ export interface ConnectionConfig {
|
||||
password?: string;
|
||||
savePassword?: boolean;
|
||||
database?: string;
|
||||
useSSL?: boolean;
|
||||
sslMode?: 'preferred' | 'required' | 'skip-verify' | 'disable';
|
||||
sslCertPath?: string;
|
||||
sslKeyPath?: string;
|
||||
useSSH?: boolean;
|
||||
ssh?: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
useHttpTunnel?: boolean;
|
||||
httpTunnel?: HTTPTunnelConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
redisDB?: number; // Redis database index (0-15)
|
||||
uri?: string; // Connection URI for copy/paste
|
||||
hosts?: string[]; // Multi-host addresses: host:port
|
||||
topology?: 'single' | 'replica';
|
||||
topology?: 'single' | 'replica' | 'cluster';
|
||||
mysqlReplicaUser?: string;
|
||||
mysqlReplicaPassword?: string;
|
||||
replicaSet?: string;
|
||||
@@ -61,6 +74,12 @@ export interface SavedConnection {
|
||||
includeRedisDatabases?: number[]; // Redis databases to show (0-15)
|
||||
}
|
||||
|
||||
export interface ConnectionTag {
|
||||
id: string;
|
||||
name: string;
|
||||
connectionIds: string[];
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
name: string;
|
||||
type: string;
|
||||
@@ -109,6 +128,7 @@ export interface TabData {
|
||||
viewName?: string; // View name for view definition tabs
|
||||
routineName?: string; // Routine name for function/procedure definition tabs
|
||||
routineType?: string; // 'FUNCTION' or 'PROCEDURE'
|
||||
savedQueryId?: string; // Saved query identity for quick-save behavior
|
||||
}
|
||||
|
||||
export interface DatabaseNode {
|
||||
@@ -137,7 +157,7 @@ export interface RedisKeyInfo {
|
||||
|
||||
export interface RedisScanResult {
|
||||
keys: RedisKeyInfo[];
|
||||
cursor: number;
|
||||
cursor: string;
|
||||
}
|
||||
|
||||
export interface RedisValue {
|
||||
|
||||
@@ -10,6 +10,22 @@ const WINDOWS_BLUR_FACTOR = 1.00;
|
||||
|
||||
const clamp = (value: number, min: number, max: number) => Math.min(max, Math.max(min, value));
|
||||
|
||||
export interface AppearanceSettingsLike {
|
||||
enabled?: boolean;
|
||||
opacity?: number;
|
||||
blur?: number;
|
||||
}
|
||||
|
||||
export const resolveAppearanceValues = (appearance: AppearanceSettingsLike | undefined): { opacity: number; blur: number } => {
|
||||
if (!appearance || appearance.enabled !== false) {
|
||||
return {
|
||||
opacity: appearance?.opacity ?? DEFAULT_OPACITY,
|
||||
blur: appearance?.blur ?? 0,
|
||||
};
|
||||
}
|
||||
return { opacity: DEFAULT_OPACITY, blur: 0 };
|
||||
};
|
||||
|
||||
export const isMacLikePlatform = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
|
||||
86
frontend/src/utils/dataSourceCapabilities.ts
Normal file
86
frontend/src/utils/dataSourceCapabilities.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import type { ConnectionConfig } from '../types';
|
||||
|
||||
type ConnectionLike = Pick<ConnectionConfig, 'type' | 'driver'> | null | undefined;
|
||||
|
||||
const normalizeDataSourceToken = (raw: string): string => {
|
||||
const normalized = String(raw || '').trim().toLowerCase();
|
||||
switch (normalized) {
|
||||
case 'doris':
|
||||
return 'diros';
|
||||
case 'postgresql':
|
||||
return 'postgres';
|
||||
case 'dm':
|
||||
return 'dameng';
|
||||
default:
|
||||
return normalized;
|
||||
}
|
||||
};
|
||||
|
||||
export const resolveDataSourceType = (config: ConnectionLike): string => {
|
||||
if (!config) return '';
|
||||
const type = normalizeDataSourceToken(String(config.type || ''));
|
||||
if (type === 'custom') {
|
||||
const driver = normalizeDataSourceToken(String(config.driver || ''));
|
||||
return driver || 'custom';
|
||||
}
|
||||
return type;
|
||||
};
|
||||
|
||||
const SQL_QUERY_EXPORT_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlserver',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'tdengine',
|
||||
'clickhouse',
|
||||
]);
|
||||
|
||||
const COPY_INSERT_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlserver',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'tdengine',
|
||||
'clickhouse',
|
||||
]);
|
||||
|
||||
const QUERY_EDITOR_DISABLED_TYPES = new Set(['redis']);
|
||||
const FORCE_READ_ONLY_QUERY_TYPES = new Set(['tdengine', 'clickhouse']);
|
||||
|
||||
export type DataSourceCapabilities = {
|
||||
type: string;
|
||||
supportsQueryEditor: boolean;
|
||||
supportsSqlQueryExport: boolean;
|
||||
supportsCopyInsert: boolean;
|
||||
forceReadOnlyQueryResult: boolean;
|
||||
};
|
||||
|
||||
export const getDataSourceCapabilities = (config: ConnectionLike): DataSourceCapabilities => {
|
||||
const type = resolveDataSourceType(config);
|
||||
return {
|
||||
type,
|
||||
supportsQueryEditor: !QUERY_EDITOR_DISABLED_TYPES.has(type),
|
||||
supportsSqlQueryExport: SQL_QUERY_EXPORT_TYPES.has(type),
|
||||
supportsCopyInsert: COPY_INSERT_TYPES.has(type),
|
||||
forceReadOnlyQueryResult: FORCE_READ_ONLY_QUERY_TYPES.has(type),
|
||||
};
|
||||
};
|
||||
|
||||
1014
frontend/src/utils/mongodb.ts
Normal file
1014
frontend/src/utils/mongodb.ts
Normal file
File diff suppressed because it is too large
Load Diff
27
frontend/src/utils/overlayWorkbenchTheme.test.ts
Normal file
27
frontend/src/utils/overlayWorkbenchTheme.test.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { buildOverlayWorkbenchTheme } from './overlayWorkbenchTheme';
|
||||
|
||||
const assertEqual = (actual: unknown, expected: unknown, message: string) => {
|
||||
if (actual !== expected) {
|
||||
throw new Error(`${message}\nactual: ${String(actual)}\nexpected: ${String(expected)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const assertMatch = (value: string, pattern: RegExp, message: string) => {
|
||||
if (!pattern.test(value)) {
|
||||
throw new Error(`${message}\nactual: ${value}\npattern: ${String(pattern)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const darkTheme = buildOverlayWorkbenchTheme(true);
|
||||
assertEqual(darkTheme.isDark, true, 'dark 主题标记应为 true');
|
||||
assertMatch(darkTheme.shellBg, /rgba\(15, 15, 17,/, 'dark 弹层背景应保持中性黑');
|
||||
assertMatch(darkTheme.sectionBg, /rgba\(255,?\s*255,?\s*255,?\s*0\.03\)/, 'dark section 背景透明度应匹配');
|
||||
assertEqual(darkTheme.iconColor, '#ffd666', 'dark 图标色应为金色强调');
|
||||
|
||||
const lightTheme = buildOverlayWorkbenchTheme(false);
|
||||
assertEqual(lightTheme.isDark, false, 'light 主题标记应为 false');
|
||||
assertMatch(lightTheme.shellBg, /rgba\(255,255,255,0\.98\)/, 'light 弹层背景透明度应匹配');
|
||||
assertMatch(lightTheme.sectionBg, /rgba\(255,?\s*255,?\s*255,?\s*0\.84\)/, 'light section 背景透明度应匹配');
|
||||
assertEqual(lightTheme.iconColor, '#1677ff', 'light 图标色应为蓝色强调');
|
||||
|
||||
console.log('overlayWorkbenchTheme tests passed');
|
||||
59
frontend/src/utils/overlayWorkbenchTheme.ts
Normal file
59
frontend/src/utils/overlayWorkbenchTheme.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
type OverlayWorkbenchTheme = {
|
||||
isDark: boolean;
|
||||
shellBg: string;
|
||||
shellBorder: string;
|
||||
shellShadow: string;
|
||||
shellBackdropFilter: string;
|
||||
sectionBg: string;
|
||||
sectionBorder: string;
|
||||
mutedText: string;
|
||||
titleText: string;
|
||||
iconBg: string;
|
||||
iconColor: string;
|
||||
hoverBg: string;
|
||||
selectedBg: string;
|
||||
selectedText: string;
|
||||
divider: string;
|
||||
};
|
||||
|
||||
export const buildOverlayWorkbenchTheme = (darkMode: boolean): OverlayWorkbenchTheme => {
|
||||
if (darkMode) {
|
||||
return {
|
||||
isDark: true,
|
||||
shellBg: 'linear-gradient(180deg, rgba(15, 15, 17, 0.96) 0%, rgba(11, 11, 13, 0.98) 100%)',
|
||||
shellBorder: '1px solid rgba(255,255,255,0.08)',
|
||||
shellShadow: '0 24px 56px rgba(0,0,0,0.34)',
|
||||
shellBackdropFilter: 'blur(18px)',
|
||||
sectionBg: 'rgba(255,255,255,0.03)',
|
||||
sectionBorder: '1px solid rgba(255,255,255,0.08)',
|
||||
mutedText: 'rgba(255,255,255,0.5)',
|
||||
titleText: '#f5f7ff',
|
||||
iconBg: 'rgba(255,214,102,0.12)',
|
||||
iconColor: '#ffd666',
|
||||
hoverBg: 'rgba(255,214,102,0.10)',
|
||||
selectedBg: 'rgba(255,214,102,0.14)',
|
||||
selectedText: '#ffd666',
|
||||
divider: 'rgba(255,255,255,0.08)',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isDark: false,
|
||||
shellBg: 'linear-gradient(180deg, rgba(255,255,255,0.98) 0%, rgba(246,248,252,0.98) 100%)',
|
||||
shellBorder: '1px solid rgba(16,24,40,0.08)',
|
||||
shellShadow: '0 18px 42px rgba(15,23,42,0.12)',
|
||||
shellBackdropFilter: 'none',
|
||||
sectionBg: 'rgba(255,255,255,0.84)',
|
||||
sectionBorder: '1px solid rgba(16,24,40,0.08)',
|
||||
mutedText: 'rgba(16,24,40,0.55)',
|
||||
titleText: '#162033',
|
||||
iconBg: 'rgba(24,144,255,0.1)',
|
||||
iconColor: '#1677ff',
|
||||
hoverBg: 'rgba(24,144,255,0.08)',
|
||||
selectedBg: 'rgba(24,144,255,0.12)',
|
||||
selectedText: '#1677ff',
|
||||
divider: 'rgba(16,24,40,0.08)',
|
||||
};
|
||||
};
|
||||
|
||||
export type { OverlayWorkbenchTheme };
|
||||
258
frontend/src/utils/shortcuts.ts
Normal file
258
frontend/src/utils/shortcuts.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import type { KeyboardEvent as ReactKeyboardEvent } from 'react';
|
||||
|
||||
export type ShortcutAction =
|
||||
| 'runQuery'
|
||||
| 'focusSidebarSearch'
|
||||
| 'newQueryTab'
|
||||
| 'toggleLogPanel'
|
||||
| 'toggleTheme'
|
||||
| 'openShortcutManager';
|
||||
|
||||
export interface ShortcutBinding {
|
||||
combo: string;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
export type ShortcutOptions = Record<ShortcutAction, ShortcutBinding>;
|
||||
|
||||
export interface ShortcutActionMeta {
|
||||
label: string;
|
||||
description: string;
|
||||
allowInEditable?: boolean;
|
||||
}
|
||||
|
||||
const MODIFIER_ORDER = ['Ctrl', 'Meta', 'Alt', 'Shift'] as const;
|
||||
const MODIFIER_SET = new Set(MODIFIER_ORDER);
|
||||
|
||||
const KEY_ALIASES: Record<string, string> = {
|
||||
control: 'Ctrl',
|
||||
ctrl: 'Ctrl',
|
||||
command: 'Meta',
|
||||
cmd: 'Meta',
|
||||
meta: 'Meta',
|
||||
option: 'Alt',
|
||||
alt: 'Alt',
|
||||
shift: 'Shift',
|
||||
escape: 'Esc',
|
||||
esc: 'Esc',
|
||||
return: 'Enter',
|
||||
enter: 'Enter',
|
||||
tab: 'Tab',
|
||||
space: 'Space',
|
||||
' ': 'Space',
|
||||
backspace: 'Backspace',
|
||||
delete: 'Delete',
|
||||
del: 'Delete',
|
||||
arrowup: 'Up',
|
||||
up: 'Up',
|
||||
arrowdown: 'Down',
|
||||
down: 'Down',
|
||||
arrowleft: 'Left',
|
||||
left: 'Left',
|
||||
arrowright: 'Right',
|
||||
right: 'Right',
|
||||
pagedown: 'PageDown',
|
||||
pageup: 'PageUp',
|
||||
home: 'Home',
|
||||
end: 'End',
|
||||
insert: 'Insert',
|
||||
',': ',',
|
||||
'.': '.',
|
||||
'/': '/',
|
||||
';': ';',
|
||||
"'": "'",
|
||||
'[': '[',
|
||||
']': ']',
|
||||
'\\': '\\',
|
||||
'-': '-',
|
||||
'=': '=',
|
||||
'`': '`',
|
||||
};
|
||||
|
||||
export const SHORTCUT_ACTION_ORDER: ShortcutAction[] = [
|
||||
'runQuery',
|
||||
'focusSidebarSearch',
|
||||
'newQueryTab',
|
||||
'toggleLogPanel',
|
||||
'toggleTheme',
|
||||
'openShortcutManager',
|
||||
];
|
||||
|
||||
export const SHORTCUT_ACTION_META: Record<ShortcutAction, ShortcutActionMeta> = {
|
||||
runQuery: {
|
||||
label: '执行 SQL',
|
||||
description: '在当前查询页执行 SQL',
|
||||
},
|
||||
focusSidebarSearch: {
|
||||
label: '聚焦侧边栏搜索',
|
||||
description: '定位到左侧连接树搜索框',
|
||||
allowInEditable: true,
|
||||
},
|
||||
newQueryTab: {
|
||||
label: '新建查询页',
|
||||
description: '创建一个新的 SQL 查询标签页',
|
||||
},
|
||||
toggleLogPanel: {
|
||||
label: '切换日志面板',
|
||||
description: '打开或关闭 SQL 执行日志面板',
|
||||
},
|
||||
toggleTheme: {
|
||||
label: '切换主题',
|
||||
description: '在亮色和暗色主题之间切换',
|
||||
},
|
||||
openShortcutManager: {
|
||||
label: '打开快捷键管理',
|
||||
description: '打开快捷键设置面板',
|
||||
allowInEditable: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const DEFAULT_SHORTCUT_OPTIONS: ShortcutOptions = {
|
||||
runQuery: { combo: 'Ctrl+Shift+R', enabled: true },
|
||||
focusSidebarSearch: { combo: 'Ctrl+F', enabled: true },
|
||||
newQueryTab: { combo: 'Ctrl+Shift+N', enabled: true },
|
||||
toggleLogPanel: { combo: 'Ctrl+Shift+L', enabled: true },
|
||||
toggleTheme: { combo: 'Ctrl+Shift+D', enabled: true },
|
||||
openShortcutManager: { combo: 'Ctrl+,', enabled: true },
|
||||
};
|
||||
|
||||
const normalizeKeyToken = (value: string): string => {
|
||||
const token = String(value || '').trim();
|
||||
if (!token) return '';
|
||||
const alias = KEY_ALIASES[token.toLowerCase()];
|
||||
if (alias) return alias;
|
||||
if (/^f([1-9]|1[0-2])$/i.test(token)) {
|
||||
return token.toUpperCase();
|
||||
}
|
||||
if (token.length === 1) {
|
||||
return token === '+' ? '+' : token.toUpperCase();
|
||||
}
|
||||
return token.length > 1 ? token[0].toUpperCase() + token.slice(1).toLowerCase() : token;
|
||||
};
|
||||
|
||||
export const normalizeShortcutCombo = (combo: string): string => {
|
||||
const raw = String(combo || '').trim();
|
||||
if (!raw) return '';
|
||||
|
||||
const pieces = raw
|
||||
.split('+')
|
||||
.map(part => part.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
const modifiers: string[] = [];
|
||||
let key = '';
|
||||
|
||||
pieces.forEach((part) => {
|
||||
const normalized = normalizeKeyToken(part);
|
||||
if (!normalized) return;
|
||||
if (MODIFIER_SET.has(normalized as typeof MODIFIER_ORDER[number])) {
|
||||
if (!modifiers.includes(normalized)) {
|
||||
modifiers.push(normalized);
|
||||
}
|
||||
return;
|
||||
}
|
||||
key = normalized;
|
||||
});
|
||||
|
||||
modifiers.sort((a, b) => MODIFIER_ORDER.indexOf(a as typeof MODIFIER_ORDER[number]) - MODIFIER_ORDER.indexOf(b as typeof MODIFIER_ORDER[number]));
|
||||
if (!key) {
|
||||
return modifiers.join('+');
|
||||
}
|
||||
return [...modifiers, key].join('+');
|
||||
};
|
||||
|
||||
const normalizeKeyboardKey = (key: string): string => {
|
||||
const token = String(key || '').trim();
|
||||
if (!token) return '';
|
||||
const alias = KEY_ALIASES[token.toLowerCase()];
|
||||
if (alias) return alias;
|
||||
if (token.length === 1) {
|
||||
if (token === ' ') return 'Space';
|
||||
return token.toUpperCase();
|
||||
}
|
||||
if (/^f([1-9]|1[0-2])$/i.test(token)) {
|
||||
return token.toUpperCase();
|
||||
}
|
||||
return token.length > 1 ? token[0].toUpperCase() + token.slice(1) : token;
|
||||
};
|
||||
|
||||
export const eventToShortcut = (event: KeyboardEvent | ReactKeyboardEvent): string => {
|
||||
const key = normalizeKeyboardKey(event.key);
|
||||
if (!key || MODIFIER_SET.has(key as typeof MODIFIER_ORDER[number])) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const modifiers: string[] = [];
|
||||
if (event.ctrlKey) modifiers.push('Ctrl');
|
||||
if (event.metaKey) modifiers.push('Meta');
|
||||
if (event.altKey) modifiers.push('Alt');
|
||||
if (event.shiftKey) modifiers.push('Shift');
|
||||
|
||||
return normalizeShortcutCombo([...modifiers, key].join('+'));
|
||||
};
|
||||
|
||||
export const isShortcutMatch = (event: KeyboardEvent | ReactKeyboardEvent, combo: string): boolean => {
|
||||
const expected = normalizeShortcutCombo(combo);
|
||||
if (!expected) return false;
|
||||
const actual = eventToShortcut(event);
|
||||
return actual === expected;
|
||||
};
|
||||
|
||||
export const hasModifierKey = (combo: string): boolean => {
|
||||
const normalized = normalizeShortcutCombo(combo);
|
||||
if (!normalized) return false;
|
||||
return normalized.split('+').some(part => MODIFIER_SET.has(part as typeof MODIFIER_ORDER[number]));
|
||||
};
|
||||
|
||||
export const cloneShortcutOptions = (value: ShortcutOptions): ShortcutOptions => {
|
||||
return SHORTCUT_ACTION_ORDER.reduce((acc, action) => {
|
||||
acc[action] = {
|
||||
combo: normalizeShortcutCombo(value[action]?.combo || DEFAULT_SHORTCUT_OPTIONS[action].combo),
|
||||
enabled: value[action]?.enabled !== false,
|
||||
};
|
||||
return acc;
|
||||
}, {} as ShortcutOptions);
|
||||
};
|
||||
|
||||
export const sanitizeShortcutOptions = (value: unknown): ShortcutOptions => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const defaults = cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS);
|
||||
|
||||
SHORTCUT_ACTION_ORDER.forEach((action) => {
|
||||
const actionRaw = raw[action];
|
||||
if (!actionRaw || typeof actionRaw !== 'object') {
|
||||
return;
|
||||
}
|
||||
const binding = actionRaw as Record<string, unknown>;
|
||||
const combo = normalizeShortcutCombo(String(binding.combo || defaults[action].combo));
|
||||
defaults[action] = {
|
||||
combo: combo || defaults[action].combo,
|
||||
enabled: binding.enabled === false ? false : true,
|
||||
};
|
||||
});
|
||||
|
||||
return defaults;
|
||||
};
|
||||
|
||||
export const isEditableElement = (target: EventTarget | null): boolean => {
|
||||
if (!(target instanceof HTMLElement)) {
|
||||
return false;
|
||||
}
|
||||
const tag = target.tagName.toLowerCase();
|
||||
if (target.isContentEditable) {
|
||||
return true;
|
||||
}
|
||||
if (tag === 'input' || tag === 'textarea' || tag === 'select') {
|
||||
return true;
|
||||
}
|
||||
if (target.closest('.monaco-editor, .monaco-inputbox, .ant-select, .ant-picker, .ant-input')) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export const getShortcutDisplay = (combo: string): string => {
|
||||
const normalized = normalizeShortcutCombo(combo);
|
||||
return normalized || '-';
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
export type FilterCondition = {
|
||||
id?: number;
|
||||
enabled?: boolean;
|
||||
logic?: 'AND' | 'OR';
|
||||
column?: string;
|
||||
op?: string;
|
||||
value?: string;
|
||||
@@ -36,7 +37,7 @@ export const quoteIdentPart = (dbType: string, ident: string) => {
|
||||
if (!raw) return raw;
|
||||
const dbTypeLower = (dbType || '').toLowerCase();
|
||||
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros' || dbTypeLower === 'sphinx' || dbTypeLower === 'tdengine') {
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros' || dbTypeLower === 'sphinx' || dbTypeLower === 'tdengine' || dbTypeLower === 'clickhouse') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
|
||||
@@ -49,6 +50,11 @@ export const quoteIdentPart = (dbType: string, ident: string) => {
|
||||
return raw;
|
||||
}
|
||||
|
||||
// SQL Server 使用 [bracket] 标识符
|
||||
if (dbTypeLower === 'sqlserver' || dbTypeLower === 'mssql') {
|
||||
return `[${raw.replace(/]/g, ']]')}]`;
|
||||
}
|
||||
|
||||
// 其他数据库默认加双引号
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
};
|
||||
@@ -133,6 +139,42 @@ export const buildOrderBySQL = (
|
||||
return '';
|
||||
};
|
||||
|
||||
export const buildPaginatedSelectSQL = (
|
||||
dbType: string,
|
||||
baseSql: string,
|
||||
orderBySQL: string,
|
||||
limit: number,
|
||||
offset: number,
|
||||
) => {
|
||||
const normalizedType = String(dbType || '').trim().toLowerCase();
|
||||
const safeLimit = Math.max(0, Math.floor(Number(limit) || 0));
|
||||
const safeOffset = Math.max(0, Math.floor(Number(offset) || 0));
|
||||
const base = String(baseSql || '').trim();
|
||||
const orderBy = String(orderBySQL || '');
|
||||
|
||||
if (!base || safeLimit <= 0) {
|
||||
return `${base}${orderBy}`;
|
||||
}
|
||||
|
||||
switch (normalizedType) {
|
||||
case 'oracle': {
|
||||
const orderedSql = `${base}${orderBy}`;
|
||||
const upperBound = safeOffset + safeLimit;
|
||||
if (safeOffset <= 0) {
|
||||
return `SELECT * FROM (${orderedSql}) WHERE ROWNUM <= ${upperBound}`;
|
||||
}
|
||||
return `SELECT * FROM (SELECT "__gonavi_page__".*, ROWNUM "__gonavi_rn__" FROM (${orderedSql}) "__gonavi_page__" WHERE ROWNUM <= ${upperBound}) WHERE "__gonavi_rn__" > ${safeOffset}`;
|
||||
}
|
||||
case 'sqlserver':
|
||||
case 'mssql': {
|
||||
const effectiveOrderBy = orderBy.trim() ? orderBy : ' ORDER BY (SELECT NULL)';
|
||||
return `${base}${effectiveOrderBy} OFFSET ${safeOffset} ROWS FETCH NEXT ${safeLimit} ROWS ONLY`;
|
||||
}
|
||||
default:
|
||||
return `${base}${orderBy} LIMIT ${safeLimit} OFFSET ${safeOffset}`;
|
||||
}
|
||||
};
|
||||
|
||||
export const parseListValues = (val: string) => {
|
||||
const raw = (val || '').trim();
|
||||
if (!raw) return [];
|
||||
@@ -142,8 +184,12 @@ export const parseListValues = (val: string) => {
|
||||
.filter(Boolean);
|
||||
};
|
||||
|
||||
const normalizeConditionLogic = (logic: unknown): 'AND' | 'OR' => {
|
||||
return String(logic || '').trim().toUpperCase() === 'OR' ? 'OR' : 'AND';
|
||||
};
|
||||
|
||||
export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) => {
|
||||
const whereParts: string[] = [];
|
||||
const whereParts: Array<{ expr: string; logic: 'AND' | 'OR' }> = [];
|
||||
|
||||
(conditions || []).forEach((cond) => {
|
||||
if (cond?.enabled === false) return;
|
||||
@@ -152,10 +198,17 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
const column = (cond?.column || '').trim();
|
||||
const value = (cond?.value ?? '').toString();
|
||||
const value2 = (cond?.value2 ?? '').toString();
|
||||
const logic = normalizeConditionLogic(cond?.logic);
|
||||
|
||||
const appendWherePart = (expr: string) => {
|
||||
const normalizedExpr = String(expr || '').trim();
|
||||
if (!normalizedExpr) return;
|
||||
whereParts.push({ expr: normalizedExpr, logic });
|
||||
};
|
||||
|
||||
if (op === 'CUSTOM') {
|
||||
const expr = value.trim();
|
||||
if (expr) whereParts.push(`(${expr})`);
|
||||
if (expr) appendWherePart(`(${expr})`);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -165,80 +218,80 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
|
||||
switch (op) {
|
||||
case 'IS_NULL':
|
||||
whereParts.push(`${col} IS NULL`);
|
||||
appendWherePart(`${col} IS NULL`);
|
||||
return;
|
||||
case 'IS_NOT_NULL':
|
||||
whereParts.push(`${col} IS NOT NULL`);
|
||||
appendWherePart(`${col} IS NOT NULL`);
|
||||
return;
|
||||
case 'IS_EMPTY':
|
||||
// 兼容:空值通常理解为 NULL 或空字符串
|
||||
whereParts.push(`(${col} IS NULL OR ${col} = '')`);
|
||||
appendWherePart(`(${col} IS NULL OR ${col} = '')`);
|
||||
return;
|
||||
case 'IS_NOT_EMPTY':
|
||||
whereParts.push(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
appendWherePart(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
return;
|
||||
case 'BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
appendWherePart(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
appendWherePart(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} IN (${list})`);
|
||||
appendWherePart(`${col} IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} NOT IN (${list})`);
|
||||
appendWherePart(`${col} NOT IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case '=':
|
||||
@@ -249,7 +302,7 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
case '>=': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
@@ -257,16 +310,23 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
if (op.toUpperCase() === 'LIKE') {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : '';
|
||||
if (whereParts.length === 0) return '';
|
||||
|
||||
let whereExpr = `(${whereParts[0].expr})`;
|
||||
for (let i = 1; i < whereParts.length; i++) {
|
||||
const part = whereParts[i];
|
||||
whereExpr = `(${whereExpr} ${part.logic} (${part.expr}))`;
|
||||
}
|
||||
return `WHERE ${whereExpr}`;
|
||||
};
|
||||
|
||||
2
frontend/vite.config.d.ts
vendored
Normal file
2
frontend/vite.config.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
declare const _default: import("vite").UserConfig;
|
||||
export default _default;
|
||||
15
frontend/vite.config.js
Normal file
15
frontend/vite.config.js
Normal file
@@ -0,0 +1,15 @@
|
||||
import { defineConfig } from 'vite';
|
||||
import react from '@vitejs/plugin-react';
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
server: {
|
||||
host: '127.0.0.1',
|
||||
port: 5173,
|
||||
strictPort: true,
|
||||
},
|
||||
build: {
|
||||
outDir: 'dist', // Standard Wails output directory
|
||||
emptyOutDir: true,
|
||||
}
|
||||
});
|
||||
@@ -5,6 +5,7 @@ import react from '@vitejs/plugin-react'
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
server: {
|
||||
host: '127.0.0.1',
|
||||
port: 5173,
|
||||
strictPort: true,
|
||||
},
|
||||
|
||||
33
frontend/wailsjs/go/app/App.d.ts
vendored
33
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -1,17 +1,26 @@
|
||||
// Cynhyrchwyd y ffeil hon yn awtomatig. PEIDIWCH Â MODIWL
|
||||
// This file is automatically generated. DO NOT EDIT
|
||||
import {connection} from '../models';
|
||||
import {time} from '../models';
|
||||
import {sync} from '../models';
|
||||
import {redis} from '../models';
|
||||
|
||||
export function ApplyChanges(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:connection.ChangeSet):Promise<connection.QueryResult>;
|
||||
|
||||
export function CancelQuery(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function CancelSQLFileExecution(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckDriverNetworkStatus():Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckForUpdates():Promise<connection.QueryResult>;
|
||||
|
||||
export function CleanupStaleQueries(arg1:time.Duration):Promise<void>;
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ConfigureGlobalProxy(arg1:boolean,arg2:connection.ProxyConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function CreateDatabase(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
@@ -32,6 +41,12 @@ export function DBGetTriggers(arg1:connection.ConnectionConfig,arg2:string,arg3:
|
||||
|
||||
export function DBQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryIsolated(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryMulti(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryWithCancel(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
@@ -52,6 +67,8 @@ export function DropTable(arg1:connection.ConnectionConfig,arg2:string,arg3:stri
|
||||
|
||||
export function DropView(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExecuteSQLFile(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportData(arg1:Array<Record<string, any>>,arg2:Array<string>,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportDatabaseSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:boolean):Promise<connection.QueryResult>;
|
||||
@@ -64,6 +81,8 @@ export function ExportTablesDataSQL(arg1:connection.ConnectionConfig,arg2:string
|
||||
|
||||
export function ExportTablesSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>,arg4:boolean):Promise<connection.QueryResult>;
|
||||
|
||||
export function GenerateQueryID():Promise<string>;
|
||||
|
||||
export function GetAppInfo():Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverStatusList(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
@@ -72,6 +91,8 @@ export function GetDriverVersionList(arg1:string,arg2:string):Promise<connection
|
||||
|
||||
export function GetDriverVersionPackageSize(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetGlobalProxyConfig():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportConfigFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportData(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
@@ -94,6 +115,8 @@ export function MySQLQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:str
|
||||
|
||||
export function MySQLShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function OpenDownloadedUpdateDirectory():Promise<connection.QueryResult>;
|
||||
|
||||
export function OpenSQLFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function PreviewImportFile(arg1:string):Promise<connection.QueryResult>;
|
||||
@@ -114,13 +137,15 @@ export function RedisGetServerInfo(arg1:connection.ConnectionConfig):Promise<con
|
||||
|
||||
export function RedisGetValue(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisKeyExists(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisListPush(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisListSet(arg1:connection.ConnectionConfig,arg2:string,arg3:number,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisRenameKey(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:number,arg4:number):Promise<connection.QueryResult>;
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:any,arg4:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSelectDB(arg1:connection.ConnectionConfig,arg2:number):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -158,8 +183,12 @@ export function ResolveDriverPackageDownloadURL(arg1:string,arg2:string):Promise
|
||||
|
||||
export function ResolveDriverRepositoryURL(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDatabaseFile(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverDownloadDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverPackageDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverPackageFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectSSHKeyFile(arg1:string):Promise<connection.QueryResult>;
|
||||
@@ -167,3 +196,5 @@ export function SelectSSHKeyFile(arg1:string):Promise<connection.QueryResult>;
|
||||
export function SetWindowTranslucency(arg1:number,arg2:number):Promise<void>;
|
||||
|
||||
export function TestConnection(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function TruncateTables(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -6,6 +6,14 @@ export function ApplyChanges(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ApplyChanges'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function CancelQuery(arg1) {
|
||||
return window['go']['app']['App']['CancelQuery'](arg1);
|
||||
}
|
||||
|
||||
export function CancelSQLFileExecution(arg1) {
|
||||
return window['go']['app']['App']['CancelSQLFileExecution'](arg1);
|
||||
}
|
||||
|
||||
export function CheckDriverNetworkStatus() {
|
||||
return window['go']['app']['App']['CheckDriverNetworkStatus']();
|
||||
}
|
||||
@@ -14,10 +22,18 @@ export function CheckForUpdates() {
|
||||
return window['go']['app']['App']['CheckForUpdates']();
|
||||
}
|
||||
|
||||
export function CleanupStaleQueries(arg1) {
|
||||
return window['go']['app']['App']['CleanupStaleQueries'](arg1);
|
||||
}
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1) {
|
||||
return window['go']['app']['App']['ConfigureDriverRuntimeDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function ConfigureGlobalProxy(arg1, arg2) {
|
||||
return window['go']['app']['App']['ConfigureGlobalProxy'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function CreateDatabase(arg1, arg2) {
|
||||
return window['go']['app']['App']['CreateDatabase'](arg1, arg2);
|
||||
}
|
||||
@@ -58,6 +74,18 @@ export function DBQuery(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQuery'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBQueryIsolated(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQueryIsolated'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBQueryMulti(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DBQueryMulti'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DBQueryWithCancel(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DBQueryWithCancel'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DBShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
@@ -98,6 +126,10 @@ export function DropView(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DropView'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExecuteSQLFile(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExecuteSQLFile'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function ExportData(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportData'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
@@ -122,6 +154,10 @@ export function ExportTablesSQL(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportTablesSQL'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function GenerateQueryID() {
|
||||
return window['go']['app']['App']['GenerateQueryID']();
|
||||
}
|
||||
|
||||
export function GetAppInfo() {
|
||||
return window['go']['app']['App']['GetAppInfo']();
|
||||
}
|
||||
@@ -138,6 +174,10 @@ export function GetDriverVersionPackageSize(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverVersionPackageSize'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetGlobalProxyConfig() {
|
||||
return window['go']['app']['App']['GetGlobalProxyConfig']();
|
||||
}
|
||||
|
||||
export function ImportConfigFile() {
|
||||
return window['go']['app']['App']['ImportConfigFile']();
|
||||
}
|
||||
@@ -182,6 +222,10 @@ export function MySQLShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['MySQLShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function OpenDownloadedUpdateDirectory() {
|
||||
return window['go']['app']['App']['OpenDownloadedUpdateDirectory']();
|
||||
}
|
||||
|
||||
export function OpenSQLFile() {
|
||||
return window['go']['app']['App']['OpenSQLFile']();
|
||||
}
|
||||
@@ -222,6 +266,10 @@ export function RedisGetValue(arg1, arg2) {
|
||||
return window['go']['app']['App']['RedisGetValue'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RedisKeyExists(arg1, arg2) {
|
||||
return window['go']['app']['App']['RedisKeyExists'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RedisListPush(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisListPush'](arg1, arg2, arg3);
|
||||
}
|
||||
@@ -310,10 +358,18 @@ export function ResolveDriverRepositoryURL(arg1) {
|
||||
return window['go']['app']['App']['ResolveDriverRepositoryURL'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDatabaseFile(arg1, arg2) {
|
||||
return window['go']['app']['App']['SelectDatabaseFile'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function SelectDriverDownloadDirectory(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverDownloadDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDriverPackageDirectory(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverPackageDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDriverPackageFile(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverPackageFile'](arg1);
|
||||
}
|
||||
@@ -329,3 +385,7 @@ export function SetWindowTranslucency(arg1, arg2) {
|
||||
export function TestConnection(arg1) {
|
||||
return window['go']['app']['App']['TestConnection'](arg1);
|
||||
}
|
||||
|
||||
export function TruncateTables(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['TruncateTables'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
@@ -48,6 +48,24 @@ export namespace connection {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
export class HTTPTunnelConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new HTTPTunnelConfig(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.host = source["host"];
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
}
|
||||
}
|
||||
export class ProxyConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
@@ -96,10 +114,16 @@ export namespace connection {
|
||||
password: string;
|
||||
savePassword?: boolean;
|
||||
database: string;
|
||||
useSSL?: boolean;
|
||||
sslMode?: string;
|
||||
sslCertPath?: string;
|
||||
sslKeyPath?: string;
|
||||
useSSH: boolean;
|
||||
ssh: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
useHttpTunnel?: boolean;
|
||||
httpTunnel?: HTTPTunnelConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
@@ -130,10 +154,16 @@ export namespace connection {
|
||||
this.password = source["password"];
|
||||
this.savePassword = source["savePassword"];
|
||||
this.database = source["database"];
|
||||
this.useSSL = source["useSSL"];
|
||||
this.sslMode = source["sslMode"];
|
||||
this.sslCertPath = source["sslCertPath"];
|
||||
this.sslKeyPath = source["sslKeyPath"];
|
||||
this.useSSH = source["useSSH"];
|
||||
this.ssh = this.convertValues(source["ssh"], SSHConfig);
|
||||
this.useProxy = source["useProxy"];
|
||||
this.proxy = this.convertValues(source["proxy"], ProxyConfig);
|
||||
this.useHttpTunnel = source["useHttpTunnel"];
|
||||
this.httpTunnel = this.convertValues(source["httpTunnel"], HTTPTunnelConfig);
|
||||
this.driver = source["driver"];
|
||||
this.dsn = source["dsn"];
|
||||
this.timeout = source["timeout"];
|
||||
@@ -171,11 +201,13 @@ export namespace connection {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class QueryResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
data: any;
|
||||
fields?: string[];
|
||||
queryId?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new QueryResult(source);
|
||||
@@ -187,6 +219,7 @@ export namespace connection {
|
||||
this.message = source["message"];
|
||||
this.data = source["data"];
|
||||
this.fields = source["fields"];
|
||||
this.queryId = source["queryId"];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,6 +277,9 @@ export namespace sync {
|
||||
mode: string;
|
||||
jobId?: string;
|
||||
autoAddColumns?: boolean;
|
||||
targetTableStrategy?: string;
|
||||
createIndexes?: boolean;
|
||||
mongoCollectionName?: string;
|
||||
tableOptions?: Record<string, TableOptions>;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
@@ -259,6 +295,9 @@ export namespace sync {
|
||||
this.mode = source["mode"];
|
||||
this.jobId = source["jobId"];
|
||||
this.autoAddColumns = source["autoAddColumns"];
|
||||
this.targetTableStrategy = source["targetTableStrategy"];
|
||||
this.createIndexes = source["createIndexes"];
|
||||
this.mongoCollectionName = source["mongoCollectionName"];
|
||||
this.tableOptions = this.convertValues(source["tableOptions"], TableOptions, true);
|
||||
}
|
||||
|
||||
|
||||
16
go.mod
16
go.mod
@@ -5,8 +5,10 @@ go 1.24.3
|
||||
require (
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3
|
||||
gitee.com/chunanyong/dm v1.8.22
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5
|
||||
github.com/go-sql-driver/mysql v1.9.3
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/highgo/pq-sm3 v0.0.0
|
||||
github.com/lib/pq v1.11.1
|
||||
github.com/microsoft/go-mssqldb v1.9.6
|
||||
@@ -15,6 +17,7 @@ require (
|
||||
github.com/taosdata/driver-go/v3 v3.7.8
|
||||
github.com/wailsapp/wails/v2 v2.11.0
|
||||
github.com/xuri/excelize/v2 v2.10.0
|
||||
go.mongodb.org/mongo-driver v1.17.9
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
golang.org/x/mod v0.32.0
|
||||
@@ -25,6 +28,8 @@ require (
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/ClickHouse/ch-go v0.71.0 // indirect
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.5.1 // indirect
|
||||
github.com/bep/debounce v1.2.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
@@ -36,6 +41,8 @@ require (
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-arm64 v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/windows-amd64 v0.3.3 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
@@ -44,9 +51,8 @@ require (
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/flatbuffers v25.12.19+incompatible // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.18.3 // indirect
|
||||
@@ -61,7 +67,9 @@ require (
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/paulmach/orb v0.12.0 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.25 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
@@ -70,6 +78,7 @@ require (
|
||||
github.com/richardlehane/msoleps v1.0.4 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/samber/lo v1.49.1 // indirect
|
||||
github.com/segmentio/asm v1.2.1 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/tiendc/go-deepcopy v1.7.1 // indirect
|
||||
github.com/tkrajina/go-reflector v0.5.8 // indirect
|
||||
@@ -84,6 +93,9 @@ require (
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 // indirect
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
||||
github.com/zeebo/xxh3 v1.1.0 // indirect
|
||||
go.opentelemetry.io/otel v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.39.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
|
||||
79
go.sum
79
go.sum
@@ -16,6 +16,10 @@ github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuo
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/ClickHouse/ch-go v0.71.0 h1:bUdZ/EZj/LcVHsMqaRUP2holqygrPWQKeMjc6nZoyRM=
|
||||
github.com/ClickHouse/ch-go v0.71.0/go.mod h1:NwbNc+7jaqfY58dmdDUbG4Jl22vThgx1cYjBw0vtgXw=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0 h1:fUR05TrF1GyvLDa/mAQjkx7KbgwdLRffs2n9O3WobtE=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0/go.mod h1:o6jf7JM/zveWC/PP277BLxjHy5KjnGX/jfljhM4s34g=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/apache/arrow-go/v18 v18.5.1 h1:yaQ6zxMGgf9YCYw4/oaeOU3AULySDlAYDOcnr4LdHdI=
|
||||
@@ -52,6 +56,10 @@ github.com/duckdb/duckdb-go/v2 v2.5.5 h1:TlK8ipnzoKW2aNrjGqRkFWLCDpJDxR/VwH8ezEc
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5/go.mod h1:6uIbC3gz36NCEygECzboygOo/Z9TeVwox/puG+ohWV0=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
|
||||
github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw=
|
||||
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
|
||||
github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo=
|
||||
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
|
||||
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
@@ -62,19 +70,23 @@ github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
|
||||
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
|
||||
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/flatbuffers v25.12.19+incompatible h1:haMV2JRRJCe1998HeW/p0X9UaMTK6SDo0ffLn2+DbLs=
|
||||
github.com/google/flatbuffers v25.12.19+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
@@ -83,20 +95,29 @@ github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
|
||||
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4=
|
||||
github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e h1:Q3+PugElBCf4PFpxhErSzU3/PY5sFL5Z6rfv4AbGAck=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw=
|
||||
github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/labstack/echo/v4 v4.13.3 h1:pwhpCPrTl5qry5HRdM5FwdXnhXSLSY+WE+YQSeCaafY=
|
||||
@@ -134,8 +155,16 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/paulmach/orb v0.12.0 h1:z+zOwjmG3MyEEqzv92UN49Lg1JFYx0L9GpGKNVDKk1s=
|
||||
github.com/paulmach/orb v0.12.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
|
||||
github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY=
|
||||
github.com/pierrec/lz4/v4 v4.1.25 h1:kocOqRffaIbU5djlIBr7Wh+cx82C0vtFb0fOurZHqD0=
|
||||
github.com/pierrec/lz4/v4 v4.1.25/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
@@ -159,6 +188,8 @@ github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
|
||||
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
|
||||
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
|
||||
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
|
||||
@@ -169,6 +200,7 @@ github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpE
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
@@ -176,6 +208,7 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8 h1:N2H6HLLZH2ve2ipcoFgG9BJS+yW0XksqNYwEdSmHaJk=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8/go.mod h1:gSxBEPOueMg0rTmMO1Ug6aeD7AwGdDGvUtLrsDTTpYc=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tiendc/go-deepcopy v1.7.1 h1:LnubftI6nYaaMOcaz0LphzwraqN8jiWTwm416sitff4=
|
||||
github.com/tiendc/go-deepcopy v1.7.1/go.mod h1:4bKjNC2r7boYOkD2IOuZpYjmlDdzjbpTRyCx+goBCJQ=
|
||||
github.com/tkrajina/go-reflector v0.5.8 h1:yPADHrwmUbMq4RGEyaOUpz2H90sRsETNVpjzo3DLVQQ=
|
||||
@@ -192,8 +225,10 @@ github.com/wailsapp/wails/v2 v2.11.0 h1:seLacV8pqupq32IjS4Y7V8ucab0WZwtK6VvUVxSB
|
||||
github.com/wailsapp/wails/v2 v2.11.0/go.mod h1:jrf0ZaM6+GBc1wRmXsM8cIvzlg0karYin3erahI4+0k=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
|
||||
github.com/xdg-go/scram v1.2.0 h1:bYKF2AEwG5rqd1BumT4gAnvwU/M9nBp2pTSxeZw7Wvs=
|
||||
github.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=
|
||||
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
|
||||
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
||||
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
||||
github.com/xuri/efp v0.0.1 h1:fws5Rv3myXyYni8uwj2qKjVaRP30PdjeYe2Y6FDsCL8=
|
||||
@@ -202,38 +237,66 @@ github.com/xuri/excelize/v2 v2.10.0 h1:8aKsP7JD39iKLc6dH5Tw3dgV3sPRh8uRVXu/fMstf
|
||||
github.com/xuri/excelize/v2 v2.10.0/go.mod h1:SC5TzhQkaOsTWpANfm+7bJCldzcnU/jrhqkTi/iBHBU=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 h1:+C0TIdyyYmzadGaL/HBLbf3WdLgC29pgyhTjAT/0nuE=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
|
||||
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/xxh3 v1.1.0 h1:s7DLGDK45Dyfg7++yxI0khrfwq9661w9EN78eP/UZVs=
|
||||
github.com/zeebo/xxh3 v1.1.0/go.mod h1:IisAie1LELR4xhVinxWS5+zf1lA4p0MW4T+w+W07F5s=
|
||||
go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
|
||||
go.mongodb.org/mongo-driver v1.17.9 h1:IexDdCuuNJ3BHrELgBlyaH9p60JXAvdzWR128q+U5tU=
|
||||
go.mongodb.org/mongo-driver v1.17.9/go.mod h1:LlOhpH5NUEfhxcAwG0UEkMqwYcc4JU18gtCdGudk/tQ=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU=
|
||||
golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ=
|
||||
golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@@ -260,15 +323,25 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
@@ -8,6 +8,8 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
@@ -16,6 +18,7 @@ import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const dbCachePingInterval = 30 * time.Second
|
||||
@@ -25,19 +28,27 @@ type cachedDatabase struct {
|
||||
lastPing time.Time
|
||||
}
|
||||
|
||||
type queryContext struct {
|
||||
cancel context.CancelFunc
|
||||
started time.Time
|
||||
}
|
||||
|
||||
// App struct
|
||||
type App struct {
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
queryMu sync.RWMutex
|
||||
runningQueries map[string]queryContext // queryID -> cancelFunc and start time
|
||||
}
|
||||
|
||||
// NewApp creates a new App application struct
|
||||
func NewApp() *App {
|
||||
return &App{
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
runningQueries: make(map[string]queryContext),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,24 +85,142 @@ func (a *App) Shutdown(ctx context.Context) {
|
||||
logger.Close()
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
func normalizeCacheKeyConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := config
|
||||
normalized.Type = strings.ToLower(strings.TrimSpace(normalized.Type))
|
||||
// timeout 仅用于 Query/Ping 控制,不应作为物理连接复用键的一部分。
|
||||
normalized.Timeout = 0
|
||||
normalized.SavePassword = false
|
||||
|
||||
if !normalized.UseSSH {
|
||||
normalized.SSH = connection.SSHConfig{}
|
||||
}
|
||||
if !config.UseProxy {
|
||||
config.Proxy = connection.ProxyConfig{}
|
||||
if !normalized.UseProxy {
|
||||
normalized.Proxy = connection.ProxyConfig{}
|
||||
}
|
||||
if !normalized.UseHTTPTunnel {
|
||||
normalized.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
}
|
||||
|
||||
b, _ := json.Marshal(config)
|
||||
if isFileDatabaseType(normalized.Type) {
|
||||
dsn := strings.TrimSpace(normalized.Host)
|
||||
if dsn == "" {
|
||||
dsn = strings.TrimSpace(normalized.Database)
|
||||
}
|
||||
if dsn == "" {
|
||||
dsn = ":memory:"
|
||||
}
|
||||
|
||||
// DuckDB/SQLite 仅基于文件来源识别连接,其他网络字段不参与键计算。
|
||||
normalized.Host = dsn
|
||||
normalized.Database = ""
|
||||
normalized.Port = 0
|
||||
normalized.User = ""
|
||||
normalized.Password = ""
|
||||
normalized.URI = ""
|
||||
normalized.Hosts = nil
|
||||
normalized.Topology = ""
|
||||
normalized.MySQLReplicaUser = ""
|
||||
normalized.MySQLReplicaPassword = ""
|
||||
normalized.ReplicaSet = ""
|
||||
normalized.AuthSource = ""
|
||||
normalized.ReadPreference = ""
|
||||
normalized.MongoSRV = false
|
||||
normalized.MongoAuthMechanism = ""
|
||||
normalized.MongoReplicaUser = ""
|
||||
normalized.MongoReplicaPassword = ""
|
||||
normalized.UseHTTPTunnel = false
|
||||
normalized.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
func resolveFileDatabaseDSN(config connection.ConnectionConfig) string {
|
||||
dsn := strings.TrimSpace(config.Host)
|
||||
if dsn == "" {
|
||||
dsn = strings.TrimSpace(config.Database)
|
||||
}
|
||||
if dsn == "" {
|
||||
dsn = ":memory:"
|
||||
}
|
||||
return dsn
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
normalized := normalizeCacheKeyConfig(config)
|
||||
b, _ := json.Marshal(normalized)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func shortCacheKey(cacheKey string) string {
|
||||
shortKey := cacheKey
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
return shortKey
|
||||
}
|
||||
|
||||
func shouldRefreshCachedConnection(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
normalized := strings.ToLower(normalizeErrorMessage(err))
|
||||
if normalized == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
patterns := []string{
|
||||
"invalid connection",
|
||||
"bad connection",
|
||||
"database is closed",
|
||||
"connection is already closed",
|
||||
"use of closed network connection",
|
||||
"broken pipe",
|
||||
"connection reset by peer",
|
||||
"server has gone away",
|
||||
"eof",
|
||||
}
|
||||
for _, pattern := range patterns {
|
||||
if strings.Contains(normalized, pattern) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *App) invalidateCachedDatabase(config connection.ConnectionConfig, reason error) bool {
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
key := getCacheKey(effectiveConfig)
|
||||
shortKey := shortCacheKey(key)
|
||||
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
|
||||
entry, exists := a.dbCache[key]
|
||||
if !exists || entry.inst == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if closeErr := entry.inst.Close(); closeErr != nil {
|
||||
logger.Error(closeErr, "关闭失效缓存连接失败:缓存Key=%s", shortKey)
|
||||
}
|
||||
delete(a.dbCache, key)
|
||||
if reason != nil {
|
||||
logger.Errorf("检测到连接失效,已清理缓存连接:%s 缓存Key=%s 原因=%s", formatConnSummary(effectiveConfig), shortKey, normalizeErrorMessage(reason))
|
||||
} else {
|
||||
logger.Infof("已清理缓存连接:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func wrapConnectError(config connection.ConnectionConfig, err error) error {
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
err = sanitizeMongoConnectErrorLabel(config, err)
|
||||
|
||||
var netErr net.Error
|
||||
if errors.Is(err, context.DeadlineExceeded) || (errors.As(err, &netErr) && netErr.Timeout()) {
|
||||
@@ -105,6 +234,73 @@ func wrapConnectError(config connection.ConnectionConfig, err error) error {
|
||||
return withLogHint{err: err, logPath: logger.Path()}
|
||||
}
|
||||
|
||||
type errorMessageOverride struct {
|
||||
message string
|
||||
cause error
|
||||
}
|
||||
|
||||
func (e errorMessageOverride) Error() string {
|
||||
return e.message
|
||||
}
|
||||
|
||||
func (e errorMessageOverride) Unwrap() error {
|
||||
return e.cause
|
||||
}
|
||||
|
||||
func sanitizeMongoConnectErrorLabel(config connection.ConnectionConfig, err error) error {
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
if strings.ToLower(strings.TrimSpace(config.Type)) != "mongodb" {
|
||||
return err
|
||||
}
|
||||
if mongoConnectUsesTLS(config) {
|
||||
return err
|
||||
}
|
||||
original := err.Error()
|
||||
rewritten := strings.ReplaceAll(original, "SSL 主库凭据", "主库凭据")
|
||||
rewritten = strings.ReplaceAll(rewritten, "SSL 从库凭据", "从库凭据")
|
||||
if rewritten == original {
|
||||
return err
|
||||
}
|
||||
return errorMessageOverride{
|
||||
message: rewritten,
|
||||
cause: err,
|
||||
}
|
||||
}
|
||||
|
||||
func mongoConnectUsesTLS(config connection.ConnectionConfig) bool {
|
||||
if config.UseSSL {
|
||||
return true
|
||||
}
|
||||
uriText := strings.TrimSpace(config.URI)
|
||||
if uriText == "" {
|
||||
return false
|
||||
}
|
||||
parsed, err := url.Parse(uriText)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
for _, key := range []string{"tls", "ssl"} {
|
||||
if enabled, known := parseMongoBool(parsed.Query().Get(key)); known {
|
||||
return enabled
|
||||
}
|
||||
}
|
||||
return strings.EqualFold(strings.TrimSpace(parsed.Scheme), "mongodb+srv")
|
||||
}
|
||||
|
||||
func parseMongoBool(raw string) (enabled bool, known bool) {
|
||||
value := strings.ToLower(strings.TrimSpace(raw))
|
||||
switch value {
|
||||
case "1", "true", "t", "yes", "y", "on", "required":
|
||||
return true, true
|
||||
case "0", "false", "f", "no", "n", "off", "disable", "disabled":
|
||||
return false, true
|
||||
default:
|
||||
return false, false
|
||||
}
|
||||
}
|
||||
|
||||
type withLogHint struct {
|
||||
err error
|
||||
logPath string
|
||||
@@ -112,10 +308,15 @@ type withLogHint struct {
|
||||
|
||||
func (e withLogHint) Error() string {
|
||||
message := normalizeErrorMessage(e.err)
|
||||
if strings.TrimSpace(e.logPath) == "" {
|
||||
path := strings.TrimSpace(e.logPath)
|
||||
if path == "" {
|
||||
return message
|
||||
}
|
||||
return fmt.Sprintf("%s(详细日志:%s)", message, e.logPath)
|
||||
info, statErr := os.Stat(path)
|
||||
if statErr != nil || info.IsDir() || info.Size() <= 0 {
|
||||
return message
|
||||
}
|
||||
return fmt.Sprintf("%s(详细日志:%s)", message, path)
|
||||
}
|
||||
|
||||
func (e withLogHint) Unwrap() error {
|
||||
@@ -182,6 +383,12 @@ func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
if config.UseHTTPTunnel {
|
||||
b.WriteString(fmt.Sprintf(" HTTP隧道=%s:%d", strings.TrimSpace(config.HTTPTunnel.Host), config.HTTPTunnel.Port))
|
||||
if strings.TrimSpace(config.HTTPTunnel.User) != "" {
|
||||
b.WriteString(" HTTP隧道认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
if config.Type == "custom" {
|
||||
driver := strings.TrimSpace(config.Driver)
|
||||
@@ -207,16 +414,51 @@ func (a *App) getDatabase(config connection.ConnectionConfig) (db.Database, erro
|
||||
return a.getDatabaseWithPing(config, false)
|
||||
}
|
||||
|
||||
func (a *App) openDatabaseIsolated(config connection.ConnectionConfig) (db.Database, error) {
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(effectiveConfig.Type); !supported {
|
||||
if strings.TrimSpace(reason) == "" {
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(effectiveConfig.Type))
|
||||
}
|
||||
return nil, withLogHint{err: fmt.Errorf("%s", reason), logPath: logger.Path()}
|
||||
}
|
||||
|
||||
dbInst, err := db.NewDatabase(effectiveConfig.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
_ = dbInst.Close()
|
||||
return nil, wrapConnectError(effectiveConfig, proxyErr)
|
||||
}
|
||||
if err := dbInst.Connect(connectConfig); err != nil {
|
||||
_ = dbInst.Close()
|
||||
return nil, wrapConnectError(effectiveConfig, err)
|
||||
}
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing bool) (db.Database, error) {
|
||||
key := getCacheKey(config)
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
isFileDB := isFileDatabaseType(effectiveConfig.Type)
|
||||
|
||||
key := getCacheKey(effectiveConfig)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
if isFileDB {
|
||||
rawDSN := resolveFileDatabaseDSN(effectiveConfig)
|
||||
normalizedDSN := resolveFileDatabaseDSN(normalizeCacheKeyConfig(effectiveConfig))
|
||||
logger.Infof("文件库连接缓存探测:类型=%s 原始DSN=%s 归一化DSN=%s timeout=%ds forcePing=%t 缓存Key=%s",
|
||||
strings.TrimSpace(effectiveConfig.Type), rawDSN, normalizedDSN, effectiveConfig.Timeout, forcePing, shortKey)
|
||||
}
|
||||
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(config.Type); !supported {
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(effectiveConfig.Type); !supported {
|
||||
if strings.TrimSpace(reason) == "" {
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(config.Type))
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(effectiveConfig.Type))
|
||||
}
|
||||
// Best-effort cleanup: if cached instance exists for this exact config, close it.
|
||||
a.mu.Lock()
|
||||
@@ -232,6 +474,9 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
entry, ok := a.dbCache[key]
|
||||
a.mu.RUnlock()
|
||||
if ok {
|
||||
if isFileDB {
|
||||
logger.Infof("命中文件库连接缓存:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
needPing := forcePing
|
||||
if !needPing {
|
||||
lastPing := entry.lastPing
|
||||
@@ -241,6 +486,9 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
}
|
||||
|
||||
if !needPing {
|
||||
if isFileDB {
|
||||
logger.Infof("复用文件库连接缓存(免 Ping):类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return entry.inst, nil
|
||||
}
|
||||
|
||||
@@ -252,9 +500,12 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
a.dbCache[key] = cur
|
||||
}
|
||||
a.mu.Unlock()
|
||||
if isFileDB {
|
||||
logger.Infof("复用文件库连接缓存(Ping 成功):类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return entry.inst, nil
|
||||
} else {
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
}
|
||||
|
||||
// Ping failed: remove cached instance (best effort)
|
||||
@@ -266,26 +517,32 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
delete(a.dbCache, key)
|
||||
}
|
||||
a.mu.Unlock()
|
||||
if isFileDB {
|
||||
logger.Infof("文件库缓存连接已剔除,准备新建连接:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
}
|
||||
if isFileDB {
|
||||
logger.Infof("未命中文件库连接缓存,开始创建连接:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(config.Type)
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", effectiveConfig.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(effectiveConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", effectiveConfig.Type, shortKey)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(config)
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
wrapped := wrapConnectError(config, proxyErr)
|
||||
logger.Error(wrapped, "连接代理准备失败:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
wrapped := wrapConnectError(effectiveConfig, proxyErr)
|
||||
logger.Error(wrapped, "连接代理准备失败:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
if err := dbInst.Connect(connectConfig); err != nil {
|
||||
wrapped := wrapConnectError(config, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
wrapped := wrapConnectError(effectiveConfig, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
@@ -296,11 +553,54 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
a.mu.Unlock()
|
||||
// Prefer existing cached connection to avoid cache racing duplicates.
|
||||
_ = dbInst.Close()
|
||||
if isFileDB {
|
||||
logger.Infof("并发创建命中已存在文件库连接,关闭新建连接并复用缓存:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return existing.inst, nil
|
||||
}
|
||||
a.dbCache[key] = cachedDatabase{inst: dbInst, lastPing: now}
|
||||
a.mu.Unlock()
|
||||
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
// generateQueryID generates a unique ID for a query using UUID v4
|
||||
func generateQueryID() string {
|
||||
return "query-" + uuid.New().String()
|
||||
}
|
||||
|
||||
// CancelQuery cancels a running query by its ID
|
||||
func (a *App) CancelQuery(queryID string) connection.QueryResult {
|
||||
a.queryMu.Lock()
|
||||
defer a.queryMu.Unlock()
|
||||
|
||||
if ctx, exists := a.runningQueries[queryID]; exists {
|
||||
ctx.cancel()
|
||||
delete(a.runningQueries, queryID)
|
||||
logger.Infof("查询已取消:queryID=%s", queryID)
|
||||
return connection.QueryResult{Success: true, Message: "查询已取消"}
|
||||
}
|
||||
logger.Warnf("取消查询失败:queryID=%s 不存在或已完成", queryID)
|
||||
return connection.QueryResult{Success: false, Message: "查询不存在或已完成"}
|
||||
}
|
||||
|
||||
// CleanupStaleQueries removes queries older than maxAge
|
||||
func (a *App) CleanupStaleQueries(maxAge time.Duration) {
|
||||
a.queryMu.Lock()
|
||||
defer a.queryMu.Unlock()
|
||||
|
||||
now := time.Now()
|
||||
for id, ctx := range a.runningQueries {
|
||||
if now.Sub(ctx.started) > maxAge {
|
||||
// Query likely finished or stuck, remove from tracking
|
||||
delete(a.runningQueries, id)
|
||||
// Query expired, silently remove
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GenerateQueryID generates a unique query ID for cancellation tracking
|
||||
func (a *App) GenerateQueryID() string {
|
||||
return generateQueryID()
|
||||
}
|
||||
|
||||
63
internal/app/app_cache_key_test.go
Normal file
63
internal/app/app_cache_key_test.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestGetCacheKey_IgnoreTimeout(t *testing.T) {
|
||||
base := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: `C:\data\songs.duckdb`,
|
||||
Timeout: 30,
|
||||
UseProxy: false,
|
||||
UseSSH: false,
|
||||
}
|
||||
modified := base
|
||||
modified.Timeout = 120
|
||||
|
||||
left := getCacheKey(base)
|
||||
right := getCacheKey(modified)
|
||||
if left != right {
|
||||
t.Fatalf("expected same cache key when only timeout differs, got %s vs %s", left, right)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCacheKey_DuckDBHostAndDatabaseEquivalent(t *testing.T) {
|
||||
withHost := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: `D:\music\songs.duckdb`,
|
||||
}
|
||||
withDatabase := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Database: `D:\music\songs.duckdb`,
|
||||
}
|
||||
|
||||
left := getCacheKey(withHost)
|
||||
right := getCacheKey(withDatabase)
|
||||
if left != right {
|
||||
t.Fatalf("expected same cache key for duckdb host/database path, got %s vs %s", left, right)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCacheKey_KeepDatabaseIsolation(t *testing.T) {
|
||||
a := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "127.0.0.1",
|
||||
Port: 3306,
|
||||
User: "root",
|
||||
Password: "root",
|
||||
Database: "db_a",
|
||||
Timeout: 30,
|
||||
}
|
||||
b := a
|
||||
b.Database = "db_b"
|
||||
b.Timeout = 5
|
||||
|
||||
left := getCacheKey(a)
|
||||
right := getCacheKey(b)
|
||||
if left == right {
|
||||
t.Fatalf("expected different cache key for different database targets")
|
||||
}
|
||||
}
|
||||
84
internal/app/app_connect_error_test.go
Normal file
84
internal/app/app_connect_error_test.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestWrapConnectError_MongoNoSSL_RemovesMisleadingSSLLabel(t *testing.T) {
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
UseSSL: false,
|
||||
}
|
||||
sourceErr := errors.New("MongoDB 连接失败:SSL 主库凭据验证失败: mock error")
|
||||
|
||||
wrapped := wrapConnectError(config, sourceErr)
|
||||
text := wrapped.Error()
|
||||
if strings.Contains(text, "SSL 主库凭据") {
|
||||
t.Fatalf("expected ssl label to be removed when TLS disabled, got: %s", text)
|
||||
}
|
||||
if !strings.Contains(text, "主库凭据验证失败") {
|
||||
t.Fatalf("expected auth label to remain, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrapConnectError_MongoURIForcesTLS_KeepsSSLLabel(t *testing.T) {
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
UseSSL: false,
|
||||
URI: "mongodb://user:pass@127.0.0.1:27017/admin?tls=true",
|
||||
}
|
||||
sourceErr := errors.New("MongoDB 连接失败:SSL 主库凭据验证失败: mock error")
|
||||
|
||||
wrapped := wrapConnectError(config, sourceErr)
|
||||
text := wrapped.Error()
|
||||
if !strings.Contains(text, "SSL 主库凭据") {
|
||||
t.Fatalf("expected ssl label to remain when URI enables TLS, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrapConnectError_MongoSRVDefaultTLS_KeepsSSLLabel(t *testing.T) {
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
UseSSL: false,
|
||||
URI: "mongodb+srv://user:pass@cluster0.example.com/admin",
|
||||
}
|
||||
sourceErr := errors.New("MongoDB 连接失败:SSL 主库凭据验证失败: mock error")
|
||||
|
||||
wrapped := wrapConnectError(config, sourceErr)
|
||||
text := wrapped.Error()
|
||||
if !strings.Contains(text, "SSL 主库凭据") {
|
||||
t.Fatalf("expected ssl label to remain for mongodb+srv default TLS, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWithLogHintError_OmitEmptyLogPath(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
logPath := filepath.Join(dir, "gonavi.log")
|
||||
if err := os.WriteFile(logPath, nil, 0o644); err != nil {
|
||||
t.Fatalf("write empty log failed: %v", err)
|
||||
}
|
||||
err := withLogHint{err: errors.New("连接失败"), logPath: logPath}
|
||||
text := err.Error()
|
||||
if strings.Contains(text, "详细日志:") {
|
||||
t.Fatalf("expected no log hint for empty file, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWithLogHintError_IncludeNonEmptyLogPath(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
logPath := filepath.Join(dir, "gonavi.log")
|
||||
if err := os.WriteFile(logPath, []byte("log entry\n"), 0o644); err != nil {
|
||||
t.Fatalf("write log failed: %v", err)
|
||||
}
|
||||
err := withLogHint{err: errors.New("连接失败"), logPath: logPath}
|
||||
text := err.Error()
|
||||
if !strings.Contains(text, "详细日志:"+logPath) {
|
||||
t.Fatalf("expected log hint with path, got: %s", text)
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
@@ -14,12 +15,17 @@ func normalizeRunConfig(config connection.ConnectionConfig, dbName string) conne
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "highgo", "vastbase", "sqlserver", "mongodb", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "highgo", "vastbase", "sqlserver", "mongodb", "tdengine", "clickhouse":
|
||||
// 这些类型的 dbName 表示"数据库",需要写入连接配置以选择目标库。
|
||||
runConfig.Database = name
|
||||
case "dameng":
|
||||
// 达梦使用 schema 参数,沿用现有行为:dbName 表示 schema。
|
||||
runConfig.Database = name
|
||||
case "redis":
|
||||
runConfig.Database = name
|
||||
if idx, err := strconv.Atoi(name); err == nil && idx >= 0 && idx <= 15 {
|
||||
runConfig.RedisDB = idx
|
||||
}
|
||||
default:
|
||||
// oracle: dbName 表示 schema/owner,不能覆盖 config.Database(服务名)
|
||||
// sqlite: 无需设置 Database
|
||||
@@ -36,6 +42,17 @@ func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string,
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
dbType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if dbType == "sqlserver" {
|
||||
// SQL Server 的 DB 接口约定:第一个参数是数据库名,schema 由 tableName(如 dbo.users) 自行解析。
|
||||
// 不能把 schema(dbo) 传到第一个参数,否则会拼出 dbo.sys.columns 等无效对象名。
|
||||
targetDB := rawDB
|
||||
if targetDB == "" {
|
||||
targetDB = strings.TrimSpace(config.Database)
|
||||
}
|
||||
return targetDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
@@ -44,13 +61,10 @@ func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string,
|
||||
}
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
// PG/金仓/瀚高/海量:dbName 在 UI 里是"数据库",schema 需从 tableName 或使用默认 public。
|
||||
return "public", rawTable
|
||||
case "sqlserver":
|
||||
// SQL Server:dbName 表示数据库,schema 默认 dbo
|
||||
return "dbo", rawTable
|
||||
default:
|
||||
// MySQL:dbName 表示数据库;Oracle/达梦:dbName 表示 schema/owner。
|
||||
return rawDB, rawTable
|
||||
|
||||
51
internal/app/db_context_test.go
Normal file
51
internal/app/db_context_test.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeSchemaAndTable_SQLServerKeepsDatabaseAndQualifiedTable(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schemaOrDb, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Database: "master",
|
||||
}, "biz_db", "dbo.users")
|
||||
|
||||
if schemaOrDb != "biz_db" {
|
||||
t.Fatalf("expected sqlserver first return value as database name, got %q", schemaOrDb)
|
||||
}
|
||||
if table != "dbo.users" {
|
||||
t.Fatalf("expected sqlserver table name keep qualified form, got %q", table)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeSchemaAndTable_SQLServerFallbackToConfigDatabase(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schemaOrDb, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Database: "biz_db",
|
||||
}, "", "dbo.users")
|
||||
|
||||
if schemaOrDb != "biz_db" {
|
||||
t.Fatalf("expected sqlserver fallback database from config, got %q", schemaOrDb)
|
||||
}
|
||||
if table != "dbo.users" {
|
||||
t.Fatalf("expected sqlserver table name keep qualified form, got %q", table)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeSchemaAndTable_PostgresStillSplitsQualifiedName(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schema, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
}, "demo_db", "public.orders")
|
||||
|
||||
if schema != "public" || table != "orders" {
|
||||
t.Fatalf("expected postgres qualified split to public.orders, got %q.%q", schema, table)
|
||||
}
|
||||
}
|
||||
@@ -12,8 +12,35 @@ import (
|
||||
|
||||
func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.ConnectionConfig, error) {
|
||||
config := raw
|
||||
if config.UseHTTPTunnel {
|
||||
if config.UseProxy {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道与普通代理不能同时启用")
|
||||
}
|
||||
tunnelHost := strings.TrimSpace(config.HTTPTunnel.Host)
|
||||
if tunnelHost == "" {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道主机不能为空")
|
||||
}
|
||||
tunnelPort := config.HTTPTunnel.Port
|
||||
if tunnelPort <= 0 {
|
||||
tunnelPort = 8080
|
||||
}
|
||||
if tunnelPort > 65535 {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道端口无效:%d", config.HTTPTunnel.Port)
|
||||
}
|
||||
|
||||
config.UseProxy = true
|
||||
config.Proxy = connection.ProxyConfig{
|
||||
Type: "http",
|
||||
Host: tunnelHost,
|
||||
Port: tunnelPort,
|
||||
User: strings.TrimSpace(config.HTTPTunnel.User),
|
||||
Password: config.HTTPTunnel.Password,
|
||||
}
|
||||
}
|
||||
if !config.UseProxy {
|
||||
config.Proxy = connection.ProxyConfig{}
|
||||
config.UseHTTPTunnel = false
|
||||
config.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
return config, nil
|
||||
}
|
||||
|
||||
@@ -22,6 +49,8 @@ func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.Con
|
||||
return connection.ConnectionConfig{}, err
|
||||
}
|
||||
config.Proxy = normalizedProxy
|
||||
config.UseHTTPTunnel = false
|
||||
config.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
|
||||
if config.UseSSH {
|
||||
sshPort := config.SSH.Port
|
||||
@@ -44,8 +73,8 @@ func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.Con
|
||||
// 文件型/自定义 DSN 类型不走标准 host:port,不在此层改写。
|
||||
return config, nil
|
||||
}
|
||||
if normalizedType == "mongodb" && config.MongoSRV {
|
||||
// Mongo SRV 由驱动侧 Dialer 处理代理,避免破坏 DNS SRV 拓扑发现。
|
||||
if normalizedType == "mongodb" {
|
||||
// MongoDB 统一由驱动侧 Dialer 处理代理,保留原始目标地址,避免将连接目标改写为本地转发地址。
|
||||
return config, nil
|
||||
}
|
||||
|
||||
@@ -194,6 +223,8 @@ func defaultPortByType(driverType string) int {
|
||||
return 1433
|
||||
case "mongodb":
|
||||
return 27017
|
||||
case "clickhouse":
|
||||
return 9000
|
||||
case "highgo":
|
||||
return 5866
|
||||
default:
|
||||
|
||||
64
internal/app/db_proxy_test.go
Normal file
64
internal/app/db_proxy_test.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestResolveDialConfigWithProxy_MongoKeepsTargetAddress(t *testing.T) {
|
||||
hosts := []string{"10.20.30.40:27017", "10.20.30.41:27017"}
|
||||
raw := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
Host: "10.20.30.40",
|
||||
Port: 27017,
|
||||
UseProxy: true,
|
||||
Proxy: connection.ProxyConfig{
|
||||
Type: "socks5",
|
||||
Host: "127.0.0.1",
|
||||
Port: 1080,
|
||||
},
|
||||
Hosts: hosts,
|
||||
}
|
||||
|
||||
got, err := resolveDialConfigWithProxy(raw)
|
||||
if err != nil {
|
||||
t.Fatalf("resolveDialConfigWithProxy returned error: %v", err)
|
||||
}
|
||||
if got.Host != raw.Host || got.Port != raw.Port {
|
||||
t.Fatalf("mongo target address should be kept, got=%s:%d want=%s:%d", got.Host, got.Port, raw.Host, raw.Port)
|
||||
}
|
||||
if !got.UseProxy {
|
||||
t.Fatalf("mongo should keep UseProxy=true for driver-level dialer")
|
||||
}
|
||||
if !reflect.DeepEqual(got.Hosts, hosts) {
|
||||
t.Fatalf("mongo hosts should be kept, got=%v want=%v", got.Hosts, hosts)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveDialConfigWithProxy_MongoSRVKeepsTargetAddress(t *testing.T) {
|
||||
raw := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
Host: "cluster0.example.com",
|
||||
Port: 27017,
|
||||
MongoSRV: true,
|
||||
UseProxy: true,
|
||||
Proxy: connection.ProxyConfig{
|
||||
Type: "http",
|
||||
Host: "127.0.0.1",
|
||||
Port: 7890,
|
||||
},
|
||||
}
|
||||
|
||||
got, err := resolveDialConfigWithProxy(raw)
|
||||
if err != nil {
|
||||
t.Fatalf("resolveDialConfigWithProxy returned error: %v", err)
|
||||
}
|
||||
if got.Host != raw.Host || got.Port != raw.Port {
|
||||
t.Fatalf("mongo SRV target address should be kept, got=%s:%d want=%s:%d", got.Host, got.Port, raw.Host, raw.Port)
|
||||
}
|
||||
if !got.UseProxy {
|
||||
t.Fatalf("mongo SRV should keep UseProxy=true for driver-level dialer")
|
||||
}
|
||||
}
|
||||
314
internal/app/global_proxy.go
Normal file
314
internal/app/global_proxy.go
Normal file
@@ -0,0 +1,314 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
)
|
||||
|
||||
type globalProxySnapshot struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Proxy connection.ProxyConfig `json:"proxy"`
|
||||
}
|
||||
|
||||
var globalProxyRuntime = struct {
|
||||
mu sync.RWMutex
|
||||
enabled bool
|
||||
proxy connection.ProxyConfig
|
||||
}{}
|
||||
|
||||
type localProxyTLSFallbackTransport struct {
|
||||
primary *http.Transport
|
||||
fallback *http.Transport
|
||||
proxyEndpoint string
|
||||
}
|
||||
|
||||
func currentGlobalProxyConfig() globalProxySnapshot {
|
||||
globalProxyRuntime.mu.RLock()
|
||||
defer globalProxyRuntime.mu.RUnlock()
|
||||
if !globalProxyRuntime.enabled {
|
||||
return globalProxySnapshot{
|
||||
Enabled: false,
|
||||
Proxy: connection.ProxyConfig{},
|
||||
}
|
||||
}
|
||||
return globalProxySnapshot{
|
||||
Enabled: true,
|
||||
Proxy: globalProxyRuntime.proxy,
|
||||
}
|
||||
}
|
||||
|
||||
func setGlobalProxyConfig(enabled bool, proxyConfig connection.ProxyConfig) (globalProxySnapshot, error) {
|
||||
if !enabled {
|
||||
globalProxyRuntime.mu.Lock()
|
||||
globalProxyRuntime.enabled = false
|
||||
globalProxyRuntime.proxy = connection.ProxyConfig{}
|
||||
globalProxyRuntime.mu.Unlock()
|
||||
return currentGlobalProxyConfig(), nil
|
||||
}
|
||||
|
||||
normalizedProxy, err := proxytunnel.NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return globalProxySnapshot{}, err
|
||||
}
|
||||
|
||||
globalProxyRuntime.mu.Lock()
|
||||
globalProxyRuntime.enabled = true
|
||||
globalProxyRuntime.proxy = normalizedProxy
|
||||
globalProxyRuntime.mu.Unlock()
|
||||
return currentGlobalProxyConfig(), nil
|
||||
}
|
||||
|
||||
func (a *App) ConfigureGlobalProxy(enabled bool, proxyConfig connection.ProxyConfig) connection.QueryResult {
|
||||
before := currentGlobalProxyConfig()
|
||||
snapshot, err := setGlobalProxyConfig(enabled, proxyConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
// 前端可能在同一配置下重复触发同步(例如严格模式或状态回放),
|
||||
// 这里做幂等日志,避免重复刷屏。
|
||||
if !globalProxySnapshotEqual(before, snapshot) {
|
||||
if snapshot.Enabled {
|
||||
authState := ""
|
||||
if strings.TrimSpace(snapshot.Proxy.User) != "" {
|
||||
authState = "(认证:已配置)"
|
||||
}
|
||||
logger.Infof(
|
||||
"全局代理已启用:%s://%s:%d%s",
|
||||
strings.ToLower(strings.TrimSpace(snapshot.Proxy.Type)),
|
||||
strings.TrimSpace(snapshot.Proxy.Host),
|
||||
snapshot.Proxy.Port,
|
||||
authState,
|
||||
)
|
||||
} else {
|
||||
logger.Infof("全局代理已关闭")
|
||||
}
|
||||
}
|
||||
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: "全局代理配置已生效",
|
||||
Data: snapshot,
|
||||
}
|
||||
}
|
||||
|
||||
func globalProxySnapshotEqual(a, b globalProxySnapshot) bool {
|
||||
if a.Enabled != b.Enabled {
|
||||
return false
|
||||
}
|
||||
if !a.Enabled {
|
||||
return true
|
||||
}
|
||||
return proxyConfigEqual(a.Proxy, b.Proxy)
|
||||
}
|
||||
|
||||
func proxyConfigEqual(a, b connection.ProxyConfig) bool {
|
||||
return strings.EqualFold(strings.TrimSpace(a.Type), strings.TrimSpace(b.Type)) &&
|
||||
strings.TrimSpace(a.Host) == strings.TrimSpace(b.Host) &&
|
||||
a.Port == b.Port &&
|
||||
strings.TrimSpace(a.User) == strings.TrimSpace(b.User) &&
|
||||
a.Password == b.Password
|
||||
}
|
||||
|
||||
func (a *App) GetGlobalProxyConfig() connection.QueryResult {
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: "OK",
|
||||
Data: currentGlobalProxyConfig(),
|
||||
}
|
||||
}
|
||||
|
||||
func applyGlobalProxyToConnection(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
effective := config
|
||||
if effective.UseProxy || effective.UseHTTPTunnel {
|
||||
return effective
|
||||
}
|
||||
if isFileDatabaseType(effective.Type) {
|
||||
effective.Proxy = connection.ProxyConfig{}
|
||||
return effective
|
||||
}
|
||||
|
||||
snapshot := currentGlobalProxyConfig()
|
||||
if !snapshot.Enabled {
|
||||
effective.Proxy = connection.ProxyConfig{}
|
||||
return effective
|
||||
}
|
||||
|
||||
effective.UseProxy = true
|
||||
effective.Proxy = snapshot.Proxy
|
||||
return effective
|
||||
}
|
||||
|
||||
func isFileDatabaseType(driverType string) bool {
|
||||
switch strings.ToLower(strings.TrimSpace(driverType)) {
|
||||
case "sqlite", "duckdb":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func newHTTPClientWithGlobalProxy(timeout time.Duration) *http.Client {
|
||||
client := &http.Client{
|
||||
Timeout: timeout,
|
||||
}
|
||||
if transport := buildHTTPTransportWithGlobalProxy(); transport != nil {
|
||||
client.Transport = transport
|
||||
}
|
||||
return client
|
||||
}
|
||||
|
||||
func buildHTTPTransportWithGlobalProxy() http.RoundTripper {
|
||||
baseTransport, ok := http.DefaultTransport.(*http.Transport)
|
||||
if !ok || baseTransport == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
transport := baseTransport.Clone()
|
||||
snapshot := currentGlobalProxyConfig()
|
||||
if !snapshot.Enabled {
|
||||
transport.Proxy = http.ProxyFromEnvironment
|
||||
return transport
|
||||
}
|
||||
|
||||
proxyURL, err := buildProxyURLFromConfig(snapshot.Proxy)
|
||||
if err != nil {
|
||||
logger.Warnf("全局代理配置无效,回退系统代理:%v", err)
|
||||
transport.Proxy = http.ProxyFromEnvironment
|
||||
return transport
|
||||
}
|
||||
|
||||
transport.Proxy = http.ProxyURL(proxyURL)
|
||||
if !isLoopbackProxyHost(snapshot.Proxy.Host) {
|
||||
return transport
|
||||
}
|
||||
|
||||
fallbackTransport := transport.Clone()
|
||||
fallbackTransport.TLSClientConfig = cloneTLSConfigWithInsecureSkipVerify(fallbackTransport.TLSClientConfig)
|
||||
return &localProxyTLSFallbackTransport{
|
||||
primary: transport,
|
||||
fallback: fallbackTransport,
|
||||
proxyEndpoint: proxyURL.Redacted(),
|
||||
}
|
||||
}
|
||||
|
||||
func (t *localProxyTLSFallbackTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
resp, err := t.primary.RoundTrip(req)
|
||||
if err == nil {
|
||||
return resp, nil
|
||||
}
|
||||
if !isTLSFallbackCandidate(req.Method, err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
retryReq, cloneErr := cloneRequestForRetry(req)
|
||||
if cloneErr != nil {
|
||||
return nil, err
|
||||
}
|
||||
logger.Warnf("检测到本地代理 TLS 证书不受信任,启用兼容回退:代理=%s 目标=%s 错误=%v", t.proxyEndpoint, req.URL.String(), err)
|
||||
return t.fallback.RoundTrip(retryReq)
|
||||
}
|
||||
|
||||
func isTLSFallbackCandidate(method string, err error) bool {
|
||||
if !isIdempotentRequestMethod(method) {
|
||||
return false
|
||||
}
|
||||
return isUnknownAuthorityError(err)
|
||||
}
|
||||
|
||||
func isIdempotentRequestMethod(method string) bool {
|
||||
switch strings.ToUpper(strings.TrimSpace(method)) {
|
||||
case http.MethodGet, http.MethodHead:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func cloneRequestForRetry(req *http.Request) (*http.Request, error) {
|
||||
cloned := req.Clone(req.Context())
|
||||
if req.Body == nil || req.Body == http.NoBody {
|
||||
return cloned, nil
|
||||
}
|
||||
if req.GetBody == nil {
|
||||
return nil, fmt.Errorf("request body not replayable")
|
||||
}
|
||||
body, err := req.GetBody()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cloned.Body = body
|
||||
return cloned, nil
|
||||
}
|
||||
|
||||
func isUnknownAuthorityError(err error) bool {
|
||||
var unknownErr x509.UnknownAuthorityError
|
||||
if errors.As(err, &unknownErr) {
|
||||
return true
|
||||
}
|
||||
return strings.Contains(strings.ToLower(err.Error()), "x509: certificate signed by unknown authority")
|
||||
}
|
||||
|
||||
func cloneTLSConfigWithInsecureSkipVerify(base *tls.Config) *tls.Config {
|
||||
if base == nil {
|
||||
return &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
cloned := base.Clone()
|
||||
cloned.InsecureSkipVerify = true
|
||||
return cloned
|
||||
}
|
||||
|
||||
func isLoopbackProxyHost(host string) bool {
|
||||
trimmed := strings.TrimSpace(host)
|
||||
if trimmed == "" {
|
||||
return false
|
||||
}
|
||||
if strings.EqualFold(trimmed, "localhost") {
|
||||
return true
|
||||
}
|
||||
ip := net.ParseIP(trimmed)
|
||||
if ip == nil {
|
||||
return false
|
||||
}
|
||||
return ip.IsLoopback()
|
||||
}
|
||||
|
||||
func buildProxyURLFromConfig(proxyConfig connection.ProxyConfig) (*url.URL, error) {
|
||||
normalizedProxy, err := proxytunnel.NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
proxyType := strings.ToLower(strings.TrimSpace(normalizedProxy.Type))
|
||||
if proxyType != "http" && proxyType != "socks5" {
|
||||
return nil, fmt.Errorf("不支持的代理类型:%s", normalizedProxy.Type)
|
||||
}
|
||||
if strings.TrimSpace(normalizedProxy.Host) == "" {
|
||||
return nil, fmt.Errorf("代理地址不能为空")
|
||||
}
|
||||
if normalizedProxy.Port <= 0 || normalizedProxy.Port > 65535 {
|
||||
return nil, fmt.Errorf("代理端口无效:%d", normalizedProxy.Port)
|
||||
}
|
||||
|
||||
proxyURL := &url.URL{
|
||||
Scheme: proxyType,
|
||||
Host: net.JoinHostPort(strings.TrimSpace(normalizedProxy.Host), strconv.Itoa(normalizedProxy.Port)),
|
||||
}
|
||||
if strings.TrimSpace(normalizedProxy.User) != "" {
|
||||
proxyURL.User = url.UserPassword(strings.TrimSpace(normalizedProxy.User), normalizedProxy.Password)
|
||||
}
|
||||
return proxyURL, nil
|
||||
}
|
||||
@@ -3,14 +3,26 @@ package app
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
)
|
||||
|
||||
const testConnectionTimeoutUpperBoundSeconds = 12
|
||||
|
||||
func normalizeTestConnectionConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := config
|
||||
if normalized.Timeout <= 0 || normalized.Timeout > testConnectionTimeoutUpperBoundSeconds {
|
||||
normalized.Timeout = testConnectionTimeoutUpperBoundSeconds
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
// Generic DB Methods
|
||||
|
||||
func (a *App) DBConnect(config connection.ConnectionConfig) connection.QueryResult {
|
||||
@@ -26,13 +38,16 @@ func (a *App) DBConnect(config connection.ConnectionConfig) connection.QueryResu
|
||||
}
|
||||
|
||||
func (a *App) TestConnection(config connection.ConnectionConfig) connection.QueryResult {
|
||||
_, err := a.getDatabaseForcePing(config)
|
||||
testConfig := normalizeTestConnectionConfig(config)
|
||||
started := time.Now()
|
||||
logger.Infof("TestConnection 开始:%s", formatConnSummary(testConfig))
|
||||
_, err := a.getDatabaseForcePing(testConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "TestConnection 连接测试失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "TestConnection 连接测试失败:耗时=%s %s", time.Since(started).Round(time.Millisecond), formatConnSummary(testConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
logger.Infof("TestConnection 连接测试成功:%s", formatConnSummary(config))
|
||||
logger.Infof("TestConnection 连接测试成功:耗时=%s %s", time.Since(started).Round(time.Millisecond), formatConnSummary(testConfig))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
@@ -88,6 +103,8 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
query = fmt.Sprintf("CREATE DATABASE \"%s\"", escapedDbName)
|
||||
} else if dbType == "tdengine" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "clickhouse" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "mariadb" || dbType == "diros" {
|
||||
// MariaDB uses same syntax as MySQL
|
||||
} else if dbType == "sphinx" {
|
||||
@@ -99,7 +116,7 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "Database created successfully"}
|
||||
return connection.QueryResult{Success: true, Message: "数据库创建成功"}
|
||||
}
|
||||
|
||||
func resolveDDLDBType(config connection.ConnectionConfig) string {
|
||||
@@ -110,16 +127,39 @@ func resolveDDLDBType(config connection.ConnectionConfig) string {
|
||||
|
||||
driver := strings.ToLower(strings.TrimSpace(config.Driver))
|
||||
switch driver {
|
||||
case "postgresql":
|
||||
case "postgresql", "postgres", "pg", "pq", "pgx":
|
||||
return "postgres"
|
||||
case "dm":
|
||||
case "dm", "dameng", "dm8":
|
||||
return "dameng"
|
||||
case "sqlite3":
|
||||
case "sqlite3", "sqlite":
|
||||
return "sqlite"
|
||||
case "sphinxql":
|
||||
return "sphinx"
|
||||
case "diros", "doris":
|
||||
return "diros"
|
||||
case "kingbase", "kingbase8", "kingbasees", "kingbasev8":
|
||||
return "kingbase"
|
||||
case "highgo":
|
||||
return "highgo"
|
||||
case "vastbase":
|
||||
return "vastbase"
|
||||
}
|
||||
|
||||
switch {
|
||||
case strings.Contains(driver, "postgres"):
|
||||
return "postgres"
|
||||
case strings.Contains(driver, "kingbase"):
|
||||
return "kingbase"
|
||||
case strings.Contains(driver, "highgo"):
|
||||
return "highgo"
|
||||
case strings.Contains(driver, "vastbase"):
|
||||
return "vastbase"
|
||||
case strings.Contains(driver, "sqlite"):
|
||||
return "sqlite"
|
||||
case strings.Contains(driver, "sphinx"):
|
||||
return "sphinx"
|
||||
case strings.Contains(driver, "diros"), strings.Contains(driver, "doris"):
|
||||
return "diros"
|
||||
default:
|
||||
return driver
|
||||
}
|
||||
@@ -162,7 +202,7 @@ func buildRunConfigForDDL(config connection.ConnectionConfig, dbType string, dbN
|
||||
if strings.EqualFold(strings.TrimSpace(config.Type), "custom") {
|
||||
// custom 连接的 dbName 语义依赖 driver,尽量在常见驱动上对齐内置类型行为。
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "vastbase", "dameng":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "vastbase", "dameng", "clickhouse":
|
||||
if strings.TrimSpace(dbName) != "" {
|
||||
runConfig.Database = strings.TrimSpace(dbName)
|
||||
}
|
||||
@@ -184,7 +224,7 @@ func (a *App) RenameDatabase(config connection.ConnectionConfig, oldName string,
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
return connection.QueryResult{Success: false, Message: "MySQL/MariaDB/Diros/Sphinx 不支持直接重命名数据库,请新建库后迁移数据"}
|
||||
return connection.QueryResult{Success: false, Message: "MySQL/MariaDB/Doris/Sphinx 不支持直接重命名数据库,请新建库后迁移数据"}
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
if strings.EqualFold(strings.TrimSpace(config.Database), oldName) {
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再重命名"}
|
||||
@@ -216,7 +256,7 @@ func (a *App) DropDatabase(config connection.ConnectionConfig, dbName string) co
|
||||
sql string
|
||||
)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "tdengine", "clickhouse":
|
||||
runConfig = config
|
||||
runConfig.Database = ""
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
@@ -255,7 +295,7 @@ func (a *App) RenameTable(config connection.ConnectionConfig, dbName string, old
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名表", dbType)}
|
||||
}
|
||||
@@ -269,7 +309,7 @@ func (a *App) RenameTable(config connection.ConnectionConfig, dbName string, old
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "clickhouse":
|
||||
newQualifiedTable := quoteTableIdentByType(dbType, schemaName, newTableName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualifiedTable, newQualifiedTable)
|
||||
case "sqlserver":
|
||||
@@ -301,7 +341,7 @@ func (a *App) DropTable(config connection.ConnectionConfig, dbName string, table
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "tdengine", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除表", dbType)}
|
||||
}
|
||||
@@ -350,12 +390,21 @@ func (a *App) MySQLShowCreateTable(config connection.ConnectionConfig, dbName st
|
||||
}
|
||||
|
||||
func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
return a.DBQueryWithCancel(config, dbName, query, "")
|
||||
}
|
||||
|
||||
func (a *App) DBQueryWithCancel(config connection.ConnectionConfig, dbName string, query string, queryID string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
// Generate query ID if not provided
|
||||
if queryID == "" {
|
||||
queryID = generateQueryID()
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
@@ -366,12 +415,247 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
lowerQuery := strings.TrimSpace(strings.ToLower(query))
|
||||
isReadQuery := strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain")
|
||||
// MongoDB JSON 命令中的 find/count/aggregate 也属于读查询
|
||||
if !isReadQuery && strings.ToLower(strings.TrimSpace(runConfig.Type)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
|
||||
isReadQuery = true
|
||||
// Store cancel function for potential manual cancellation
|
||||
a.queryMu.Lock()
|
||||
a.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
a.queryMu.Unlock()
|
||||
|
||||
// Ensure query is removed from tracking when done
|
||||
defer func() {
|
||||
a.queryMu.Lock()
|
||||
delete(a.runningQueries, queryID)
|
||||
a.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
isReadQuery := isReadOnlySQLQuery(runConfig.Type, query)
|
||||
|
||||
runReadQuery := func(inst db.Database) ([]map[string]interface{}, []string, error) {
|
||||
if q, ok := inst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
return q.QueryContext(ctx, query)
|
||||
}
|
||||
return inst.Query(query)
|
||||
}
|
||||
|
||||
runExecQuery := func(inst db.Database) (int64, error) {
|
||||
if e, ok := inst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
return e.ExecContext(ctx, query)
|
||||
}
|
||||
return inst.Exec(query)
|
||||
}
|
||||
|
||||
if isReadQuery {
|
||||
data, columns, err := runReadQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQuery 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
data, columns, err = runReadQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns, QueryID: queryID}
|
||||
} else {
|
||||
affected, err := runExecQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQuery 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
affected, err = runExecQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}, QueryID: queryID}
|
||||
}
|
||||
}
|
||||
|
||||
// DBQueryMulti 执行可能包含多条 SQL 语句的查询,返回多个结果集。
|
||||
// 如果底层驱动支持 MultiResultQuerier,一次性执行所有语句;
|
||||
// 否则按分号拆分后逐条执行,模拟多结果集。
|
||||
func (a *App) DBQueryMulti(config connection.ConnectionConfig, dbName string, query string, queryID string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
if queryID == "" {
|
||||
queryID = generateQueryID()
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryMulti 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
timeoutSeconds := runConfig.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
a.queryMu.Lock()
|
||||
a.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
a.queryMu.Unlock()
|
||||
defer func() {
|
||||
a.queryMu.Lock()
|
||||
delete(a.runningQueries, queryID)
|
||||
a.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
// 尝试使用驱动原生多结果集支持
|
||||
runMultiQuery := func(inst db.Database) ([]connection.ResultSetData, error) {
|
||||
if q, ok := inst.(db.MultiResultQuerierContext); ok {
|
||||
return q.QueryMultiContext(ctx, query)
|
||||
}
|
||||
if q, ok := inst.(db.MultiResultQuerier); ok {
|
||||
return q.QueryMulti(query)
|
||||
}
|
||||
return nil, nil // 返回 nil 表示不支持
|
||||
}
|
||||
|
||||
results, err := runMultiQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQueryMulti 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error(), QueryID: queryID}
|
||||
}
|
||||
results, err = runMultiQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryMulti 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
|
||||
// 驱动支持多结果集,直接返回
|
||||
if results != nil {
|
||||
return connection.QueryResult{Success: true, Data: results, QueryID: queryID}
|
||||
}
|
||||
|
||||
// 驱动不支持多结果集,回退到逐条执行
|
||||
statements := splitSQLStatements(query)
|
||||
if len(statements) == 0 {
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Data: []connection.ResultSetData{},
|
||||
QueryID: queryID,
|
||||
}
|
||||
}
|
||||
|
||||
var resultSets []connection.ResultSetData
|
||||
for idx, stmt := range statements {
|
||||
stmt = strings.TrimSpace(stmt)
|
||||
if stmt == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if isReadOnlySQLQuery(runConfig.Type, stmt) {
|
||||
var data []map[string]interface{}
|
||||
var columns []string
|
||||
if q, ok := dbInst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
data, columns, err = q.QueryContext(ctx, stmt)
|
||||
} else {
|
||||
data, columns, err = dbInst.Query(stmt)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryMulti 逐条查询失败(第 %d/%d 条):%s SQL片段=%q", idx+1, len(statements), formatConnSummary(runConfig), sqlSnippet(stmt))
|
||||
errMsg := fmt.Sprintf("第 %d 条语句执行失败: %v", idx+1, err)
|
||||
if len(resultSets) > 0 {
|
||||
errMsg += fmt.Sprintf("(前 %d 条已执行成功)", len(resultSets))
|
||||
}
|
||||
return connection.QueryResult{Success: false, Message: errMsg, QueryID: queryID}
|
||||
}
|
||||
if data == nil {
|
||||
data = make([]map[string]interface{}, 0)
|
||||
}
|
||||
if columns == nil {
|
||||
columns = []string{}
|
||||
}
|
||||
resultSets = append(resultSets, connection.ResultSetData{Rows: data, Columns: columns})
|
||||
} else {
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, stmt)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(stmt)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryMulti 逐条执行失败(第 %d/%d 条):%s SQL片段=%q", idx+1, len(statements), formatConnSummary(runConfig), sqlSnippet(stmt))
|
||||
errMsg := fmt.Sprintf("第 %d 条语句执行失败: %v", idx+1, err)
|
||||
if len(resultSets) > 0 {
|
||||
errMsg += fmt.Sprintf("(前 %d 条已执行成功)", len(resultSets))
|
||||
}
|
||||
return connection.QueryResult{Success: false, Message: errMsg, QueryID: queryID}
|
||||
}
|
||||
resultSets = append(resultSets, connection.ResultSetData{
|
||||
Rows: []map[string]interface{}{{"affectedRows": affected}},
|
||||
Columns: []string{"affectedRows"},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if resultSets == nil {
|
||||
resultSets = []connection.ResultSetData{}
|
||||
}
|
||||
// 回退到逐条执行且有多条语句时,附加提示信息
|
||||
var fallbackMsg string
|
||||
if len(statements) > 1 {
|
||||
fallbackMsg = fmt.Sprintf("当前数据源(%s)不支持原生多语句执行,已自动拆分为 %d 条语句逐条执行。", runConfig.Type, len(statements))
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: resultSets, QueryID: queryID, Message: fallbackMsg}
|
||||
}
|
||||
|
||||
func (a *App) DBQueryIsolated(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.openDatabaseIsolated(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryIsolated 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
defer func() {
|
||||
if closeErr := dbInst.Close(); closeErr != nil {
|
||||
logger.Error(closeErr, "DBQueryIsolated 关闭临时连接失败:%s", formatConnSummary(runConfig))
|
||||
}
|
||||
}()
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
timeoutSeconds := runConfig.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
isReadQuery := isReadOnlySQLQuery(runConfig.Type, query)
|
||||
|
||||
if isReadQuery {
|
||||
var data []map[string]interface{}
|
||||
var columns []string
|
||||
@@ -383,25 +667,25 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
data, columns, err = dbInst.Query(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
logger.Error(err, "DBQueryIsolated 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns}
|
||||
} else {
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, query)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
|
||||
}
|
||||
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, query)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryIsolated 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
|
||||
}
|
||||
|
||||
func sqlSnippet(query string) string {
|
||||
@@ -413,20 +697,56 @@ func sqlSnippet(query string) string {
|
||||
return q[:max] + "..."
|
||||
}
|
||||
|
||||
func ensureNonNilSlice[T any](items []T) []T {
|
||||
if items == nil {
|
||||
return make([]T, 0)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.QueryResult {
|
||||
dbInst, err := a.getDatabase(config)
|
||||
runConfig := normalizeRunConfig(config, "")
|
||||
if strings.EqualFold(strings.TrimSpace(runConfig.Type), "redis") {
|
||||
runConfig.Type = "redis"
|
||||
client, err := a.getRedisClient(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取 Redis 连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
dbs, err := client.GetDatabases()
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取 Redis 库列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
resData := make([]map[string]string, 0, len(dbs))
|
||||
for _, item := range dbs {
|
||||
resData = append(resData, map[string]string{"Database": strconv.Itoa(item.Index)})
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: resData}
|
||||
}
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
dbs, err := dbInst.GetDatabases()
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBGetDatabases 重建连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
dbs, err = retryInst.GetDatabases()
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
var resData []map[string]string
|
||||
resData := make([]map[string]string, 0, len(dbs))
|
||||
for _, name := range dbs {
|
||||
resData = append(resData, map[string]string{"Database": name})
|
||||
}
|
||||
@@ -436,6 +756,48 @@ func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.Quer
|
||||
|
||||
func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
if strings.EqualFold(strings.TrimSpace(runConfig.Type), "redis") {
|
||||
runConfig.Type = "redis"
|
||||
client, err := a.getRedisClient(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取 Redis 连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
cursor := uint64(0)
|
||||
tables := make([]string, 0, 128)
|
||||
seen := make(map[string]struct{}, 128)
|
||||
for {
|
||||
result, err := client.ScanKeys("*", cursor, 1000)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 扫描 Redis Key 失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
for _, item := range result.Keys {
|
||||
key := strings.TrimSpace(item.Key)
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
tables = append(tables, key)
|
||||
}
|
||||
if strings.TrimSpace(result.Cursor) == "" || strings.TrimSpace(result.Cursor) == "0" {
|
||||
break
|
||||
}
|
||||
next, err := strconv.ParseUint(strings.TrimSpace(result.Cursor), 10, 64)
|
||||
if err != nil || next == cursor {
|
||||
break
|
||||
}
|
||||
cursor = next
|
||||
}
|
||||
resData := make([]map[string]string, 0, len(tables))
|
||||
for _, name := range tables {
|
||||
resData = append(resData, map[string]string{"Table": name})
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: resData}
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -444,12 +806,22 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
tables, err := dbInst.GetTables(dbName)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBGetTables 重建连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
tables, err = retryInst.GetTables(dbName)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取表列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
var resData []map[string]string
|
||||
resData := make([]map[string]string, 0, len(tables))
|
||||
for _, name := range tables {
|
||||
resData = append(resData, map[string]string{"Table": name})
|
||||
}
|
||||
@@ -458,8 +830,8 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
dbType := resolveDDLDBType(config)
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -467,35 +839,65 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
sqlStr, err := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
sqlStr, err := resolveCreateStatementWithFallback(dbInst, config, dbName, tableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBShowCreateTable 获取建表语句失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if shouldFallbackCreateStatement(dbType, sqlStr) {
|
||||
columns, colErr := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if colErr != nil {
|
||||
logger.Error(colErr, "DBShowCreateTable 兜底加载字段失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: colErr.Error()}
|
||||
}
|
||||
fallbackDDL, buildErr := buildFallbackCreateStatement(dbType, schemaName, pureTableName, columns)
|
||||
if buildErr != nil {
|
||||
logger.Error(buildErr, "DBShowCreateTable 兜底生成 DDL 失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: buildErr.Error()}
|
||||
}
|
||||
sqlStr = fallbackDDL
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: sqlStr}
|
||||
}
|
||||
|
||||
func shouldFallbackCreateStatement(dbType string, ddl string) bool {
|
||||
func resolveCreateStatementWithFallback(dbInst db.Database, config connection.ConnectionConfig, dbName string, tableName string) (string, error) {
|
||||
dbType := resolveDDLDBType(config)
|
||||
schemaName, pureTableName := normalizeSchemaAndTableByType(dbType, dbName, tableName)
|
||||
if pureTableName == "" {
|
||||
return "", fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
sqlStr, sourceErr := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
if sourceErr == nil && !shouldFallbackCreateStatement(dbType, sqlStr) {
|
||||
return sqlStr, nil
|
||||
}
|
||||
|
||||
if !supportsCreateStatementFallback(dbType) {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return sqlStr, nil
|
||||
}
|
||||
|
||||
columns, colErr := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if colErr != nil {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return "", colErr
|
||||
}
|
||||
|
||||
fallbackDDL, buildErr := buildFallbackCreateStatement(dbType, schemaName, pureTableName, columns)
|
||||
if buildErr != nil {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return "", buildErr
|
||||
}
|
||||
return fallbackDDL, nil
|
||||
}
|
||||
|
||||
func supportsCreateStatementFallback(dbType string) bool {
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func shouldFallbackCreateStatement(dbType string, ddl string) bool {
|
||||
if !supportsCreateStatementFallback(dbType) {
|
||||
return false
|
||||
}
|
||||
|
||||
trimmed := strings.TrimSpace(ddl)
|
||||
if trimmed == "" {
|
||||
@@ -601,7 +1003,7 @@ func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, ta
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: columns}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(columns)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -618,7 +1020,7 @@ func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, ta
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: indexes}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(indexes)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -635,7 +1037,7 @@ func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: fks}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(fks)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -652,7 +1054,7 @@ func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, t
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: triggers}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(triggers)}
|
||||
}
|
||||
|
||||
func (a *App) DropView(config connection.ConnectionConfig, dbName string, viewName string) connection.QueryResult {
|
||||
@@ -663,7 +1065,7 @@ func (a *App) DropView(config connection.ConnectionConfig, dbName string, viewNa
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除视图", dbType)}
|
||||
}
|
||||
@@ -752,7 +1154,7 @@ func (a *App) RenameView(config connection.ConnectionConfig, dbName string, oldN
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "clickhouse":
|
||||
newQualified := quoteTableIdentByType(dbType, schemaName, newName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualified, newQualified)
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
@@ -790,5 +1192,5 @@ func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: cols}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(cols)}
|
||||
}
|
||||
|
||||
149
internal/app/methods_db_cancel_test.go
Normal file
149
internal/app/methods_db_cancel_test.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestGenerateQueryID(t *testing.T) {
|
||||
app := NewApp()
|
||||
id := app.GenerateQueryID()
|
||||
if id == "" {
|
||||
t.Fatal("GenerateQueryID returned empty string")
|
||||
}
|
||||
// Should start with "query-"
|
||||
if !strings.HasPrefix(id, "query-") {
|
||||
t.Fatalf("Expected query ID to start with 'query-', got: %s", id)
|
||||
}
|
||||
// Should be reasonably unique (not equal to another generated ID)
|
||||
id2 := app.GenerateQueryID()
|
||||
if id == id2 {
|
||||
t.Fatal("Two consecutive GenerateQueryID calls returned identical IDs")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCancelQuery_NonExistent(t *testing.T) {
|
||||
app := NewApp()
|
||||
res := app.CancelQuery("non-existent-query-id")
|
||||
if res.Success {
|
||||
t.Fatal("CancelQuery should fail for non-existent query ID")
|
||||
}
|
||||
if !strings.Contains(res.Message, "不存在") && !strings.Contains(res.Message, "not exist") {
|
||||
t.Fatalf("Expected error message about query not existing, got: %s", res.Message)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCancelQuery_ValidQuery(t *testing.T) {
|
||||
app := NewApp()
|
||||
|
||||
// First, generate a query ID and simulate a running query
|
||||
queryID := app.GenerateQueryID()
|
||||
|
||||
// Store a cancel function in runningQueries map
|
||||
_, cancel := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
|
||||
// Ensure cleanup after test
|
||||
defer func() {
|
||||
app.queryMu.Lock()
|
||||
delete(app.runningQueries, queryID)
|
||||
app.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
// Cancel the query
|
||||
res := app.CancelQuery(queryID)
|
||||
if !res.Success {
|
||||
t.Fatalf("CancelQuery should succeed for valid query ID, got: %s", res.Message)
|
||||
}
|
||||
|
||||
// Verify query removed from map
|
||||
app.queryMu.Lock()
|
||||
_, exists := app.runningQueries[queryID]
|
||||
app.queryMu.Unlock()
|
||||
if exists {
|
||||
t.Fatal("Query should be removed from runningQueries after cancellation")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCleanupStaleQueries(t *testing.T) {
|
||||
app := NewApp()
|
||||
|
||||
// Add a stale query (started 2 hours ago)
|
||||
queryID := app.GenerateQueryID()
|
||||
_, cancel := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now().Add(-2 * time.Hour),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
|
||||
// Cleanup queries older than 1 hour
|
||||
app.CleanupStaleQueries(1 * time.Hour)
|
||||
|
||||
// Verify stale query was removed
|
||||
app.queryMu.Lock()
|
||||
_, exists := app.runningQueries[queryID]
|
||||
app.queryMu.Unlock()
|
||||
if exists {
|
||||
t.Fatal("Stale query should be removed by CleanupStaleQueries")
|
||||
}
|
||||
|
||||
// Add a fresh query (started 30 minutes ago)
|
||||
freshID := app.GenerateQueryID()
|
||||
_, cancel2 := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[freshID] = queryContext{
|
||||
cancel: cancel2,
|
||||
started: time.Now().Add(-30 * time.Minute),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
defer cancel2()
|
||||
|
||||
// Cleanup queries older than 1 hour
|
||||
app.CleanupStaleQueries(1 * time.Hour)
|
||||
|
||||
// Verify fresh query still exists
|
||||
app.queryMu.Lock()
|
||||
_, exists = app.runningQueries[freshID]
|
||||
app.queryMu.Unlock()
|
||||
if !exists {
|
||||
t.Fatal("Fresh query should not be removed by CleanupStaleQueries")
|
||||
}
|
||||
|
||||
// Clean up
|
||||
app.queryMu.Lock()
|
||||
delete(app.runningQueries, freshID)
|
||||
app.queryMu.Unlock()
|
||||
}
|
||||
|
||||
func TestDBQueryWithCancel_QueryIDPropagation(t *testing.T) {
|
||||
// This test verifies that query ID is properly propagated in QueryResult
|
||||
// Since we can't easily mock database connections, we'll test the integration
|
||||
// by checking that DBQueryWithCancel returns a QueryResult with QueryID field
|
||||
|
||||
app := NewApp()
|
||||
|
||||
// Create a minimal config for a database type that doesn't require actual connection
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: ":memory:", // In-memory duckdb for testing
|
||||
}
|
||||
|
||||
// This will fail because we can't actually connect, but we can test the error path
|
||||
result := app.DBQueryWithCancel(config, "", "SELECT 1", "test-query-id")
|
||||
|
||||
// The query should fail (no actual database), but QueryID should be present
|
||||
if result.QueryID != "test-query-id" {
|
||||
t.Fatalf("Expected QueryID 'test-query-id' in result, got: %s", result.QueryID)
|
||||
}
|
||||
}
|
||||
112
internal/app/methods_db_conn_test.go
Normal file
112
internal/app/methods_db_conn_test.go
Normal file
@@ -0,0 +1,112 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeTestConnectionConfig_CapsTimeout(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{Timeout: 60}
|
||||
got := normalizeTestConnectionConfig(cfg)
|
||||
if got.Timeout != testConnectionTimeoutUpperBoundSeconds {
|
||||
t.Fatalf("timeout 应被限制为 %d, got=%d", testConnectionTimeoutUpperBoundSeconds, got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTestConnectionConfig_KeepSmallTimeout(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{Timeout: 5}
|
||||
got := normalizeTestConnectionConfig(cfg)
|
||||
if got.Timeout != 5 {
|
||||
t.Fatalf("timeout 不应被修改, got=%d", got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTestConnectionConfig_ZeroTimeout(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{Timeout: 0}
|
||||
got := normalizeTestConnectionConfig(cfg)
|
||||
if got.Timeout != testConnectionTimeoutUpperBoundSeconds {
|
||||
t.Fatalf("零值 timeout 应被修正, got=%d", got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatConnSummary_BasicMySQL(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "127.0.0.1",
|
||||
Port: 3306,
|
||||
User: "root",
|
||||
Database: "test_db",
|
||||
Timeout: 30,
|
||||
}
|
||||
got := formatConnSummary(cfg)
|
||||
for _, want := range []string{"类型=mysql", "127.0.0.1:3306", "test_db", "root"} {
|
||||
if !strings.Contains(got, want) {
|
||||
t.Fatalf("formatConnSummary 应包含 %q, got=%q", want, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatConnSummary_SQLitePath(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "sqlite",
|
||||
Host: "/data/test.db",
|
||||
}
|
||||
got := formatConnSummary(cfg)
|
||||
if !strings.Contains(got, "类型=sqlite") {
|
||||
t.Fatalf("formatConnSummary 缺少类型, got=%q", got)
|
||||
}
|
||||
if !strings.Contains(got, "/data/test.db") {
|
||||
t.Fatalf("formatConnSummary 缺少路径, got=%q", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatConnSummary_SSH(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "db.internal",
|
||||
Port: 3306,
|
||||
User: "app",
|
||||
UseSSH: true,
|
||||
SSH: connection.SSHConfig{
|
||||
Host: "jump.server",
|
||||
Port: 22,
|
||||
User: "admin",
|
||||
},
|
||||
}
|
||||
got := formatConnSummary(cfg)
|
||||
if !strings.Contains(got, "SSH=jump.server:22") {
|
||||
t.Fatalf("formatConnSummary 应包含 SSH 信息, got=%q", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatConnSummary_Proxy(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "db.internal",
|
||||
Port: 3306,
|
||||
UseProxy: true,
|
||||
Proxy: connection.ProxyConfig{
|
||||
Type: "socks5",
|
||||
Host: "proxy.local",
|
||||
Port: 1080,
|
||||
},
|
||||
}
|
||||
got := formatConnSummary(cfg)
|
||||
if !strings.Contains(got, "代理=socks5://proxy.local:1080") {
|
||||
t.Fatalf("formatConnSummary 应包含代理信息, got=%q", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatConnSummary_DefaultTimeout(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "localhost",
|
||||
Port: 3306,
|
||||
}
|
||||
got := formatConnSummary(cfg)
|
||||
if !strings.Contains(got, "超时=30s") {
|
||||
t.Fatalf("formatConnSummary 默认超时应为30s, got=%q", got)
|
||||
}
|
||||
}
|
||||
174
internal/app/methods_db_create_statement_test.go
Normal file
174
internal/app/methods_db_create_statement_test.go
Normal file
@@ -0,0 +1,174 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type fakeCreateStatementDB struct {
|
||||
createSQL string
|
||||
createErr error
|
||||
columns []connection.ColumnDefinition
|
||||
columnsErr error
|
||||
|
||||
createSchema string
|
||||
createTable string
|
||||
colsSchema string
|
||||
colsTable string
|
||||
}
|
||||
|
||||
func (f *fakeCreateStatementDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeCreateStatementDB) Close() error { return nil }
|
||||
func (f *fakeCreateStatementDB) Ping() error { return nil }
|
||||
func (f *fakeCreateStatementDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
return nil, nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) Exec(query string) (int64, error) { return 0, nil }
|
||||
func (f *fakeCreateStatementDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeCreateStatementDB) GetTables(dbName string) ([]string, error) { return nil, nil }
|
||||
func (f *fakeCreateStatementDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
f.createSchema = dbName
|
||||
f.createTable = tableName
|
||||
return f.createSQL, f.createErr
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
f.colsSchema = dbName
|
||||
f.colsTable = tableName
|
||||
return f.columns, f.columnsErr
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestResolveDDLDBType_CustomDriverAlias(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
driver string
|
||||
want string
|
||||
}{
|
||||
{name: "postgresql alias", driver: "postgresql", want: "postgres"},
|
||||
{name: "pgx alias", driver: "pgx", want: "postgres"},
|
||||
{name: "kingbase8 alias", driver: "kingbase8", want: "kingbase"},
|
||||
{name: "kingbase contains alias", driver: "kingbasees", want: "kingbase"},
|
||||
{name: "dm alias", driver: "dm8", want: "dameng"},
|
||||
{name: "sqlite alias", driver: "sqlite3", want: "sqlite"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := connection.ConnectionConfig{Type: "custom", Driver: tc.driver}
|
||||
if got := resolveDDLDBType(cfg); got != tc.want {
|
||||
t.Fatalf("resolveDDLDBType() mismatch, want=%q got=%q", tc.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_CustomKingbaseUsesPublicSchema(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL",
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "kingbase8",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if dbInst.createSchema != "public" || dbInst.colsSchema != "public" {
|
||||
t.Fatalf("expected fallback schema public, got create=%q columns=%q", dbInst.createSchema, dbInst.colsSchema)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "public"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL with public schema, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_KeepQualifiedSchema(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "-- SHOW CREATE TABLE not fully supported for PostgreSQL in this MVP.",
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "integer", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "postgresql",
|
||||
}, "demo_db", "sales.orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if dbInst.createSchema != "sales" || dbInst.colsSchema != "sales" {
|
||||
t.Fatalf("expected schema sales, got create=%q columns=%q", dbInst.createSchema, dbInst.colsSchema)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "sales"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL with sales schema, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_NoFallbackForMySQL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL",
|
||||
columnsErr: errors.New("should not be called"),
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if ddl != dbInst.createSQL {
|
||||
t.Fatalf("expected original ddl for mysql, got: %s", ddl)
|
||||
}
|
||||
if dbInst.colsTable != "" {
|
||||
t.Fatalf("mysql path should not call GetColumns, got table=%q", dbInst.colsTable)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_FallbackWhenCreateStatementError(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createErr: errors.New("statement unsupported"),
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "public"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL for postgres error path, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
31
internal/app/methods_db_timeout_test.go
Normal file
31
internal/app/methods_db_timeout_test.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeTestConnectionConfig_DefaultToUpperBound(t *testing.T) {
|
||||
config := connection.ConnectionConfig{Type: "mongodb", Timeout: 0}
|
||||
got := normalizeTestConnectionConfig(config)
|
||||
if got.Timeout != testConnectionTimeoutUpperBoundSeconds {
|
||||
t.Fatalf("expected timeout=%d, got=%d", testConnectionTimeoutUpperBoundSeconds, got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTestConnectionConfig_KeepSmallerTimeout(t *testing.T) {
|
||||
config := connection.ConnectionConfig{Type: "mongodb", Timeout: 6}
|
||||
got := normalizeTestConnectionConfig(config)
|
||||
if got.Timeout != 6 {
|
||||
t.Fatalf("expected timeout=6, got=%d", got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTestConnectionConfig_ClampLargeTimeout(t *testing.T) {
|
||||
config := connection.ConnectionConfig{Type: "mongodb", Timeout: 60}
|
||||
got := normalizeTestConnectionConfig(config)
|
||||
if got.Timeout != testConnectionTimeoutUpperBoundSeconds {
|
||||
t.Fatalf("expected timeout=%d, got=%d", testConnectionTimeoutUpperBoundSeconds, got.Timeout)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
275
internal/app/methods_file_export_test.go
Normal file
275
internal/app/methods_file_export_test.go
Normal file
@@ -0,0 +1,275 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type fakeExportQueryDB struct {
|
||||
data []map[string]interface{}
|
||||
cols []string
|
||||
err error
|
||||
|
||||
lastQuery string
|
||||
lastContextTimeout time.Duration
|
||||
hasContextDeadline bool
|
||||
}
|
||||
|
||||
func (f *fakeExportQueryDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeExportQueryDB) Close() error { return nil }
|
||||
func (f *fakeExportQueryDB) Ping() error { return nil }
|
||||
func (f *fakeExportQueryDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
f.lastQuery = query
|
||||
return f.data, f.cols, f.err
|
||||
}
|
||||
func (f *fakeExportQueryDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
f.lastQuery = query
|
||||
if deadline, ok := ctx.Deadline(); ok {
|
||||
f.hasContextDeadline = true
|
||||
f.lastContextTimeout = time.Until(deadline)
|
||||
}
|
||||
return f.data, f.cols, f.err
|
||||
}
|
||||
func (f *fakeExportQueryDB) Exec(query string) (int64, error) { return 0, nil }
|
||||
func (f *fakeExportQueryDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeExportQueryDB) GetTables(dbName string) ([]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestFormatExportCellText_FloatNoScientificNotation(t *testing.T) {
|
||||
got := formatExportCellText(1.445663e+06)
|
||||
if strings.Contains(strings.ToLower(got), "e+") || strings.Contains(strings.ToLower(got), "e-") {
|
||||
t.Fatalf("不应输出科学计数法,got=%q", got)
|
||||
}
|
||||
if got != "1445663" {
|
||||
t.Fatalf("浮点整值导出异常,want=%q got=%q", "1445663", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_Markdown_NumberKeepPlainText(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.md")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{"id": 1.445663e+06},
|
||||
}
|
||||
columns := []string{"id"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "md"); err != nil {
|
||||
t.Fatalf("写入 md 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 md 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
if strings.Contains(strings.ToLower(content), "e+") || strings.Contains(strings.ToLower(content), "e-") {
|
||||
t.Fatalf("md 导出包含科学计数法: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "| 1445663 |") {
|
||||
t.Fatalf("md 导出未保留整数字面量,content=%s", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_JSON_NumberKeepPlainText(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.json")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{"id": 1.445663e+06},
|
||||
}
|
||||
columns := []string{"id"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "json"); err != nil {
|
||||
t.Fatalf("写入 json 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 json 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
if strings.Contains(strings.ToLower(content), "e+") || strings.Contains(strings.ToLower(content), "e-") {
|
||||
t.Fatalf("json 导出包含科学计数法: %s", content)
|
||||
}
|
||||
|
||||
var decoded []map[string]json.Number
|
||||
decoder := json.NewDecoder(bytes.NewReader(contentBytes))
|
||||
decoder.UseNumber()
|
||||
if err := decoder.Decode(&decoded); err != nil {
|
||||
t.Fatalf("解析导出 json 失败: %v", err)
|
||||
}
|
||||
if len(decoded) != 1 {
|
||||
t.Fatalf("导出行数异常,got=%d", len(decoded))
|
||||
}
|
||||
if decoded[0]["id"].String() != "1445663" {
|
||||
t.Fatalf("json 数值格式异常,want=1445663 got=%s", decoded[0]["id"].String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryDataForExport_UsesMinimumTimeout(t *testing.T) {
|
||||
fake := &fakeExportQueryDB{
|
||||
data: []map[string]interface{}{{"v": 1}},
|
||||
cols: []string{"v"},
|
||||
}
|
||||
_, _, err := queryDataForExport(fake, connection.ConnectionConfig{Timeout: 10}, "SELECT 1")
|
||||
if err != nil {
|
||||
t.Fatalf("queryDataForExport 返回错误: %v", err)
|
||||
}
|
||||
if !fake.hasContextDeadline {
|
||||
t.Fatal("queryDataForExport 应设置 context deadline")
|
||||
}
|
||||
if fake.lastQuery != "SELECT 1" {
|
||||
t.Fatalf("queryDataForExport 查询语句异常,want=%q got=%q", "SELECT 1", fake.lastQuery)
|
||||
}
|
||||
lowerBound := minExportQueryTimeout - 5*time.Second
|
||||
upperBound := minExportQueryTimeout + 5*time.Second
|
||||
if fake.lastContextTimeout < lowerBound || fake.lastContextTimeout > upperBound {
|
||||
t.Fatalf("导出最小超时异常,want≈%s got=%s", minExportQueryTimeout, fake.lastContextTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryDataForExport_UsesLargerConfiguredTimeout(t *testing.T) {
|
||||
fake := &fakeExportQueryDB{
|
||||
data: []map[string]interface{}{{"v": 1}},
|
||||
cols: []string{"v"},
|
||||
}
|
||||
_, _, err := queryDataForExport(fake, connection.ConnectionConfig{Timeout: 900}, "SELECT 1")
|
||||
if err != nil {
|
||||
t.Fatalf("queryDataForExport 返回错误: %v", err)
|
||||
}
|
||||
if !fake.hasContextDeadline {
|
||||
t.Fatal("queryDataForExport 应设置 context deadline")
|
||||
}
|
||||
expected := 900 * time.Second
|
||||
lowerBound := expected - 5*time.Second
|
||||
upperBound := expected + 5*time.Second
|
||||
if fake.lastContextTimeout < lowerBound || fake.lastContextTimeout > upperBound {
|
||||
t.Fatalf("导出配置超时异常,want≈%s got=%s", expected, fake.lastContextTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeout_ClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
timeout := getExportQueryTimeout(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Timeout: 30,
|
||||
})
|
||||
if timeout != minClickHouseExportQueryTimeout {
|
||||
t.Fatalf("clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeout_CustomClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
timeout := getExportQueryTimeout(connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "clickhouse",
|
||||
Timeout: 30,
|
||||
})
|
||||
if timeout != minClickHouseExportQueryTimeout {
|
||||
t.Fatalf("custom clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_HTML_EscapeAndStyle(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.html")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{
|
||||
"name": "<script>alert(1)</script>",
|
||||
"note": "line1\nline2",
|
||||
"nullable": nil,
|
||||
},
|
||||
}
|
||||
columns := []string{"name", "note", "nullable"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "html"); err != nil {
|
||||
t.Fatalf("写入 html 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 html 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
|
||||
if !strings.Contains(content, "<!DOCTYPE html>") {
|
||||
t.Fatalf("html 导出缺少 doctype: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "position: sticky") {
|
||||
t.Fatalf("html 导出缺少表头吸顶样式: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "tbody tr:nth-child(even)") {
|
||||
t.Fatalf("html 导出缺少斑马纹样式: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "<script>alert(1)</script>") {
|
||||
t.Fatalf("html 导出未进行 XSS 转义: %s", content)
|
||||
}
|
||||
if strings.Contains(content, "<script>alert(1)</script>") {
|
||||
t.Fatalf("html 导出包含未转义脚本: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "line1<br>line2") {
|
||||
t.Fatalf("html 导出换行未转为 <br>: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "<td>NULL</td>") {
|
||||
t.Fatalf("html 导出空值显示异常: %s", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_HTML_EscapeHeader(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.html")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
columnName := "<b>name</b>"
|
||||
data := []map[string]interface{}{{columnName: "ok"}}
|
||||
if err := writeRowsToFile(f, data, []string{columnName}, "html"); err != nil {
|
||||
t.Fatalf("写入 html 失败: %v", err)
|
||||
}
|
||||
contentBytes, _ := os.ReadFile(f.Name())
|
||||
content := string(contentBytes)
|
||||
if !strings.Contains(content, "<th><b>name</b></th>") || strings.Contains(content, "<th><b>name</b></th>") {
|
||||
t.Fatalf("html 表头未正确转义: %s", content)
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,9 @@ import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
@@ -20,12 +23,20 @@ var (
|
||||
|
||||
// getRedisClient gets or creates a Redis client from cache
|
||||
func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisClient, error) {
|
||||
key := getRedisClientCacheKey(config)
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, proxyErr)
|
||||
logger.Error(wrapped, "Redis 代理准备失败:%s", formatRedisConnSummary(effectiveConfig))
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
key := getRedisClientCacheKey(connectConfig)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
|
||||
redisCacheMu.Lock()
|
||||
defer redisCacheMu.Unlock()
|
||||
@@ -44,47 +55,69 @@ func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisCli
|
||||
|
||||
logger.Infof("创建 Redis 客户端实例:缓存Key=%s", shortKey)
|
||||
client := redis.NewRedisClient()
|
||||
if err := client.Connect(config); err != nil {
|
||||
logger.Error(err, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
return nil, err
|
||||
if err := client.Connect(connectConfig); err != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, err)
|
||||
logger.Error(wrapped, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
redisCache[key] = client
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func getRedisClientCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
}
|
||||
b, _ := json.Marshal(config)
|
||||
normalized := normalizeCacheKeyConfig(config)
|
||||
b, _ := json.Marshal(normalized)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func formatRedisConnSummary(config connection.ConnectionConfig) string {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString("类型=redis 地址=")
|
||||
b.WriteString(config.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.Port + '0')))
|
||||
b.WriteString(strconv.Itoa(config.Port))
|
||||
if topology := strings.TrimSpace(config.Topology); topology != "" {
|
||||
b.WriteString(" 模式=")
|
||||
b.WriteString(topology)
|
||||
}
|
||||
if len(config.Hosts) > 0 {
|
||||
b.WriteString(" 节点数=")
|
||||
b.WriteString(strconv.Itoa(len(config.Hosts)))
|
||||
}
|
||||
b.WriteString(" DB=")
|
||||
b.WriteString(string(rune(config.RedisDB + '0')))
|
||||
b.WriteString(strconv.Itoa(config.RedisDB))
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(" SSH=")
|
||||
b.WriteString(config.SSH.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.SSH.Port + '0')))
|
||||
b.WriteString(strconv.Itoa(config.SSH.Port))
|
||||
b.WriteString(" 用户=")
|
||||
b.WriteString(config.SSH.User)
|
||||
}
|
||||
if config.UseProxy {
|
||||
b.WriteString(" 代理=")
|
||||
b.WriteString(strings.ToLower(strings.TrimSpace(config.Proxy.Type)))
|
||||
b.WriteString("://")
|
||||
b.WriteString(config.Proxy.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(strconv.Itoa(config.Proxy.Port))
|
||||
if strings.TrimSpace(config.Proxy.User) != "" {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
if config.UseHTTPTunnel {
|
||||
b.WriteString(" HTTP隧道=")
|
||||
b.WriteString(strings.TrimSpace(config.HTTPTunnel.Host))
|
||||
b.WriteString(":")
|
||||
b.WriteString(strconv.Itoa(config.HTTPTunnel.Port))
|
||||
if strings.TrimSpace(config.HTTPTunnel.User) != "" {
|
||||
b.WriteString(" HTTP隧道认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
@@ -107,14 +140,20 @@ func (a *App) RedisTestConnection(config connection.ConnectionConfig) connection
|
||||
}
|
||||
|
||||
// RedisScanKeys scans keys matching a pattern
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor uint64, count int64) connection.QueryResult {
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor any, count int64) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, cursor, count)
|
||||
parsedCursor, err := parseRedisScanCursor(cursor)
|
||||
if err != nil {
|
||||
logger.Warnf("RedisScanKeys 游标解析失败,已回退到起始游标:cursor=%v err=%v", cursor, err)
|
||||
parsedCursor = 0
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, parsedCursor, count)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisScanKeys 扫描失败:pattern=%s", pattern)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -123,6 +162,82 @@ func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string,
|
||||
return connection.QueryResult{Success: true, Data: result}
|
||||
}
|
||||
|
||||
// parseRedisScanCursor normalizes a frontend-supplied SCAN cursor into a
// uint64. JSON deserialization is not type-stable, so the value may arrive
// as nil, any integer type, a float, a json.Number, or a numeric string.
// nil and blank strings default to cursor 0; negative, fractional,
// out-of-range, or non-numeric values are rejected with an error.
func parseRedisScanCursor(cursor any) (uint64, error) {
	switch v := cursor.(type) {
	case nil:
		return 0, nil
	case uint64:
		return v, nil
	case uint32:
		return uint64(v), nil
	case uint16:
		return uint64(v), nil
	case uint8:
		return uint64(v), nil
	case uint:
		return uint64(v), nil
	case int64:
		return parseRedisScanCursorFromInt(v)
	case int32:
		return parseRedisScanCursorFromInt(int64(v))
	case int16:
		return parseRedisScanCursorFromInt(int64(v))
	case int8:
		return parseRedisScanCursorFromInt(int64(v))
	case int:
		return parseRedisScanCursorFromInt(int64(v))
	case float64:
		return parseRedisScanCursorFromFloat(v)
	case float32:
		return parseRedisScanCursorFromFloat(float64(v))
	case json.Number:
		// Re-enter with the underlying string representation.
		return parseRedisScanCursor(strings.TrimSpace(v.String()))
	case string:
		trimmed := strings.TrimSpace(v)
		if trimmed == "" {
			return 0, nil
		}
		parsed, err := strconv.ParseUint(trimmed, 10, 64)
		if err != nil {
			return 0, fmt.Errorf("无效游标: %q", v)
		}
		return parsed, nil
	default:
		return 0, fmt.Errorf("不支持的游标类型: %T", cursor)
	}
}

// parseRedisScanCursorFromInt converts a signed integer cursor, rejecting
// negative values.
func parseRedisScanCursorFromInt(v int64) (uint64, error) {
	if v < 0 {
		return 0, fmt.Errorf("游标不能为负数: %d", v)
	}
	return uint64(v), nil
}

// parseRedisScanCursorFromFloat converts a float cursor. It rejects NaN,
// infinities, negatives, fractional values, and values that do not fit in
// a uint64.
func parseRedisScanCursorFromFloat(value float64) (uint64, error) {
	if math.IsNaN(value) || math.IsInf(value, 0) {
		return 0, fmt.Errorf("无效浮点游标: %v", value)
	}
	if value < 0 {
		return 0, fmt.Errorf("游标不能为负数: %v", value)
	}
	if math.Trunc(value) != value {
		return 0, fmt.Errorf("游标必须为整数: %v", value)
	}
	// float64(math.MaxUint64) rounds UP to exactly 2^64, so the comparison
	// must be >= : a float equal to 2^64 does not fit in uint64, and
	// converting it would be out of range (implementation-defined result).
	if value >= float64(math.MaxUint64) {
		return 0, fmt.Errorf("游标超出范围: %v", value)
	}
	return uint64(value), nil
}
|
||||
|
||||
// RedisGetValue gets the value of a key
|
||||
func (a *App) RedisGetValue(config connection.ConnectionConfig, key string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
@@ -338,6 +453,23 @@ func (a *App) RedisRenameKey(config connection.ConnectionConfig, oldKey, newKey
|
||||
return connection.QueryResult{Success: true, Message: "重命名成功"}
|
||||
}
|
||||
|
||||
// RedisKeyExists checks whether a key already exists
|
||||
func (a *App) RedisKeyExists(config connection.ConnectionConfig, key string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
exists, err := client.KeyExists(key)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisKeyExists 检查失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: map[string]bool{"exists": exists}}
|
||||
}
|
||||
|
||||
// RedisDeleteHashField deletes fields from a hash
|
||||
func (a *App) RedisDeleteHashField(config connection.ConnectionConfig, key string, fields []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
|
||||
50
internal/app/methods_redis_cursor_test.go
Normal file
50
internal/app/methods_redis_cursor_test.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestParseRedisScanCursor(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
input any
|
||||
want uint64
|
||||
wantErr bool
|
||||
}{
|
||||
{name: "nil defaults to zero", input: nil, want: 0},
|
||||
{name: "empty string defaults to zero", input: " ", want: 0},
|
||||
{name: "string cursor", input: "123", want: 123},
|
||||
{name: "uint64 cursor", input: uint64(456), want: 456},
|
||||
{name: "int cursor", input: int(789), want: 789},
|
||||
{name: "float cursor", input: float64(42), want: 42},
|
||||
{name: "json number cursor", input: json.Number("88"), want: 88},
|
||||
{name: "negative int rejected", input: -1, wantErr: true},
|
||||
{name: "fraction float rejected", input: float64(1.5), wantErr: true},
|
||||
{name: "invalid string rejected", input: "abc", wantErr: true},
|
||||
{name: "unsupported type rejected", input: true, wantErr: true},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := parseRedisScanCursor(tc.input)
|
||||
if tc.wantErr {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil (value=%d)", got)
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if got != tc.want {
|
||||
t.Fatalf("parseRedisScanCursor() mismatch, want=%d got=%d", tc.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -51,12 +51,13 @@ type UpdateInfo struct {
|
||||
}
|
||||
|
||||
type AppInfo struct {
|
||||
Version string `json:"version"`
|
||||
Author string `json:"author"`
|
||||
RepoURL string `json:"repoUrl,omitempty"`
|
||||
IssueURL string `json:"issueUrl,omitempty"`
|
||||
ReleaseURL string `json:"releaseUrl,omitempty"`
|
||||
BuildTime string `json:"buildTime,omitempty"`
|
||||
Version string `json:"version"`
|
||||
Author string `json:"author"`
|
||||
RepoURL string `json:"repoUrl,omitempty"`
|
||||
IssueURL string `json:"issueUrl,omitempty"`
|
||||
ReleaseURL string `json:"releaseUrl,omitempty"`
|
||||
CommunityURL string `json:"communityUrl,omitempty"`
|
||||
BuildTime string `json:"buildTime,omitempty"`
|
||||
}
|
||||
|
||||
type updateDownloadResult struct {
|
||||
@@ -137,12 +138,13 @@ func (a *App) CheckForUpdates() connection.QueryResult {
|
||||
|
||||
func (a *App) GetAppInfo() connection.QueryResult {
|
||||
info := AppInfo{
|
||||
Version: getCurrentVersion(),
|
||||
Author: getCurrentAuthor(),
|
||||
RepoURL: "https://github.com/" + updateRepo,
|
||||
IssueURL: "https://github.com/" + updateRepo + "/issues",
|
||||
ReleaseURL: "https://github.com/" + updateRepo + "/releases",
|
||||
BuildTime: strings.TrimSpace(AppBuildTime),
|
||||
Version: getCurrentVersion(),
|
||||
Author: getCurrentAuthor(),
|
||||
RepoURL: "https://github.com/" + updateRepo,
|
||||
IssueURL: "https://github.com/" + updateRepo + "/issues",
|
||||
ReleaseURL: "https://github.com/" + updateRepo + "/releases",
|
||||
CommunityURL: "https://aibook.ren",
|
||||
BuildTime: strings.TrimSpace(AppBuildTime),
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "OK", Data: info}
|
||||
}
|
||||
@@ -233,6 +235,49 @@ func (a *App) InstallUpdateAndRestart() connection.QueryResult {
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) OpenDownloadedUpdateDirectory() connection.QueryResult {
|
||||
a.updateMu.Lock()
|
||||
staged := a.updateState.staged
|
||||
a.updateMu.Unlock()
|
||||
if staged == nil {
|
||||
return connection.QueryResult{Success: false, Message: "未找到已下载的更新包"}
|
||||
}
|
||||
assetPath := strings.TrimSpace(staged.FilePath)
|
||||
if assetPath == "" {
|
||||
return connection.QueryResult{Success: false, Message: "更新包路径为空"}
|
||||
}
|
||||
dirPath := strings.TrimSpace(filepath.Dir(assetPath))
|
||||
if dirPath == "" || dirPath == "." {
|
||||
return connection.QueryResult{Success: false, Message: "无法解析更新目录"}
|
||||
}
|
||||
if stat, err := os.Stat(dirPath); err != nil || !stat.IsDir() {
|
||||
return connection.QueryResult{Success: false, Message: "更新目录不存在或不可访问"}
|
||||
}
|
||||
|
||||
var cmd *exec.Cmd
|
||||
switch stdRuntime.GOOS {
|
||||
case "darwin":
|
||||
cmd = exec.Command("open", dirPath)
|
||||
case "windows":
|
||||
cmd = exec.Command("explorer", dirPath)
|
||||
case "linux":
|
||||
cmd = exec.Command("xdg-open", dirPath)
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前平台暂不支持打开目录:%s", stdRuntime.GOOS)}
|
||||
}
|
||||
if err := cmd.Start(); err != nil {
|
||||
logger.Error(err, "打开更新目录失败")
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("打开更新目录失败:%v", err)}
|
||||
}
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: fmt.Sprintf("已打开安装目录:%s", dirPath),
|
||||
Data: map[string]any{
|
||||
"path": dirPath,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) downloadAndStageUpdate(info UpdateInfo) connection.QueryResult {
|
||||
workspaceDir := strings.TrimSpace(resolveUpdateWorkspaceDir(info.LatestVersion))
|
||||
if workspaceDir == "" {
|
||||
@@ -374,7 +419,7 @@ func getCurrentAuthor() string {
|
||||
}
|
||||
|
||||
func fetchLatestRelease() (*githubRelease, error) {
|
||||
client := &http.Client{Timeout: 15 * time.Second}
|
||||
client := newHTTPClientWithGlobalProxy(15 * time.Second)
|
||||
req, err := http.NewRequest(http.MethodGet, updateAPIURL, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -451,7 +496,7 @@ func fetchReleaseSHA256(assets []githubAsset) (map[string]string, error) {
|
||||
return nil, errors.New("Release 未提供 SHA256SUMS")
|
||||
}
|
||||
|
||||
client := &http.Client{Timeout: 15 * time.Second}
|
||||
client := newHTTPClientWithGlobalProxy(15 * time.Second)
|
||||
req, err := http.NewRequest(http.MethodGet, checksumURL, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -522,7 +567,7 @@ func (w *downloadProgressWriter) Write(p []byte) (int, error) {
|
||||
}
|
||||
|
||||
func downloadFileWithHash(url, filePath string, onProgress func(downloaded, total int64)) (string, error) {
|
||||
client := &http.Client{Timeout: 10 * time.Minute}
|
||||
client := newHTTPClientWithGlobalProxy(10 * time.Minute)
|
||||
req, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
|
||||
@@ -5,6 +5,66 @@ import (
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// leadingSQLKeyword returns the first SQL keyword of query, lowercased,
// after skipping leading whitespace and comments (--, # and /* */).
// It returns "" when the query is empty, begins with a non-word character,
// or consists only of comments (including an unterminated comment).
func leadingSQLKeyword(query string) string {
	text := strings.TrimSpace(query)
	for len(text) > 0 {
		trimmed := strings.TrimLeft(text, " \t\r\n")
		if trimmed == "" {
			return ""
		}
		text = trimmed

		// Strip one leading comment per pass; loop until real content.
		if strings.HasPrefix(text, "--") || strings.HasPrefix(text, "#") {
			nl := strings.IndexByte(text, '\n')
			if nl < 0 {
				return ""
			}
			text = text[nl+1:]
			continue
		}
		if strings.HasPrefix(text, "/*") {
			end := strings.Index(text, "*/")
			if end < 0 {
				return ""
			}
			text = text[end+2:]
			continue
		}
		break
	}

	if text == "" {
		return ""
	}
	// Take the run of word characters (letters, digits, underscore).
	for i, r := range text {
		if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' {
			continue
		}
		if i == 0 {
			return ""
		}
		return strings.ToLower(text[:i])
	}
	return strings.ToLower(text)
}

// isReadOnlySQLQuery reports whether query looks like a read-only statement.
// MongoDB JSON commands (text starting with "{") are treated as read-only.
//
// NOTE(review): a leading WITH is classified as read-only here, but Postgres
// allows data-modifying CTEs (WITH ... INSERT/UPDATE/DELETE) — confirm
// callers tolerate this before relying on it as a write guard.
func isReadOnlySQLQuery(dbType string, query string) bool {
	if strings.ToLower(strings.TrimSpace(dbType)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
		return true
	}

	switch leadingSQLKeyword(query) {
	case "select", "with", "show", "describe", "desc", "explain", "pragma", "values":
		return true
	}
	return false
}
|
||||
|
||||
func sanitizeSQLForPgLike(dbType string, query string) string {
|
||||
switch strings.ToLower(strings.TrimSpace(dbType)) {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
|
||||
175
internal/app/sql_split.go
Normal file
175
internal/app/sql_split.go
Normal file
@@ -0,0 +1,175 @@
|
||||
package app
|
||||
|
||||
import "strings"
|
||||
|
||||
// splitSQLStatements splits a SQL script into individual statements at
// semicolons (including the full-width ";" variant), while respecting
// single/double/backtick quoting, SQL-standard doubled single quotes (''),
// MySQL-style backslash escapes inside quotes, line comments (-- and #),
// block comments (/* */), and PostgreSQL/Kingbase dollar-quoting
// ($$...$$ or $tag$...$tag$). Statements are trimmed; empty ones dropped.
func splitSQLStatements(sql string) []string {
	src := strings.ReplaceAll(sql, "\r\n", "\n")

	var (
		out       []string
		buf       strings.Builder
		sQuote    bool   // inside '...'
		dQuote    bool   // inside "..."
		btQuote   bool   // inside `...`
		escNext   bool   // previous byte was a backslash inside a quote
		lineCmt   bool   // inside a -- or # comment
		blockCmt  bool   // inside a /* */ comment
		dollarTag string // active dollar-quote tag, "" when none
	)

	flush := func() {
		if stmt := strings.TrimSpace(buf.String()); stmt != "" {
			out = append(out, stmt)
		}
		buf.Reset()
	}

	for i := 0; i < len(src); i++ {
		c := src[i]
		var peek byte
		if i+1 < len(src) {
			peek = src[i+1]
		}

		switch {
		case lineCmt:
			if c == '\n' {
				lineCmt = false
			}
			buf.WriteByte(c)
		case blockCmt:
			buf.WriteByte(c)
			if c == '*' && peek == '/' {
				buf.WriteByte('/')
				i++
				blockCmt = false
			}
		case dollarTag != "":
			if strings.HasPrefix(src[i:], dollarTag) {
				buf.WriteString(dollarTag)
				i += len(dollarTag) - 1
				dollarTag = ""
			} else {
				buf.WriteByte(c)
			}
		case escNext:
			escNext = false
			buf.WriteByte(c)
		case (sQuote || dQuote) && c == '\\':
			escNext = true
			buf.WriteByte(c)
		case c == '\'' && !dQuote && !btQuote:
			if sQuote && peek == '\'' {
				// SQL-standard escape: '' is a literal quote, stay inside.
				buf.WriteByte(c)
				buf.WriteByte(peek)
				i++
			} else {
				sQuote = !sQuote
				buf.WriteByte(c)
			}
		case c == '"' && !sQuote && !btQuote:
			dQuote = !dQuote
			buf.WriteByte(c)
		case c == '`' && !sQuote && !dQuote:
			btQuote = !btQuote
			buf.WriteByte(c)
		case sQuote || dQuote || btQuote:
			// Inside any quote: copy verbatim.
			buf.WriteByte(c)
		case c == '-' && peek == '-', c == '#':
			lineCmt = true
			buf.WriteByte(c)
		case c == '/' && peek == '*':
			blockCmt = true
			buf.WriteString("/*")
			i++
		case c == '$' && parseSQLDollarTag(src[i:]) != "":
			tag := parseSQLDollarTag(src[i:])
			dollarTag = tag
			buf.WriteString(tag)
			i += len(tag) - 1
		case c == ';':
			flush()
		case c == 0xEF && i+2 < len(src) && src[i+1] == 0xBC && src[i+2] == 0x9B:
			// Full-width semicolon ";" (UTF-8: EF BC 9B).
			flush()
			i += 2
		default:
			buf.WriteByte(c)
		}
	}

	flush()
	return out
}

// parseSQLDollarTag returns the PostgreSQL/Kingbase dollar-quoting tag
// ($$ or $name$) that begins at the start of s, or "" when s does not
// start a valid tag (tag names allow ASCII letters, digits, underscore).
func parseSQLDollarTag(s string) string {
	if len(s) < 2 || s[0] != '$' {
		return ""
	}
	for i := 1; i < len(s); i++ {
		switch c := s[i]; {
		case c == '$':
			return s[:i+1]
		case c >= 'a' && c <= 'z', c >= 'A' && c <= 'Z', c >= '0' && c <= '9', c == '_':
			// valid tag character — keep scanning for the closing '$'
		default:
			return ""
		}
	}
	return ""
}
|
||||
209
internal/app/sql_split_stream.go
Normal file
209
internal/app/sql_split_stream.go
Normal file
@@ -0,0 +1,209 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// sqlStreamSplitter 是一个流式 SQL 语句拆分器,适用于处理大文件。
|
||||
// 调用方通过 Feed(chunk) 逐块喂入数据,通过 Flush() 获取最后一条残余语句。
|
||||
// 内部维护与 splitSQLStatements 完全一致的状态机逻辑。
|
||||
type sqlStreamSplitter struct {
|
||||
cur strings.Builder
|
||||
inSingle bool
|
||||
inDouble bool
|
||||
inBacktick bool
|
||||
escaped bool
|
||||
inLineComment bool
|
||||
inBlockComment bool
|
||||
dollarTag string
|
||||
}
|
||||
|
||||
// Feed 将一个 chunk 喂入拆分器,返回在此 chunk 中完成的 SQL 语句列表。
|
||||
func (s *sqlStreamSplitter) Feed(chunk []byte) []string {
|
||||
var statements []string
|
||||
text := string(chunk)
|
||||
|
||||
for i := 0; i < len(text); i++ {
|
||||
ch := text[i]
|
||||
next := byte(0)
|
||||
if i+1 < len(text) {
|
||||
next = text[i+1]
|
||||
}
|
||||
|
||||
// 行注释
|
||||
if s.inLineComment {
|
||||
if ch == '\n' {
|
||||
s.inLineComment = false
|
||||
}
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
|
||||
// 块注释
|
||||
if s.inBlockComment {
|
||||
s.cur.WriteByte(ch)
|
||||
if ch == '*' && next == '/' {
|
||||
s.cur.WriteByte('/')
|
||||
i++
|
||||
s.inBlockComment = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Dollar-quoting
|
||||
if s.dollarTag != "" {
|
||||
if strings.HasPrefix(text[i:], s.dollarTag) {
|
||||
s.cur.WriteString(s.dollarTag)
|
||||
i += len(s.dollarTag) - 1
|
||||
s.dollarTag = ""
|
||||
} else {
|
||||
s.cur.WriteByte(ch)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// 转义字符
|
||||
if s.escaped {
|
||||
s.escaped = false
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
if (s.inSingle || s.inDouble) && ch == '\\' {
|
||||
s.escaped = true
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
|
||||
// 字符串开闭
|
||||
if !s.inDouble && !s.inBacktick && ch == '\'' {
|
||||
if s.inSingle && next == '\'' {
|
||||
// SQL 标准转义:两个连续单引号
|
||||
s.cur.WriteByte(ch)
|
||||
s.cur.WriteByte(next)
|
||||
i++
|
||||
continue
|
||||
}
|
||||
s.inSingle = !s.inSingle
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
if !s.inSingle && !s.inBacktick && ch == '"' {
|
||||
s.inDouble = !s.inDouble
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
if !s.inSingle && !s.inDouble && ch == '`' {
|
||||
s.inBacktick = !s.inBacktick
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
|
||||
// 在引号/反引号内部不做任何判断
|
||||
if s.inSingle || s.inDouble || s.inBacktick {
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
|
||||
// 行注释开始
|
||||
if ch == '-' && next == '-' {
|
||||
s.inLineComment = true
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
if ch == '#' {
|
||||
s.inLineComment = true
|
||||
s.cur.WriteByte(ch)
|
||||
continue
|
||||
}
|
||||
|
||||
// 块注释开始
|
||||
if ch == '/' && next == '*' {
|
||||
s.inBlockComment = true
|
||||
s.cur.WriteString("/*")
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
// Dollar-quoting 开始
|
||||
if ch == '$' {
|
||||
if tag := parseSQLDollarTag(text[i:]); tag != "" {
|
||||
s.dollarTag = tag
|
||||
s.cur.WriteString(tag)
|
||||
i += len(tag) - 1
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// 分号分隔
|
||||
if ch == ';' {
|
||||
stmt := strings.TrimSpace(s.cur.String())
|
||||
if stmt != "" {
|
||||
statements = append(statements, stmt)
|
||||
}
|
||||
s.cur.Reset()
|
||||
continue
|
||||
}
|
||||
// 全角分号
|
||||
if ch == 0xEF && i+2 < len(text) && text[i+1] == 0xBC && text[i+2] == 0x9B {
|
||||
stmt := strings.TrimSpace(s.cur.String())
|
||||
if stmt != "" {
|
||||
statements = append(statements, stmt)
|
||||
}
|
||||
s.cur.Reset()
|
||||
i += 2
|
||||
continue
|
||||
}
|
||||
|
||||
s.cur.WriteByte(ch)
|
||||
}
|
||||
|
||||
return statements
|
||||
}
|
||||
|
||||
// Flush 返回缓冲区中剩余的不完整语句(文件结束时调用)。
|
||||
func (s *sqlStreamSplitter) Flush() string {
|
||||
stmt := strings.TrimSpace(s.cur.String())
|
||||
s.cur.Reset()
|
||||
return stmt
|
||||
}
|
||||
|
||||
// streamSQLFile 从 reader 中流式读取 SQL 并逐条回调。
|
||||
// onStatement 返回 error 时停止读取并返回该 error。
|
||||
// 返回总处理语句数和可能的错误。
|
||||
func streamSQLFile(reader io.Reader, onStatement func(index int, stmt string) error) (int, error) {
|
||||
splitter := &sqlStreamSplitter{}
|
||||
scanner := bufio.NewScanner(reader)
|
||||
// 设置最大 token 为 4MB,处理超长单行
|
||||
const maxLineSize = 4 * 1024 * 1024
|
||||
scanner.Buffer(make([]byte, 0, 64*1024), maxLineSize)
|
||||
|
||||
count := 0
|
||||
for scanner.Scan() {
|
||||
line := scanner.Bytes()
|
||||
// 保持换行符,因为行注释依赖 \n 来结束
|
||||
lineWithNewline := append(line, '\n')
|
||||
stmts := splitter.Feed(lineWithNewline)
|
||||
for _, stmt := range stmts {
|
||||
if err := onStatement(count, stmt); err != nil {
|
||||
return count, err
|
||||
}
|
||||
count++
|
||||
}
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return count, err
|
||||
}
|
||||
|
||||
// 处理文件末尾不以分号结尾的最后一条语句
|
||||
if last := splitter.Flush(); last != "" {
|
||||
if err := onStatement(count, last); err != nil {
|
||||
return count, err
|
||||
}
|
||||
count++
|
||||
}
|
||||
|
||||
return count, nil
|
||||
}
|
||||
113
internal/app/sql_split_test.go
Normal file
113
internal/app/sql_split_test.go
Normal file
@@ -0,0 +1,113 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSplitSQLStatements_BasicSplit(t *testing.T) {
|
||||
input := "SELECT 1; SELECT 2; SELECT 3"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT 1", "SELECT 2", "SELECT 3"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_QuotedSemicolon(t *testing.T) {
|
||||
input := `SELECT 'hello;world'; SELECT 2`
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{`SELECT 'hello;world'`, "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_LineComment(t *testing.T) {
|
||||
input := "SELECT 1; -- this is a comment;\nSELECT 2"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT 1", "-- this is a comment;\nSELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_BlockComment(t *testing.T) {
|
||||
input := "SELECT /* ; */ 1; SELECT 2"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT /* ; */ 1", "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_EmptyInput(t *testing.T) {
|
||||
got := splitSQLStatements("")
|
||||
if len(got) != 0 {
|
||||
t.Errorf("splitSQLStatements(\"\") = %v, want empty slice", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_SingleStatement(t *testing.T) {
|
||||
input := "SELECT * FROM users WHERE id = 1"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT * FROM users WHERE id = 1"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_DollarQuoting(t *testing.T) {
|
||||
input := "SELECT $tag$hello;world$tag$; SELECT 2"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT $tag$hello;world$tag$", "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_FullWidthSemicolon(t *testing.T) {
|
||||
input := "SELECT 1;SELECT 2"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT 1", "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_Backtick(t *testing.T) {
|
||||
input := "SELECT `col;name` FROM t; SELECT 2"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT `col;name` FROM t", "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_TrailingSemicolon(t *testing.T) {
|
||||
input := "SELECT 1; SELECT 2;"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT 1", "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_SQLEscapedQuote(t *testing.T) {
|
||||
input := "SELECT 'it''s a test'; SELECT 2"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"SELECT 'it''s a test'", "SELECT 2"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitSQLStatements_SQLEscapedQuoteMultiple(t *testing.T) {
|
||||
input := "INSERT INTO t VALUES ('O''Brien', 'it''s OK'); SELECT 1"
|
||||
got := splitSQLStatements(input)
|
||||
want := []string{"INSERT INTO t VALUES ('O''Brien', 'it''s OK')", "SELECT 1"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("splitSQLStatements(%q) = %v, want %v", input, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
package connection
|
||||
|
||||
// SSHConfig holds SSH connection details
|
||||
// SSHConfig 存储 SSH 隧道连接配置。
|
||||
type SSHConfig struct {
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
@@ -9,7 +9,7 @@ type SSHConfig struct {
|
||||
KeyPath string `json:"keyPath"`
|
||||
}
|
||||
|
||||
// ProxyConfig holds proxy connection details
|
||||
// ProxyConfig 存储代理连接配置。
|
||||
type ProxyConfig struct {
|
||||
Type string `json:"type"` // socks5 | http
|
||||
Host string `json:"host"`
|
||||
@@ -18,46 +18,67 @@ type ProxyConfig struct {
|
||||
Password string `json:"password,omitempty"`
|
||||
}
|
||||
|
||||
// ConnectionConfig holds database connection details including SSH
|
||||
type ConnectionConfig struct {
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
UseProxy bool `json:"useProxy,omitempty"`
|
||||
Proxy ProxyConfig `json:"proxy,omitempty"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
// HTTPTunnelConfig holds the HTTP CONNECT tunnel configuration.
// User and Password are optional tunnel credentials.
type HTTPTunnelConfig struct {
	Host     string `json:"host"`
	Port     int    `json:"port"`
	User     string `json:"user,omitempty"`
	Password string `json:"password,omitempty"`
}
|
||||
|
||||
// QueryResult is the standard response format for Wails methods
|
||||
// ConnectionConfig 存储数据库连接的完整配置,包括 SSH、代理、SSL 等网络层设置。
|
||||
type ConnectionConfig struct {
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSL bool `json:"useSSL,omitempty"` // MySQL-like SSL/TLS switch
|
||||
SSLMode string `json:"sslMode,omitempty"` // preferred | required | skip-verify | disable
|
||||
SSLCertPath string `json:"sslCertPath,omitempty"` // TLS client certificate path (e.g., Dameng)
|
||||
SSLKeyPath string `json:"sslKeyPath,omitempty"` // TLS client private key path (e.g., Dameng)
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
UseProxy bool `json:"useProxy,omitempty"`
|
||||
Proxy ProxyConfig `json:"proxy,omitempty"`
|
||||
UseHTTPTunnel bool `json:"useHttpTunnel,omitempty"`
|
||||
HTTPTunnel HTTPTunnelConfig `json:"httpTunnel,omitempty"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica | cluster
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
}
|
||||
|
||||
// ResultSetData is a single query result set (rows plus column names),
// used in multi-result-set scenarios.
type ResultSetData struct {
	Rows    []map[string]interface{} `json:"rows"`
	Columns []string                 `json:"columns"`
}
|
||||
|
||||
// QueryResult is the uniform response envelope for Wails-bound backend
// methods; the frontend receives backend results through this struct.
type QueryResult struct {
	Success bool        `json:"success"`
	Message string      `json:"message"`
	Data    interface{} `json:"data"`
	Fields  []string    `json:"fields,omitempty"`
	QueryID string      `json:"queryId,omitempty"` // Unique ID for query cancellation
}
|
||||
|
||||
// ColumnDefinition represents a table column
|
||||
// ColumnDefinition 描述表的一个列定义。
|
||||
type ColumnDefinition struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
@@ -68,16 +89,17 @@ type ColumnDefinition struct {
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
// IndexDefinition describes one index entry on a table.
type IndexDefinition struct {
	Name       string `json:"name"`
	ColumnName string `json:"columnName"`
	NonUnique  int    `json:"nonUnique"`
	SeqInIndex int    `json:"seqInIndex"`
	IndexType  string `json:"indexType"`
	SubPart    int    `json:"subPart,omitempty"`
}
|
||||
|
||||
// ForeignKeyDefinition represents a foreign key
|
||||
// ForeignKeyDefinition 描述表的一个外键定义。
|
||||
type ForeignKeyDefinition struct {
|
||||
Name string `json:"name"`
|
||||
ColumnName string `json:"columnName"`
|
||||
@@ -86,7 +108,7 @@ type ForeignKeyDefinition struct {
|
||||
ConstraintName string `json:"constraintName"`
|
||||
}
|
||||
|
||||
// TriggerDefinition represents a trigger
|
||||
// TriggerDefinition 描述表的一个触发器定义。
|
||||
type TriggerDefinition struct {
|
||||
Name string `json:"name"`
|
||||
Timing string `json:"timing"` // BEFORE/AFTER
|
||||
@@ -94,26 +116,27 @@ type TriggerDefinition struct {
|
||||
Statement string `json:"statement"`
|
||||
}
|
||||
|
||||
// ColumnDefinitionWithTable represents a column tagged with its table name,
// used for cross-table search and SQL autocomplete.
type ColumnDefinitionWithTable struct {
	TableName string `json:"tableName"`
	Name      string `json:"name"`
	Type      string `json:"type"`
}
|
||||
|
||||
// UpdateRow represents a single row update: Keys supplies the WHERE
// conditions, Values supplies the SET assignments.
type UpdateRow struct {
	Keys   map[string]interface{} `json:"keys"`
	Values map[string]interface{} `json:"values"`
}
|
||||
|
||||
// ChangeSet represents a batch of row changes: inserts, updates and deletes
// to be applied together to one table.
type ChangeSet struct {
	Inserts []map[string]interface{} `json:"inserts"` // new rows, keyed by column name
	Updates []UpdateRow              `json:"updates"`
	Deletes []map[string]interface{} `json:"deletes"` // key columns identifying rows to delete
}
|
||||
|
||||
// MongoMemberInfo 描述 MongoDB 副本集成员的信息。
|
||||
type MongoMemberInfo struct {
|
||||
Host string `json:"host"`
|
||||
Role string `json:"role"`
|
||||
|
||||
812
internal/db/clickhouse_impl.go
Normal file
812
internal/db/clickhouse_impl.go
Normal file
@@ -0,0 +1,812 @@
|
||||
//go:build gonavi_full_drivers || gonavi_clickhouse_driver
|
||||
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
clickhouse "github.com/ClickHouse/clickhouse-go/v2"
|
||||
)
|
||||
|
||||
// Defaults and limits for ClickHouse connections.
const (
	defaultClickHousePort     = 9000      // native TCP protocol port
	defaultClickHouseUser     = "default" // ClickHouse's built-in default account
	defaultClickHouseDatabase = "default"
	// minClickHouseReadTimeout is the floor applied to the driver read
	// timeout so that long-running queries are not cut off by a short
	// connect timeout.
	minClickHouseReadTimeout = 5 * time.Minute
)
|
||||
|
||||
// ClickHouseDB is a ClickHouse connection handle backed by database/sql,
// optionally tunnelled through a local SSH port forward.
type ClickHouseDB struct {
	conn        *sql.DB             // open connection pool; nil until Connect succeeds
	pingTimeout time.Duration       // timeout used by Ping; derived from the connect timeout
	forwarder   *ssh.LocalForwarder // local SSH forward; nil when connecting directly
	database    string              // default database used when callers pass an empty name
}
|
||||
|
||||
// normalizeClickHouseConfig returns config with ClickHouse defaults filled
// in. It first folds any clickhouse:// URI fields into the config via
// applyClickHouseURI, then defaults host, port, user and database.
// Explicitly configured values always win over the defaults.
func normalizeClickHouseConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
	normalized := applyClickHouseURI(config)
	if strings.TrimSpace(normalized.Host) == "" {
		normalized.Host = "localhost"
	}
	if normalized.Port <= 0 {
		normalized.Port = defaultClickHousePort
	}
	if strings.TrimSpace(normalized.User) == "" {
		normalized.User = defaultClickHouseUser
	}
	if strings.TrimSpace(normalized.Database) == "" {
		normalized.Database = defaultClickHouseDatabase
	}
	return normalized
}
|
||||
|
||||
// applyClickHouseURI merges fields parsed from a clickhouse:// URI into the
// config. Values already set explicitly on the config take precedence over
// URI-derived ones. URIs with other schemes (or unparseable ones) leave the
// config untouched.
func applyClickHouseURI(config connection.ConnectionConfig) connection.ConnectionConfig {
	uriText := strings.TrimSpace(config.URI)
	if uriText == "" {
		return config
	}
	lowerURI := strings.ToLower(uriText)
	if !strings.HasPrefix(lowerURI, "clickhouse://") {
		return config
	}

	parsed, err := url.Parse(uriText)
	if err != nil {
		// Malformed URI: silently keep the explicit config.
		return config
	}

	// Credentials from the URI userinfo, only when not set explicitly.
	if parsed.User != nil {
		if strings.TrimSpace(config.User) == "" {
			config.User = parsed.User.Username()
		}
		if pass, ok := parsed.User.Password(); ok && config.Password == "" {
			config.Password = pass
		}
	}

	// Database: URI path first, then the ?database= query parameter.
	if dbName := strings.TrimPrefix(strings.TrimSpace(parsed.Path), "/"); dbName != "" && strings.TrimSpace(config.Database) == "" {
		config.Database = dbName
	}
	if strings.TrimSpace(config.Database) == "" {
		if dbName := strings.TrimSpace(parsed.Query().Get("database")); dbName != "" {
			config.Database = dbName
		}
	}

	// Host/port: the URI authority is consulted only when no host was set;
	// a configured port (or the ClickHouse default) backs the URI's port.
	defaultPort := config.Port
	if defaultPort <= 0 {
		defaultPort = defaultClickHousePort
	}
	if strings.TrimSpace(config.Host) == "" {
		host, port, ok := parseHostPortWithDefault(parsed.Host, defaultPort)
		if ok {
			config.Host = host
			config.Port = port
		}
	}
	if config.Port <= 0 {
		config.Port = defaultPort
	}
	return config
}
|
||||
|
||||
// buildClickHouseOptions translates the connection config into clickhouse-go
// driver options: address, credentials, wire protocol, timeouts and optional
// TLS. The read timeout is raised to at least minClickHouseReadTimeout so
// long-running queries survive a short connect timeout.
func (c *ClickHouseDB) buildClickHouseOptions(config connection.ConnectionConfig) *clickhouse.Options {
	connectTimeout := getConnectTimeout(config)
	readTimeout := connectTimeout
	if readTimeout < minClickHouseReadTimeout {
		readTimeout = minClickHouseReadTimeout
	}
	protocol := detectClickHouseProtocol(config)
	opts := &clickhouse.Options{
		Protocol: protocol,
		Addr: []string{
			net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
		},
		Auth: clickhouse.Auth{
			Database: strings.TrimSpace(config.Database),
			Username: strings.TrimSpace(config.User),
			Password: config.Password,
		},
		DialTimeout: connectTimeout,
		ReadTimeout: readTimeout,
	}
	// TLS only when the shared helper derives a config (SSL enabled).
	if tlsConfig := resolveGenericTLSConfig(config); tlsConfig != nil {
		opts.TLS = tlsConfig
	}
	return opts
}
|
||||
|
||||
// detectClickHouseProtocol picks the ClickHouse wire protocol from the
// config: HTTP when the URI scheme is http(s) or the port is a conventional
// HTTP port (8123 plain, 8443 TLS); native TCP otherwise.
func detectClickHouseProtocol(config connection.ConnectionConfig) clickhouse.Protocol {
	uriText := strings.ToLower(strings.TrimSpace(config.URI))
	if strings.HasPrefix(uriText, "http://") || strings.HasPrefix(uriText, "https://") {
		return clickhouse.HTTP
	}
	if config.Port == 8123 || config.Port == 8443 {
		return clickhouse.HTTP
	}
	return clickhouse.Native
}
|
||||
|
||||
func isClickHouseProtocolMismatch(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
text := strings.ToLower(strings.TrimSpace(err.Error()))
|
||||
if text == "" {
|
||||
return false
|
||||
}
|
||||
return strings.Contains(text, "unexpected packet [72]") ||
|
||||
(strings.Contains(text, "unexpected packet") && strings.Contains(text, "handshake")) ||
|
||||
strings.Contains(text, "http response to https client") ||
|
||||
strings.Contains(text, "malformed http response")
|
||||
}
|
||||
|
||||
// withClickHouseProtocol returns a copy of config whose port is defaulted
// for the given protocol (8123 for HTTP, 9000 for native) when no port is
// set. An explicitly configured port is never changed.
func withClickHouseProtocol(config connection.ConnectionConfig, protocol clickhouse.Protocol) connection.ConnectionConfig {
	next := config
	switch protocol {
	case clickhouse.HTTP:
		if next.Port == 0 {
			next.Port = 8123
		}
	default:
		if next.Port == 0 {
			next.Port = defaultClickHousePort
		}
	}
	return next
}
|
||||
|
||||
// Connect opens a ClickHouse connection for the given config.
// Sequence: verify the pure-Go driver is enabled, tear down any previous
// forwarder/connection, normalize the config (URI fields + defaults),
// optionally route through an SSH local port forward, then attempt to
// connect. Attempts fan out over an optional SSL→plaintext fallback and a
// Native↔HTTP protocol fallback; the first attempt whose Ping succeeds
// wins. All accumulated failures are reported when every attempt fails.
func (c *ClickHouseDB) Connect(config connection.ConnectionConfig) error {
	// Fail fast when the ClickHouse driver is not enabled at runtime.
	if supported, reason := DriverRuntimeSupportStatus("clickhouse"); !supported {
		if strings.TrimSpace(reason) == "" {
			reason = "ClickHouse 纯 Go 驱动未启用,请先在驱动管理中安装启用"
		}
		return fmt.Errorf("%s", reason)
	}

	// Drop leftovers from a previous Connect so reconnects start clean.
	if c.forwarder != nil {
		_ = c.forwarder.Close()
		c.forwarder = nil
	}
	if c.conn != nil {
		_ = c.conn.Close()
		c.conn = nil
	}

	runConfig := normalizeClickHouseConfig(config)
	c.pingTimeout = getConnectTimeout(runConfig)
	c.database = runConfig.Database

	// With SSH enabled, dial through a local port forward and rewrite
	// host/port to the forwarder's local listening address.
	if runConfig.UseSSH {
		logger.Infof("ClickHouse 使用 SSH 连接:地址=%s:%d 用户=%s", runConfig.Host, runConfig.Port, runConfig.User)
		forwarder, err := ssh.GetOrCreateLocalForwarder(runConfig.SSH, runConfig.Host, runConfig.Port)
		if err != nil {
			return fmt.Errorf("创建 SSH 隧道失败:%w", err)
		}
		c.forwarder = forwarder

		host, portText, err := net.SplitHostPort(forwarder.LocalAddr)
		if err != nil {
			return fmt.Errorf("解析本地转发地址失败:%w", err)
		}
		port, err := strconv.Atoi(portText)
		if err != nil {
			return fmt.Errorf("解析本地端口失败:%w", err)
		}

		runConfig.Host = host
		runConfig.Port = port
		runConfig.UseSSH = false
		logger.Infof("ClickHouse 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
	}

	// Attempt list: the config as-is, plus an SSL-disabled variant when the
	// SSL mode is "preferred" (best effort rather than required).
	attempts := []connection.ConnectionConfig{runConfig}
	if shouldTrySSLPreferredFallback(runConfig) {
		attempts = append(attempts, withSSLDisabled(runConfig))
	}

	var failures []string
	for idx, attempt := range attempts {
		// Try the detected protocol first, then the alternate one, since a
		// wrong port/protocol pairing (Native vs HTTP) is a common mistake.
		primaryProtocol := detectClickHouseProtocol(attempt)
		protocols := []clickhouse.Protocol{primaryProtocol}
		if primaryProtocol == clickhouse.Native {
			protocols = append(protocols, clickhouse.HTTP)
		} else {
			protocols = append(protocols, clickhouse.Native)
		}

		for pIdx, protocol := range protocols {
			protocolConfig := withClickHouseProtocol(attempt, protocol)
			c.conn = clickhouse.OpenDB(c.buildClickHouseOptions(protocolConfig))
			if err := c.Ping(); err != nil {
				failures = append(failures, fmt.Sprintf("第%d次连接验证失败(protocol=%s): %v", idx+1, protocol.String(), err))
				if c.conn != nil {
					_ = c.conn.Close()
					c.conn = nil
				}
				if pIdx == 0 && !isClickHouseProtocolMismatch(err) {
					// The first failure does not look like a protocol
					// mismatch, so skip the pointless retry with the
					// alternate protocol.
					break
				}
				continue
			}
			if idx > 0 {
				logger.Warnf("ClickHouse SSL 优先连接失败,已回退至明文连接")
			}
			if pIdx > 0 {
				logger.Warnf("ClickHouse 已自动切换连接协议为 %s(常见于 8123/8443 HTTP 端口)", protocol.String())
			}
			return nil
		}
	}

	// Every attempt failed: release any forwarder and report all failures.
	_ = c.Close()
	return fmt.Errorf("连接建立后验证失败(可检查 ClickHouse 端口与协议是否匹配:Native=9000/9440,HTTP=8123/8443):%s", strings.Join(failures, ";"))
}
|
||||
|
||||
// Close shuts down the connection pool and any SSH port forward.
// A forwarder close failure is logged but does not prevent closing the
// database connection; the connection's close error (if any) is returned.
func (c *ClickHouseDB) Close() error {
	if c.forwarder != nil {
		if err := c.forwarder.Close(); err != nil {
			logger.Warnf("关闭 ClickHouse SSH 端口转发失败:%v", err)
		}
		c.forwarder = nil
	}
	if c.conn != nil {
		return c.conn.Close()
	}
	return nil
}
|
||||
|
||||
// Ping verifies the connection is alive, bounded by the configured ping
// timeout (falling back to 5s when none was recorded by Connect).
func (c *ClickHouseDB) Ping() error {
	if c.conn == nil {
		return fmt.Errorf("连接未打开")
	}
	timeout := c.pingTimeout
	if timeout <= 0 {
		timeout = 5 * time.Second
	}
	ctx, cancel := utils.ContextWithTimeout(timeout)
	defer cancel()
	return c.conn.PingContext(ctx)
}
|
||||
|
||||
// QueryContext runs a query under the caller's context (cancellation /
// timeout) and returns the rows as maps plus the column names, via the
// shared scanRows helper.
func (c *ClickHouseDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
	if c.conn == nil {
		return nil, nil, fmt.Errorf("连接未打开")
	}
	rows, err := c.conn.QueryContext(ctx, query)
	if err != nil {
		return nil, nil, err
	}
	defer rows.Close()
	return scanRows(rows)
}
|
||||
|
||||
// Query runs a query without an explicit context and returns the rows as
// maps plus the column names. Prefer QueryContext when cancellation is
// needed.
func (c *ClickHouseDB) Query(query string) ([]map[string]interface{}, []string, error) {
	if c.conn == nil {
		return nil, nil, fmt.Errorf("连接未打开")
	}
	rows, err := c.conn.Query(query)
	if err != nil {
		return nil, nil, err
	}
	defer rows.Close()
	return scanRows(rows)
}
|
||||
|
||||
// ExecContext executes a statement under the caller's context and returns
// the affected-row count reported by the driver.
func (c *ClickHouseDB) ExecContext(ctx context.Context, query string) (int64, error) {
	if c.conn == nil {
		return 0, fmt.Errorf("连接未打开")
	}
	res, err := c.conn.ExecContext(ctx, query)
	if err != nil {
		return 0, err
	}
	return res.RowsAffected()
}
|
||||
|
||||
// Exec executes a statement without an explicit context and returns the
// affected-row count reported by the driver.
func (c *ClickHouseDB) Exec(query string) (int64, error) {
	if c.conn == nil {
		return 0, fmt.Errorf("连接未打开")
	}
	res, err := c.conn.Exec(query)
	if err != nil {
		return 0, err
	}
	return res.RowsAffected()
}
|
||||
|
||||
// GetDatabases lists all databases from system.databases, sorted by name.
// Each row's "name" column is preferred; when the driver reports a
// different column name, the first value in the row is used as a fallback.
func (c *ClickHouseDB) GetDatabases() ([]string, error) {
	data, _, err := c.Query("SELECT name FROM system.databases ORDER BY name")
	if err != nil {
		return nil, err
	}

	result := make([]string, 0, len(data))
	for _, row := range data {
		if val, ok := getClickHouseValueFromRow(row, "name", "database"); ok {
			result = append(result, fmt.Sprintf("%v", val))
			continue
		}
		// Fallback: take an arbitrary single value from the row.
		for _, value := range row {
			result = append(result, fmt.Sprintf("%v", value))
			break
		}
	}
	return result, nil
}
|
||||
|
||||
// GetTables lists tables from system.tables. With a database (argument or
// the connection default) it returns bare table names for that database;
// with no database at all it returns "database.table" pairs for every
// database. Row values are read by expected column name, with an arbitrary
// first value as a fallback.
func (c *ClickHouseDB) GetTables(dbName string) ([]string, error) {
	targetDB := strings.TrimSpace(dbName)
	if targetDB == "" {
		targetDB = strings.TrimSpace(c.database)
	}

	var query string
	if targetDB != "" {
		// The database name is escaped for embedding in a SQL literal.
		query = fmt.Sprintf(
			"SELECT name FROM system.tables WHERE database = '%s' ORDER BY name",
			escapeClickHouseSQLLiteral(targetDB),
		)
	} else {
		query = "SELECT database, name FROM system.tables ORDER BY database, name"
	}

	data, _, err := c.Query(query)
	if err != nil {
		return nil, err
	}

	result := make([]string, 0, len(data))
	for _, row := range data {
		if targetDB != "" {
			if val, ok := getClickHouseValueFromRow(row, "name", "table", "table_name"); ok {
				result = append(result, fmt.Sprintf("%v", val))
				continue
			}
		} else {
			databaseValue, hasDB := getClickHouseValueFromRow(row, "database", "schema_name")
			tableValue, hasTable := getClickHouseValueFromRow(row, "name", "table", "table_name")
			if hasDB && hasTable {
				result = append(result, fmt.Sprintf("%v.%v", databaseValue, tableValue))
				continue
			}
		}
		// Fallback: take an arbitrary single value from the row.
		for _, value := range row {
			result = append(result, fmt.Sprintf("%v", value))
			break
		}
	}
	return result, nil
}
|
||||
|
||||
// GetCreateStatement returns the CREATE TABLE DDL for a table via
// SHOW CREATE TABLE. The statement is read from the well-known result
// column names first; failing that, the longest row value containing
// "CREATE " is used, since drivers name this column inconsistently.
func (c *ClickHouseDB) GetCreateStatement(dbName, tableName string) (string, error) {
	database, table, err := c.resolveDatabaseAndTable(dbName, tableName)
	if err != nil {
		return "", err
	}

	query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", quoteClickHouseIdentifier(database), quoteClickHouseIdentifier(table))
	data, _, err := c.Query(query)
	if err != nil {
		return "", err
	}
	if len(data) == 0 {
		return "", fmt.Errorf("未找到建表语句")
	}
	row := data[0]
	if val, ok := getClickHouseValueFromRow(row, "statement", "create_statement", "sql", "query"); ok {
		text := strings.TrimSpace(fmt.Sprintf("%v", val))
		if text != "" {
			return text, nil
		}
	}

	// Fallback: pick the longest value that looks like DDL.
	longest := ""
	for _, value := range row {
		text := strings.TrimSpace(fmt.Sprintf("%v", value))
		if text == "" {
			continue
		}
		if strings.Contains(strings.ToUpper(text), "CREATE ") && len(text) > len(longest) {
			longest = text
		}
	}
	if longest != "" {
		return longest, nil
	}
	return "", fmt.Errorf("未找到建表语句")
}
|
||||
|
||||
// GetColumns reads column metadata for a table from system.columns and maps
// it onto the MySQL-flavoured ColumnDefinition shape used by the frontend:
// Nullable(...) types become Nullable=YES, primary-key columns Key=PRI,
// other sorting-key columns Key=MUL, and non-plain default kinds
// (MATERIALIZED/ALIAS/EPHEMERAL) are surfaced via Extra.
func (c *ClickHouseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
	database, table, err := c.resolveDatabaseAndTable(dbName, tableName)
	if err != nil {
		return nil, err
	}

	query := fmt.Sprintf(`
		SELECT
			name,
			type,
			default_kind,
			default_expression,
			is_in_primary_key,
			is_in_sorting_key,
			comment
		FROM system.columns
		WHERE database = '%s' AND table = '%s'
		ORDER BY position`,
		escapeClickHouseSQLLiteral(database),
		escapeClickHouseSQLLiteral(table),
	)
	data, _, err := c.Query(query)
	if err != nil {
		return nil, err
	}

	columns := make([]connection.ColumnDefinition, 0, len(data))
	for _, row := range data {
		nameValue, _ := getClickHouseValueFromRow(row, "name", "column_name")
		typeValue, _ := getClickHouseValueFromRow(row, "type", "data_type")
		defaultKind, _ := getClickHouseValueFromRow(row, "default_kind")
		defaultExpr, hasDefault := getClickHouseValueFromRow(row, "default_expression", "column_default")
		commentValue, _ := getClickHouseValueFromRow(row, "comment")
		inPrimary, _ := getClickHouseValueFromRow(row, "is_in_primary_key")
		inSorting, _ := getClickHouseValueFromRow(row, "is_in_sorting_key")

		// Nullability in ClickHouse is encoded in the type wrapper.
		colType := strings.TrimSpace(fmt.Sprintf("%v", typeValue))
		nullable := "NO"
		if strings.HasPrefix(strings.ToLower(colType), "nullable(") {
			nullable = "YES"
		}

		// Map primary/sorting key membership onto MySQL-style key flags.
		key := ""
		if isClickHouseTruthy(inPrimary) {
			key = "PRI"
		} else if isClickHouseTruthy(inSorting) {
			key = "MUL"
		}

		// Non-DEFAULT kinds (e.g. MATERIALIZED, ALIAS) go into Extra.
		extra := ""
		kindText := strings.ToUpper(strings.TrimSpace(fmt.Sprintf("%v", defaultKind)))
		if kindText != "" && kindText != "DEFAULT" {
			extra = kindText
		}

		col := connection.ColumnDefinition{
			Name:     strings.TrimSpace(fmt.Sprintf("%v", nameValue)),
			Type:     colType,
			Nullable: nullable,
			Key:      key,
			Extra:    extra,
			Comment:  strings.TrimSpace(fmt.Sprintf("%v", commentValue)),
		}
		if hasDefault && defaultExpr != nil {
			text := strings.TrimSpace(fmt.Sprintf("%v", defaultExpr))
			if text != "" {
				col.Default = &text
			}
		}
		columns = append(columns, col)
	}
	return columns, nil
}
|
||||
|
||||
// GetAllColumns lists every column (with its table) for search and SQL
// autocomplete. With a database (argument or connection default) it scopes
// to that database; otherwise it scans all user databases and prefixes
// table names with "database.".
func (c *ClickHouseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	targetDB := strings.TrimSpace(dbName)
	if targetDB == "" {
		targetDB = strings.TrimSpace(c.database)
	}

	var query string
	if targetDB != "" {
		query = fmt.Sprintf(`
		SELECT
			database,
			table,
			name,
			type
		FROM system.columns
		WHERE database = '%s'
		ORDER BY table, position`,
			escapeClickHouseSQLLiteral(targetDB),
		)
	} else {
		// No database scope: scan everything except the system schemas.
		query = `
		SELECT
			database,
			table,
			name,
			type
		FROM system.columns
		WHERE database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA')
		ORDER BY database, table, position`
	}

	data, _, err := c.Query(query)
	if err != nil {
		return nil, err
	}

	result := make([]connection.ColumnDefinitionWithTable, 0, len(data))
	for _, row := range data {
		databaseValue, _ := getClickHouseValueFromRow(row, "database")
		tableValue, hasTable := getClickHouseValueFromRow(row, "table", "table_name")
		nameValue, hasName := getClickHouseValueFromRow(row, "name", "column_name")
		typeValue, _ := getClickHouseValueFromRow(row, "type", "data_type")
		if !hasTable || !hasName {
			// Skip rows missing the essential identifiers.
			continue
		}

		tableName := strings.TrimSpace(fmt.Sprintf("%v", tableValue))
		if targetDB == "" {
			// Qualify with the database when listing across databases.
			dbText := strings.TrimSpace(fmt.Sprintf("%v", databaseValue))
			if dbText != "" {
				tableName = dbText + "." + tableName
			}
		}

		result = append(result, connection.ColumnDefinitionWithTable{
			TableName: tableName,
			Name:      strings.TrimSpace(fmt.Sprintf("%v", nameValue)),
			Type:      strings.TrimSpace(fmt.Sprintf("%v", typeValue)),
		})
	}
	return result, nil
}
|
||||
|
||||
// GetIndexes always returns an empty list: index introspection is not
// implemented for ClickHouse in this interface.
func (c *ClickHouseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	return []connection.IndexDefinition{}, nil
}
|
||||
|
||||
// GetForeignKeys always returns an empty list: ClickHouse has no foreign
// key constraints to report.
func (c *ClickHouseDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	return []connection.ForeignKeyDefinition{}, nil
}
|
||||
|
||||
// GetTriggers always returns an empty list: ClickHouse has no triggers to
// report.
func (c *ClickHouseDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	return []connection.TriggerDefinition{}, nil
}
|
||||
|
||||
// resolveDatabaseAndTable normalizes a (dbName, tableName) pair into
// unquoted database and table identifiers. A "db.table"-qualified table
// name overrides dbName; when no database can be determined the connection
// default and finally "default" are used. Surrounding backticks or double
// quotes on either part are stripped.
func (c *ClickHouseDB) resolveDatabaseAndTable(dbName, tableName string) (string, string, error) {
	rawTable := strings.TrimSpace(tableName)
	if rawTable == "" {
		return "", "", fmt.Errorf("表名不能为空")
	}

	resolvedDB := strings.TrimSpace(dbName)
	resolvedTable := rawTable
	// Split only on the first dot so table names containing dots after the
	// database qualifier keep their remainder intact.
	if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
		if dbPart := normalizeClickHouseIdentifierPart(parts[0]); dbPart != "" {
			resolvedDB = dbPart
		}
		resolvedTable = normalizeClickHouseIdentifierPart(parts[1])
	} else {
		resolvedTable = normalizeClickHouseIdentifierPart(rawTable)
	}

	if resolvedDB == "" {
		resolvedDB = strings.TrimSpace(c.database)
	}
	if resolvedDB == "" {
		resolvedDB = defaultClickHouseDatabase
	}
	if resolvedTable == "" {
		return "", "", fmt.Errorf("表名不能为空")
	}
	return resolvedDB, resolvedTable, nil
}
|
||||
|
||||
// normalizeClickHouseIdentifierPart trims whitespace from an identifier and
// strips one matching pair of surrounding backticks or double quotes, then
// trims the unwrapped text again. Unbalanced or mixed quoting is left as-is.
func normalizeClickHouseIdentifierPart(raw string) string {
	text := strings.TrimSpace(raw)
	if len(text) < 2 {
		return text
	}
	for _, quote := range []string{"`", `"`} {
		if strings.HasPrefix(text, quote) && strings.HasSuffix(text, quote) {
			return strings.TrimSpace(text[1 : len(text)-1])
		}
	}
	return text
}
|
||||
|
||||
// quoteClickHouseIdentifier wraps an identifier in backticks, doubling any
// embedded backtick so the result is safe to splice into SQL.
func quoteClickHouseIdentifier(raw string) string {
	escaped := strings.ReplaceAll(strings.TrimSpace(raw), "`", "``")
	var b strings.Builder
	b.Grow(len(escaped) + 2)
	b.WriteByte('`')
	b.WriteString(escaped)
	b.WriteByte('`')
	return b.String()
}
|
||||
|
||||
// escapeClickHouseSQLLiteral trims the input and escapes it for embedding
// inside a single-quoted ClickHouse string literal. ClickHouse treats
// backslash as an escape character inside string literals, so both '\' and
// single quotes must be escaped — doubling quotes alone would let a value
// ending in '\' swallow the closing quote and break (or inject into) the
// statement.
func escapeClickHouseSQLLiteral(raw string) string {
	text := strings.TrimSpace(raw)
	text = strings.ReplaceAll(text, `\`, `\\`)
	return strings.ReplaceAll(text, "'", `\'`)
}
|
||||
|
||||
// getClickHouseValueFromRow looks up a value in a result row by a list of
// candidate column names, in priority order. Exact matches are tried for
// every key first, then case-insensitive matches. The case-insensitive pass
// iterates the candidate keys (not the map) in the outer loop so that when
// several candidates could match, the highest-priority key wins
// deterministically — Go map iteration order is random, and the previous
// map-first iteration made the result nondeterministic.
func getClickHouseValueFromRow(row map[string]interface{}, keys ...string) (interface{}, bool) {
	if len(row) == 0 {
		return nil, false
	}
	// Pass 1: exact key match, in priority order.
	for _, key := range keys {
		if value, ok := row[key]; ok {
			return value, true
		}
	}
	// Pass 2: case-insensitive match, still in priority order.
	for _, key := range keys {
		for existingKey, value := range row {
			if strings.EqualFold(existingKey, key) {
				return value, true
			}
		}
	}
	return nil, false
}
|
||||
|
||||
// isClickHouseTruthy interprets a ClickHouse flag-like value as a boolean.
// Booleans pass through, signed/unsigned integers are truthy when non-zero,
// and anything else (strings included) is normalized to lowercase text and
// matched against "1"/"true"/"yes"/"y".
func isClickHouseTruthy(value interface{}) bool {
	switch v := value.(type) {
	case bool:
		return v
	case int:
		return v != 0
	case int8:
		return v != 0
	case int16:
		return v != 0
	case int32:
		return v != 0
	case int64:
		return v != 0
	case uint:
		return v != 0
	case uint8:
		return v != 0
	case uint16:
		return v != 0
	case uint32:
		return v != 0
	case uint64:
		return v != 0
	}
	// Strings and every other type share one textual normalization path.
	switch strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", value))) {
	case "1", "true", "yes", "y":
		return true
	}
	return false
}
|
||||
|
||||
// ApplyChanges applies a batch of row deletes, updates and inserts to the
// given table (qualified against the connection's default database).
// Deletes and updates are issued as ALTER TABLE ... DELETE / UPDATE, which
// ClickHouse runs as asynchronous mutations — their effects may not be
// visible immediately after this call returns. Statements are executed one
// by one with no transaction; a failure leaves earlier statements applied.
//
// NOTE(review): all statements embed values as escaped literals via
// clickHouseLiteral rather than bound parameters — weaker than
// parameterized queries; verify escaping covers all reachable inputs.
func (c *ClickHouseDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	if c.conn == nil {
		return fmt.Errorf("连接未打开")
	}

	database, table, err := c.resolveDatabaseAndTable(c.database, tableName)
	if err != nil {
		return err
	}
	qualifiedTable := fmt.Sprintf("%s.%s", quoteClickHouseIdentifier(database), quoteClickHouseIdentifier(table))

	// Deletes first; rows without usable key conditions are skipped.
	for _, pk := range changes.Deletes {
		whereExpr := buildClickHouseWhereClause(pk)
		if whereExpr == "" {
			continue
		}
		query := fmt.Sprintf("ALTER TABLE %s DELETE WHERE %s", qualifiedTable, whereExpr)
		if _, err := c.conn.Exec(query); err != nil {
			return fmt.Errorf("delete error: %v; sql=%s", err, query)
		}
	}

	// Updates; entries lacking assignments or key conditions are skipped.
	for _, update := range changes.Updates {
		setExpr := buildClickHouseAssignments(update.Values)
		whereExpr := buildClickHouseWhereClause(update.Keys)
		if setExpr == "" || whereExpr == "" {
			continue
		}
		query := fmt.Sprintf("ALTER TABLE %s UPDATE %s WHERE %s", qualifiedTable, setExpr, whereExpr)
		if _, err := c.conn.Exec(query); err != nil {
			return fmt.Errorf("update error: %v; sql=%s", err, query)
		}
	}

	// Inserts last; empty rows produce an empty query and are skipped.
	for _, row := range changes.Inserts {
		query, err := buildClickHouseInsertSQL(qualifiedTable, row)
		if err != nil {
			return err
		}
		if query == "" {
			continue
		}
		if _, err := c.conn.Exec(query); err != nil {
			return fmt.Errorf("插入失败:%v; sql=%s", err, query)
		}
	}
	return nil
}
|
||||
|
||||
func buildClickHouseInsertSQL(qualifiedTable string, row map[string]interface{}) (string, error) {
|
||||
if len(row) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
cols := make([]string, 0, len(row))
|
||||
for k := range row {
|
||||
if strings.TrimSpace(k) == "" {
|
||||
continue
|
||||
}
|
||||
cols = append(cols, k)
|
||||
}
|
||||
if len(cols) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
sort.Strings(cols)
|
||||
quotedCols := make([]string, 0, len(cols))
|
||||
values := make([]string, 0, len(cols))
|
||||
for _, col := range cols {
|
||||
quotedCols = append(quotedCols, quoteClickHouseIdentifier(col))
|
||||
values = append(values, clickHouseLiteral(row[col]))
|
||||
}
|
||||
return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(quotedCols, ", "), strings.Join(values, ", ")), nil
|
||||
}
|
||||
|
||||
func buildClickHouseAssignments(values map[string]interface{}) string {
|
||||
if len(values) == 0 {
|
||||
return ""
|
||||
}
|
||||
cols := make([]string, 0, len(values))
|
||||
for k := range values {
|
||||
if strings.TrimSpace(k) == "" {
|
||||
continue
|
||||
}
|
||||
cols = append(cols, k)
|
||||
}
|
||||
sort.Strings(cols)
|
||||
parts := make([]string, 0, len(cols))
|
||||
for _, col := range cols {
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", quoteClickHouseIdentifier(col), clickHouseLiteral(values[col])))
|
||||
}
|
||||
return strings.Join(parts, ", ")
|
||||
}
|
||||
|
||||
func buildClickHouseWhereClause(keys map[string]interface{}) string {
|
||||
if len(keys) == 0 {
|
||||
return ""
|
||||
}
|
||||
cols := make([]string, 0, len(keys))
|
||||
for k := range keys {
|
||||
if strings.TrimSpace(k) == "" {
|
||||
continue
|
||||
}
|
||||
cols = append(cols, k)
|
||||
}
|
||||
sort.Strings(cols)
|
||||
parts := make([]string, 0, len(cols))
|
||||
for _, col := range cols {
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", quoteClickHouseIdentifier(col), clickHouseLiteral(keys[col])))
|
||||
}
|
||||
return strings.Join(parts, " AND ")
|
||||
}
|
||||
|
||||
func clickHouseLiteral(value interface{}) string {
|
||||
switch val := value.(type) {
|
||||
case nil:
|
||||
return "NULL"
|
||||
case bool:
|
||||
if val {
|
||||
return "1"
|
||||
}
|
||||
return "0"
|
||||
case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64:
|
||||
return fmt.Sprintf("%v", val)
|
||||
case time.Time:
|
||||
return fmt.Sprintf("'%s'", val.Format("2006-01-02 15:04:05"))
|
||||
case []byte:
|
||||
return fmt.Sprintf("'%s'", strings.ReplaceAll(string(val), "'", "''"))
|
||||
default:
|
||||
return fmt.Sprintf("'%s'", strings.ReplaceAll(fmt.Sprintf("%v", val), "'", "''"))
|
||||
}
|
||||
}
|
||||
@@ -47,7 +47,7 @@ func (c *CustomDB) Close() error {
|
||||
|
||||
func (c *CustomDB) Ping() error {
|
||||
if c.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
timeout := c.pingTimeout
|
||||
if timeout <= 0 {
|
||||
@@ -60,7 +60,7 @@ func (c *CustomDB) Ping() error {
|
||||
|
||||
func (c *CustomDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := c.conn.QueryContext(ctx, query)
|
||||
@@ -74,7 +74,7 @@ func (c *CustomDB) QueryContext(ctx context.Context, query string) ([]map[string
|
||||
|
||||
func (c *CustomDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := c.conn.Query(query)
|
||||
@@ -87,7 +87,7 @@ func (c *CustomDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
func (c *CustomDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := c.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
@@ -98,7 +98,7 @@ func (c *CustomDB) ExecContext(ctx context.Context, query string) (int64, error)
|
||||
|
||||
func (c *CustomDB) Exec(query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := c.conn.Exec(query)
|
||||
if err != nil {
|
||||
@@ -249,7 +249,7 @@ func (c *CustomDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDe
|
||||
|
||||
func (c *CustomDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if c.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
tx, err := c.conn.Begin()
|
||||
@@ -321,7 +321,7 @@ func (c *CustomDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
return fmt.Errorf("删除失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -349,12 +349,12 @@ func (c *CustomDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
return fmt.Errorf("更新操作需要主键条件")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
return fmt.Errorf("更新失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -378,7 +378,7 @@ func (c *CustomDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
return fmt.Errorf("插入失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -36,6 +36,14 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
if config.Database != "" {
|
||||
q.Set("schema", config.Database)
|
||||
}
|
||||
if config.UseSSL {
|
||||
if certPath := strings.TrimSpace(config.SSLCertPath); certPath != "" {
|
||||
q.Set("SSL_CERT_PATH", certPath)
|
||||
}
|
||||
if keyPath := strings.TrimSpace(config.SSLKeyPath); keyPath != "" {
|
||||
q.Set("SSL_KEY_PATH", keyPath)
|
||||
}
|
||||
}
|
||||
if escapedPassword != config.Password {
|
||||
// 达梦驱动要求:密码包含特殊字符时,password 需 PathEscape,并添加 escapeProcess=true 让驱动解码。
|
||||
q.Set("escapeProcess", "true")
|
||||
@@ -50,8 +58,12 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
if runConfig.UseSSL {
|
||||
if strings.TrimSpace(runConfig.SSLCertPath) == "" || strings.TrimSpace(runConfig.SSLKeyPath) == "" {
|
||||
return fmt.Errorf("达梦启用 SSL 需要同时配置证书路径(sslCertPath)与私钥路径(sslKeyPath)")
|
||||
}
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
@@ -80,22 +92,37 @@ func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = d.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("达梦数据库通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = d.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(config)
|
||||
if err := d.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := d.getDSN(attempt)
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := d.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
d.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("达梦 SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (d *DamengDB) Close() error {
|
||||
@@ -116,7 +143,7 @@ func (d *DamengDB) Close() error {
|
||||
|
||||
func (d *DamengDB) Ping() error {
|
||||
if d.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
timeout := d.pingTimeout
|
||||
if timeout <= 0 {
|
||||
@@ -129,7 +156,7 @@ func (d *DamengDB) Ping() error {
|
||||
|
||||
func (d *DamengDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := d.conn.QueryContext(ctx, query)
|
||||
@@ -143,7 +170,7 @@ func (d *DamengDB) QueryContext(ctx context.Context, query string) ([]map[string
|
||||
|
||||
func (d *DamengDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := d.conn.Query(query)
|
||||
@@ -156,7 +183,7 @@ func (d *DamengDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
func (d *DamengDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if d.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := d.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
@@ -167,7 +194,7 @@ func (d *DamengDB) ExecContext(ctx context.Context, query string) (int64, error)
|
||||
|
||||
func (d *DamengDB) Exec(query string) (int64, error) {
|
||||
if d.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := d.conn.Exec(query)
|
||||
if err != nil {
|
||||
@@ -177,22 +204,9 @@ func (d *DamengDB) Exec(query string) (int64, error) {
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetDatabases() ([]string, error) {
|
||||
// DM: List Users/Schemas
|
||||
data, _, err := d.Query("SELECT username FROM dba_users")
|
||||
if err != nil {
|
||||
// Fallback if dba_users not accessible
|
||||
data, _, err = d.Query("SELECT username FROM all_users")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["USERNAME"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
// 达梦在本项目中将 schema/owner 作为“数据库”展示口径。
|
||||
// 先查当前 schema / 当前用户,再聚合可见用户与 owner,避免权限受限时返回空列表。
|
||||
return collectDamengDatabaseNames(d.Query)
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetTables(dbName string) ([]string, error) {
|
||||
@@ -246,7 +260,7 @@ func (d *DamengDB) GetCreateStatement(dbName, tableName string) (string, error)
|
||||
return fmt.Sprintf("%v", val), nil
|
||||
}
|
||||
}
|
||||
return "", fmt.Errorf("create statement not found")
|
||||
return "", fmt.Errorf("未找到建表语句")
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
@@ -376,7 +390,7 @@ func (d *DamengDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDe
|
||||
|
||||
func (d *DamengDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if d.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
tx, err := d.conn.Begin()
|
||||
@@ -424,7 +438,7 @@ func (d *DamengDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
return fmt.Errorf("删除失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -452,12 +466,12 @@ func (d *DamengDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
return fmt.Errorf("更新操作需要主键条件")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
return fmt.Errorf("更新失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -481,7 +495,7 @@ func (d *DamengDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
return fmt.Errorf("插入失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
91
internal/db/dameng_metadata.go
Normal file
91
internal/db/dameng_metadata.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var damengDatabaseQueries = []string{
|
||||
"SELECT SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') AS DATABASE_NAME FROM DUAL",
|
||||
"SELECT SYS_CONTEXT('USERENV', 'CURRENT_USER') AS DATABASE_NAME FROM DUAL",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM USER_USERS",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM ALL_USERS ORDER BY USERNAME",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM DBA_USERS ORDER BY USERNAME",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM SYS.DBA_USERS ORDER BY USERNAME",
|
||||
"SELECT DISTINCT OWNER AS DATABASE_NAME FROM ALL_OBJECTS ORDER BY OWNER",
|
||||
"SELECT DISTINCT OWNER AS DATABASE_NAME FROM ALL_TABLES ORDER BY OWNER",
|
||||
}
|
||||
|
||||
type damengQueryFunc func(query string) ([]map[string]interface{}, []string, error)
|
||||
|
||||
func collectDamengDatabaseNames(query damengQueryFunc) ([]string, error) {
|
||||
seen := make(map[string]struct{})
|
||||
dbs := make([]string, 0, 64)
|
||||
var lastErr error
|
||||
|
||||
for _, q := range damengDatabaseQueries {
|
||||
data, _, err := query(q)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
continue
|
||||
}
|
||||
for _, row := range data {
|
||||
name := getDamengRowString(row,
|
||||
"DATABASE_NAME",
|
||||
"USERNAME",
|
||||
"OWNER",
|
||||
"SCHEMA_NAME",
|
||||
"CURRENT_SCHEMA",
|
||||
"CURRENT_USER",
|
||||
)
|
||||
if name == "" {
|
||||
for _, v := range row {
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", v))
|
||||
if text == "" || strings.EqualFold(text, "<nil>") {
|
||||
continue
|
||||
}
|
||||
name = text
|
||||
break
|
||||
}
|
||||
}
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
key := strings.ToUpper(name)
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
dbs = append(dbs, name)
|
||||
}
|
||||
}
|
||||
|
||||
if len(dbs) == 0 && lastErr != nil {
|
||||
return nil, lastErr
|
||||
}
|
||||
|
||||
sort.Slice(dbs, func(i, j int) bool {
|
||||
return strings.ToUpper(dbs[i]) < strings.ToUpper(dbs[j])
|
||||
})
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
func getDamengRowString(row map[string]interface{}, keys ...string) string {
|
||||
if len(row) == 0 {
|
||||
return ""
|
||||
}
|
||||
for _, key := range keys {
|
||||
for k, v := range row {
|
||||
if !strings.EqualFold(strings.TrimSpace(k), strings.TrimSpace(key)) {
|
||||
continue
|
||||
}
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", v))
|
||||
if text == "" || strings.EqualFold(text, "<nil>") {
|
||||
return ""
|
||||
}
|
||||
return text
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
73
internal/db/dameng_metadata_test.go
Normal file
73
internal/db/dameng_metadata_test.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCollectDamengDatabaseNames_UsesCurrentSchemaFallback(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := collectDamengDatabaseNames(func(query string) ([]map[string]interface{}, []string, error) {
|
||||
switch query {
|
||||
case damengDatabaseQueries[0]:
|
||||
return []map[string]interface{}{{"DATABASE_NAME": "APP_SCHEMA"}}, nil, nil
|
||||
case damengDatabaseQueries[1]:
|
||||
return []map[string]interface{}{{"DATABASE_NAME": "app_schema"}}, nil, nil
|
||||
default:
|
||||
return nil, nil, errors.New("permission denied")
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("collectDamengDatabaseNames 返回错误: %v", err)
|
||||
}
|
||||
|
||||
want := []string{"APP_SCHEMA"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected database names, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCollectDamengDatabaseNames_CollectsOwnersWhenVisible(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := collectDamengDatabaseNames(func(query string) ([]map[string]interface{}, []string, error) {
|
||||
switch query {
|
||||
case damengDatabaseQueries[0], damengDatabaseQueries[1], damengDatabaseQueries[2], damengDatabaseQueries[3], damengDatabaseQueries[4], damengDatabaseQueries[5]:
|
||||
return []map[string]interface{}{}, nil, nil
|
||||
case damengDatabaseQueries[6]:
|
||||
return []map[string]interface{}{{"OWNER": "BIZ"}, {"OWNER": "audit"}}, nil, nil
|
||||
case damengDatabaseQueries[7]:
|
||||
return []map[string]interface{}{{"OWNER": "BIZ"}}, nil, nil
|
||||
default:
|
||||
return nil, nil, nil
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("collectDamengDatabaseNames 返回错误: %v", err)
|
||||
}
|
||||
|
||||
want := []string{"audit", "BIZ"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected database names, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCollectDamengDatabaseNames_ReturnsErrorWhenNoNameResolved(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expectErr := errors.New("last query failed")
|
||||
got, err := collectDamengDatabaseNames(func(query string) ([]map[string]interface{}, []string, error) {
|
||||
if query == damengDatabaseQueries[len(damengDatabaseQueries)-1] {
|
||||
return nil, nil, expectErr
|
||||
}
|
||||
return nil, nil, errors.New("permission denied")
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatalf("期望返回错误,实际 got=%v", got)
|
||||
}
|
||||
if !errors.Is(err, expectErr) {
|
||||
t.Fatalf("错误不符合预期: %v", err)
|
||||
}
|
||||
}
|
||||
@@ -2,27 +2,58 @@ package db
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Database 定义了统一的数据源访问接口。
|
||||
// 所有数据库驱动(MySQL、PostgreSQL、Oracle 等)均需实现此接口。
|
||||
// 方法调用方可通过 NewDatabase 工厂函数获取对应驱动的实例。
|
||||
type Database interface {
|
||||
// Connect 根据连接配置建立数据库连接。
|
||||
Connect(config connection.ConnectionConfig) error
|
||||
// Close 关闭数据库连接并释放底层资源。
|
||||
Close() error
|
||||
// Ping 测试连接是否仍然可用。
|
||||
Ping() error
|
||||
// Query 执行查询语句,返回结果行(列名→值映射)和列名列表。
|
||||
Query(query string) ([]map[string]interface{}, []string, error)
|
||||
// Exec 执行非查询语句(INSERT/UPDATE/DELETE 等),返回受影响行数。
|
||||
Exec(query string) (int64, error)
|
||||
// GetDatabases 返回当前连接可访问的数据库列表。
|
||||
GetDatabases() ([]string, error)
|
||||
// GetTables 返回指定数据库下的表列表。
|
||||
GetTables(dbName string) ([]string, error)
|
||||
// GetCreateStatement 返回指定表的建表 DDL 语句。
|
||||
GetCreateStatement(dbName, tableName string) (string, error)
|
||||
// GetColumns 返回指定表的列定义列表。
|
||||
GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error)
|
||||
// GetAllColumns 返回指定数据库下所有表的列定义(含表名标识)。
|
||||
GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error)
|
||||
// GetIndexes 返回指定表的索引定义列表。
|
||||
GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error)
|
||||
// GetForeignKeys 返回指定表的外键定义列表。
|
||||
GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error)
|
||||
// GetTriggers 返回指定表的触发器定义列表。
|
||||
GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error)
|
||||
}
|
||||
|
||||
// MultiResultQuerier 是可选接口,支持多结果集的驱动实现此接口。
|
||||
// 执行可能包含多条 SQL 语句的查询,返回所有结果集。
|
||||
type MultiResultQuerier interface {
|
||||
QueryMulti(query string) ([]connection.ResultSetData, error)
|
||||
}
|
||||
|
||||
// MultiResultQuerierContext 是带 context 的多结果集查询接口。
|
||||
type MultiResultQuerierContext interface {
|
||||
QueryMultiContext(ctx context.Context, query string) ([]connection.ResultSetData, error)
|
||||
}
|
||||
|
||||
// BatchApplier 定义了批量变更提交接口。
|
||||
// 支持批量编辑的驱动实现此接口,用于一次性提交前端 DataGrid 中的增删改操作。
|
||||
type BatchApplier interface {
|
||||
// ApplyChanges 将一组变更(新增、修改、删除)批量提交到指定表。
|
||||
ApplyChanges(tableName string, changes connection.ChangeSet) error
|
||||
}
|
||||
|
||||
@@ -72,7 +103,9 @@ func normalizeDatabaseType(dbType string) string {
|
||||
}
|
||||
}
|
||||
|
||||
// Factory
|
||||
// NewDatabase 根据数据库类型创建对应的 Database 实例。
|
||||
// dbType 为数据库类型标识(如 "mysql"、"postgres"、"oracle" 等),大小写不敏感。
|
||||
// 如果指定类型未注册,返回错误。
|
||||
func NewDatabase(dbType string) (Database, error) {
|
||||
normalized := normalizeDatabaseType(dbType)
|
||||
if normalized == "" {
|
||||
@@ -80,7 +113,7 @@ func NewDatabase(dbType string) (Database, error) {
|
||||
}
|
||||
factory, ok := databaseFactories[normalized]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unsupported database type: %s", dbType)
|
||||
return nil, fmt.Errorf("不支持的数据库类型:%s", dbType)
|
||||
}
|
||||
return factory(), nil
|
||||
}
|
||||
|
||||
@@ -15,4 +15,5 @@ func registerOptionalDatabaseFactories() {
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("vastbase"), "vastbase")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("mongodb"), "mongodb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("tdengine"), "tdengine")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("clickhouse"), "clickhouse")
|
||||
}
|
||||
|
||||
@@ -15,4 +15,5 @@ func registerOptionalDatabaseFactories() {
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("vastbase"), "vastbase")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("mongodb"), "mongodb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("tdengine"), "tdengine")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("clickhouse"), "clickhouse")
|
||||
}
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
@@ -21,7 +20,7 @@ const (
|
||||
defaultDirosPort = 9030
|
||||
)
|
||||
|
||||
// DirosDB 使用独立 driver 名称(diros)接入,底层协议兼容 MySQL。
|
||||
// DirosDB 使用独立 driver 名称(diros)接入,底层协议兼容 MySQL(对外显示为 Doris)。
|
||||
type DirosDB struct {
|
||||
MySQLDB
|
||||
}
|
||||
@@ -135,25 +134,26 @@ func collectDirosAddresses(config connection.ConnectionConfig) []string {
|
||||
return result
|
||||
}
|
||||
|
||||
func (d *DirosDB) getDSN(config connection.ConnectionConfig) string {
|
||||
func (d *DirosDB) getDSN(config connection.ConnectionConfig) (string, error) {
|
||||
database := config.Database
|
||||
protocol := "tcp"
|
||||
address := normalizeMySQLAddress(config.Host, config.Port)
|
||||
|
||||
if config.UseSSH {
|
||||
netName, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
protocol = netName
|
||||
address = normalizeMySQLAddress(config.Host, config.Port)
|
||||
} else {
|
||||
logger.Warnf("注册 Diros SSH 网络失败,将尝试直连:地址=%s:%d 用户=%s,原因:%v", config.Host, config.Port, config.User, err)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
protocol = netName
|
||||
}
|
||||
|
||||
timeout := getConnectTimeoutSeconds(config)
|
||||
tlsMode := resolveMySQLTLSMode(config)
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds",
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
return fmt.Sprintf(
|
||||
"%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds&tls=%s&multiStatements=true",
|
||||
config.User, config.Password, protocol, address, database, timeout, url.QueryEscape(tlsMode),
|
||||
), nil
|
||||
}
|
||||
|
||||
func resolveDirosCredential(config connection.ConnectionConfig, addressIndex int) (string, string) {
|
||||
@@ -177,7 +177,7 @@ func (d *DirosDB) Connect(config connection.ConnectionConfig) error {
|
||||
runConfig := applyDirosURI(config)
|
||||
addresses := collectDirosAddresses(runConfig)
|
||||
if len(addresses) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Diros 地址")
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Doris 地址")
|
||||
}
|
||||
|
||||
var errorDetails []string
|
||||
@@ -191,7 +191,11 @@ func (d *DirosDB) Connect(config connection.ConnectionConfig) error {
|
||||
candidateConfig.Port = port
|
||||
candidateConfig.User, candidateConfig.Password = resolveDirosCredential(runConfig, index)
|
||||
|
||||
dsn := d.getDSN(candidateConfig)
|
||||
dsn, err := d.getDSN(candidateConfig)
|
||||
if err != nil {
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s 生成连接串失败: %v", address, err))
|
||||
continue
|
||||
}
|
||||
db, err := sql.Open(dirosDriverName, dsn)
|
||||
if err != nil {
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s 打开失败: %v", address, err))
|
||||
@@ -214,7 +218,7 @@ func (d *DirosDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
|
||||
if len(errorDetails) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Diros 地址")
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Doris 地址")
|
||||
}
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(errorDetails, ";"))
|
||||
}
|
||||
|
||||
74
internal/db/driver_agent_binary_check.go
Normal file
74
internal/db/driver_agent_binary_check.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"debug/pe"
|
||||
"fmt"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
peMachineI386 uint16 = 0x014c
|
||||
peMachineAmd64 uint16 = 0x8664
|
||||
peMachineArm64 uint16 = 0xaa64
|
||||
)
|
||||
|
||||
func windowsMachineLabel(machine uint16) string {
|
||||
switch machine {
|
||||
case peMachineI386:
|
||||
return "windows-386"
|
||||
case peMachineAmd64:
|
||||
return "windows-amd64"
|
||||
case peMachineArm64:
|
||||
return "windows-arm64"
|
||||
default:
|
||||
return fmt.Sprintf("windows-unknown(0x%04x)", machine)
|
||||
}
|
||||
}
|
||||
|
||||
func expectedWindowsMachineForGoArch(goarch string) (uint16, string, bool) {
|
||||
switch strings.ToLower(strings.TrimSpace(goarch)) {
|
||||
case "386":
|
||||
return peMachineI386, "windows-386", true
|
||||
case "amd64":
|
||||
return peMachineAmd64, "windows-amd64", true
|
||||
case "arm64":
|
||||
return peMachineArm64, "windows-arm64", true
|
||||
default:
|
||||
return 0, "", false
|
||||
}
|
||||
}
|
||||
|
||||
func validateWindowsExecutableMachine(pathText string) error {
|
||||
file, err := pe.Open(pathText)
|
||||
if err != nil {
|
||||
return fmt.Errorf("无法识别为有效的 Windows 可执行文件:%w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
expectedMachine, expectedLabel, ok := expectedWindowsMachineForGoArch(runtime.GOARCH)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
actualMachine := file.FileHeader.Machine
|
||||
if actualMachine != expectedMachine {
|
||||
return fmt.Errorf("可执行文件架构不兼容(文件=%s,当前进程=%s)", windowsMachineLabel(actualMachine), expectedLabel)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateOptionalDriverAgentExecutable 校验可选驱动代理二进制是否可在当前进程中执行。
|
||||
// 当前主要用于 Windows 下的 PE 架构兼容性校验,避免升级后复用到错误架构的旧代理。
|
||||
func ValidateOptionalDriverAgentExecutable(driverType string, executablePath string) error {
|
||||
pathText := strings.TrimSpace(executablePath)
|
||||
if pathText == "" {
|
||||
return fmt.Errorf("%s 驱动代理路径为空", driverDisplayName(driverType))
|
||||
}
|
||||
if runtime.GOOS != "windows" {
|
||||
return nil
|
||||
}
|
||||
if err := validateWindowsExecutableMachine(pathText); err != nil {
|
||||
return fmt.Errorf("%s 驱动代理不可用:%w", driverDisplayName(driverType), err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
// coreBuiltinDrivers 是始终内置可用的核心驱动,无需额外安装即可使用。
|
||||
var coreBuiltinDrivers = map[string]struct{}{
|
||||
"mysql": {},
|
||||
"redis": {},
|
||||
@@ -18,18 +19,19 @@ var coreBuiltinDrivers = map[string]struct{}{
|
||||
// optionalGoDrivers 表示需要用户“安装启用”后才能使用的纯 Go 驱动。
|
||||
// 注意:这是一种运行时门控(installed.json 标记),并不减少主二进制体积。
|
||||
var optionalGoDrivers = map[string]struct{}{
|
||||
"mariadb": {},
|
||||
"diros": {},
|
||||
"sphinx": {},
|
||||
"sqlserver": {},
|
||||
"sqlite": {},
|
||||
"duckdb": {},
|
||||
"dameng": {},
|
||||
"kingbase": {},
|
||||
"highgo": {},
|
||||
"vastbase": {},
|
||||
"mongodb": {},
|
||||
"tdengine": {},
|
||||
"mariadb": {},
|
||||
"diros": {},
|
||||
"sphinx": {},
|
||||
"sqlserver": {},
|
||||
"sqlite": {},
|
||||
"duckdb": {},
|
||||
"dameng": {},
|
||||
"kingbase": {},
|
||||
"highgo": {},
|
||||
"vastbase": {},
|
||||
"mongodb": {},
|
||||
"tdengine": {},
|
||||
"clickhouse": {},
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -60,7 +62,7 @@ func driverDisplayName(driverType string) string {
|
||||
case "mariadb":
|
||||
return "MariaDB"
|
||||
case "diros":
|
||||
return "Diros"
|
||||
return "Doris"
|
||||
case "sphinx":
|
||||
return "Sphinx"
|
||||
case "postgres":
|
||||
@@ -83,11 +85,15 @@ func driverDisplayName(driverType string) string {
|
||||
return "MongoDB"
|
||||
case "tdengine":
|
||||
return "TDengine"
|
||||
case "clickhouse":
|
||||
return "ClickHouse"
|
||||
default:
|
||||
return strings.ToUpper(strings.TrimSpace(driverType))
|
||||
}
|
||||
}
|
||||
|
||||
// IsOptionalGoDriver 返回指定驱动类型是否为可选的纯 Go 驱动。
|
||||
// 可选驱动需要用户在驱动管理界面点击“安装启用”后才能使用。
|
||||
func IsOptionalGoDriver(driverType string) bool {
|
||||
_, ok := optionalGoDrivers[normalizeRuntimeDriverType(driverType)]
|
||||
return ok
|
||||
@@ -97,6 +103,7 @@ func IsOptionalGoDriverBuildIncluded(driverType string) bool {
|
||||
return optionalGoDriverBuildIncluded(normalizeRuntimeDriverType(driverType))
|
||||
}
|
||||
|
||||
// IsBuiltinDriver 返回指定驱动类型是否为核心内置驱动(始终可用,无需安装)。
|
||||
func IsBuiltinDriver(driverType string) bool {
|
||||
_, ok := coreBuiltinDrivers[normalizeRuntimeDriverType(driverType)]
|
||||
return ok
|
||||
@@ -143,6 +150,8 @@ func currentExternalDriverDownloadDirectory() string {
|
||||
return defaultExternalDriverDownloadDirectory()
|
||||
}
|
||||
|
||||
// SetExternalDriverDownloadDirectory 设置可选驱动的下载存储目录。
|
||||
// 如果路径解析失败,会回退到默认目录(~/.gonavi/drivers)。
|
||||
func SetExternalDriverDownloadDirectory(downloadDir string) {
|
||||
root, err := resolveExternalDriverRoot(downloadDir)
|
||||
if err != nil {
|
||||
@@ -191,6 +200,9 @@ func optionalGoDriverRuntimeReady(driverType string) (bool, string) {
|
||||
if statErr != nil || info.IsDir() {
|
||||
return false, fmt.Sprintf("%s 驱动代理缺失,请在驱动管理中重新安装启用", driverDisplayName(normalized))
|
||||
}
|
||||
if validateErr := ValidateOptionalDriverAgentExecutable(normalized, executablePath); validateErr != nil {
|
||||
return false, fmt.Sprintf("%s;请在驱动管理中重新安装启用", validateErr.Error())
|
||||
}
|
||||
return true, ""
|
||||
}
|
||||
|
||||
|
||||
@@ -65,11 +65,22 @@ func TestManagedDriverRequiresInstallMarker(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("解析 mariadb 代理路径失败: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(executablePath, []byte("placeholder"), 0o755); err != nil {
|
||||
t.Fatalf("写入 mariadb 代理占位文件失败: %v", err)
|
||||
}
|
||||
if runtime.GOOS == "windows" {
|
||||
_ = os.Chmod(executablePath, 0o644)
|
||||
selfPath, selfErr := os.Executable()
|
||||
if selfErr != nil {
|
||||
t.Fatalf("获取测试进程路径失败: %v", selfErr)
|
||||
}
|
||||
content, readErr := os.ReadFile(selfPath)
|
||||
if readErr != nil {
|
||||
t.Fatalf("读取测试进程失败: %v", readErr)
|
||||
}
|
||||
if err := os.WriteFile(executablePath, content, 0o755); err != nil {
|
||||
t.Fatalf("写入 mariadb 代理占位可执行文件失败: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err := os.WriteFile(executablePath, []byte("placeholder"), 0o755); err != nil {
|
||||
t.Fatalf("写入 mariadb 代理占位文件失败: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
supported, reason := DriverRuntimeSupportStatus("mariadb")
|
||||
|
||||
@@ -5,6 +5,7 @@ package db
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
@@ -32,6 +33,44 @@ func TestPostgresDSN_EscapesPassword(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestPostgresDSN_SSLModeRequireWhenEnabled(t *testing.T) {
|
||||
p := &PostgresDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
Host: "127.0.0.1",
|
||||
Port: 5432,
|
||||
User: "user",
|
||||
Password: "pass",
|
||||
Database: "db",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := p.getDSN(cfg)
|
||||
if !strings.Contains(dsn, "sslmode=require") {
|
||||
t.Fatalf("dsn 缺少 sslmode=require 参数:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMySQLDSN_UsesTLSParamWhenSSLEnabled(t *testing.T) {
|
||||
m := &MySQLDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "127.0.0.1",
|
||||
Port: 3306,
|
||||
User: "root",
|
||||
Password: "pass",
|
||||
Database: "db",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := m.getDSN(cfg)
|
||||
if !strings.Contains(dsn, "tls=true") {
|
||||
t.Fatalf("dsn 缺少 tls=true 参数:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOracleDSN_EscapesUserAndPassword(t *testing.T) {
|
||||
o := &OracleDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
@@ -81,6 +120,30 @@ func TestDamengDSN_EscapesPasswordAndEnablesEscapeProcess(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestDamengDSN_AppendsSSLCertAndKeyParams(t *testing.T) {
|
||||
d := &DamengDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "dameng",
|
||||
Host: "127.0.0.1",
|
||||
Port: 5236,
|
||||
User: "SYSDBA",
|
||||
Password: "pass",
|
||||
Database: "DBName",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
SSLCertPath: "C:\\certs\\client-cert.pem",
|
||||
SSLKeyPath: "C:\\certs\\client-key.pem",
|
||||
}
|
||||
|
||||
dsn := d.getDSN(cfg)
|
||||
if !strings.Contains(dsn, "SSL_CERT_PATH=") {
|
||||
t.Fatalf("dsn 缺少 SSL_CERT_PATH 参数:%s", dsn)
|
||||
}
|
||||
if !strings.Contains(dsn, "SSL_KEY_PATH=") {
|
||||
t.Fatalf("dsn 缺少 SSL_KEY_PATH 参数:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKingbaseDSN_QuotesPasswordWithSpaces(t *testing.T) {
|
||||
k := &KingbaseDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
@@ -114,3 +177,113 @@ func TestTDengineDSN_UsesWebSocketFormat(t *testing.T) {
|
||||
t.Fatalf("tdengine dsn 格式不正确:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTDengineDSN_UsesSecureWebSocketWhenSSLEnabled(t *testing.T) {
|
||||
td := &TDengineDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "tdengine",
|
||||
Host: "127.0.0.1",
|
||||
Port: 6041,
|
||||
User: "root",
|
||||
Password: "taosdata",
|
||||
Database: "power",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := td.getDSN(cfg)
|
||||
if !strings.HasPrefix(dsn, "root:taosdata@wss(127.0.0.1:6041)/power") {
|
||||
t.Fatalf("tdengine ssl dsn 格式不正确:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSQLServerDSN_EncryptMapping(t *testing.T) {
|
||||
s := &SqlServerDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Host: "127.0.0.1",
|
||||
Port: 1433,
|
||||
User: "sa",
|
||||
Password: "pass",
|
||||
Database: "master",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := s.getDSN(cfg)
|
||||
if !strings.Contains(strings.ToLower(dsn), "encrypt=true") {
|
||||
t.Fatalf("sqlserver dsn 缺少 encrypt=true:%s", dsn)
|
||||
}
|
||||
if !strings.Contains(strings.ToLower(dsn), "trustservercertificate=false") {
|
||||
t.Fatalf("sqlserver dsn 缺少 TrustServerCertificate=false:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClickHouseOptions_UsesStructuredTimeoutAndAuth(t *testing.T) {
|
||||
c := &ClickHouseDB{}
|
||||
cfg := normalizeClickHouseConfig(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Host: "127.0.0.1",
|
||||
Port: 9000,
|
||||
User: "default",
|
||||
Password: "p@ss:wo/rd",
|
||||
Database: "analytics",
|
||||
Timeout: 15,
|
||||
})
|
||||
|
||||
opts := c.buildClickHouseOptions(cfg)
|
||||
if opts == nil {
|
||||
t.Fatal("options 为空")
|
||||
}
|
||||
if len(opts.Addr) != 1 || opts.Addr[0] != "127.0.0.1:9000" {
|
||||
t.Fatalf("addr 不符合预期:%v", opts.Addr)
|
||||
}
|
||||
if opts.Auth.Username != "default" {
|
||||
t.Fatalf("username 不符合预期:%s", opts.Auth.Username)
|
||||
}
|
||||
if opts.Auth.Password != cfg.Password {
|
||||
t.Fatalf("password 不符合预期:%s", opts.Auth.Password)
|
||||
}
|
||||
if opts.Auth.Database != "analytics" {
|
||||
t.Fatalf("database 不符合预期:%s", opts.Auth.Database)
|
||||
}
|
||||
if opts.DialTimeout != 15*time.Second {
|
||||
t.Fatalf("dial timeout 不符合预期:%s", opts.DialTimeout)
|
||||
}
|
||||
if opts.ReadTimeout != minClickHouseReadTimeout {
|
||||
t.Fatalf("read timeout 不符合预期:%s", opts.ReadTimeout)
|
||||
}
|
||||
if _, ok := opts.Settings["write_timeout"]; ok {
|
||||
t.Fatalf("options 不应包含 write_timeout 设置:%v", opts.Settings)
|
||||
}
|
||||
if _, ok := opts.Settings["read_timeout"]; ok {
|
||||
t.Fatalf("options 不应通过 settings 传递 read_timeout:%v", opts.Settings)
|
||||
}
|
||||
if _, ok := opts.Settings["dial_timeout"]; ok {
|
||||
t.Fatalf("options 不应通过 settings 传递 dial_timeout:%v", opts.Settings)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClickHouseOptions_ReadTimeoutUsesLargerConfiguredTimeout(t *testing.T) {
|
||||
c := &ClickHouseDB{}
|
||||
cfg := normalizeClickHouseConfig(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Host: "127.0.0.1",
|
||||
Port: 9000,
|
||||
User: "default",
|
||||
Password: "secret",
|
||||
Database: "analytics",
|
||||
Timeout: 900,
|
||||
})
|
||||
|
||||
opts := c.buildClickHouseOptions(cfg)
|
||||
if opts == nil {
|
||||
t.Fatal("options 为空")
|
||||
}
|
||||
if opts.DialTimeout != 900*time.Second {
|
||||
t.Fatalf("dial timeout 不符合预期:%s", opts.DialTimeout)
|
||||
}
|
||||
if opts.ReadTimeout != 900*time.Second {
|
||||
t.Fatalf("read timeout 不符合预期:%s", opts.ReadTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,7 +55,7 @@ func (d *DuckDB) Close() error {
|
||||
|
||||
func (d *DuckDB) Ping() error {
|
||||
if d.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
timeout := d.pingTimeout
|
||||
if timeout <= 0 {
|
||||
@@ -68,7 +68,7 @@ func (d *DuckDB) Ping() error {
|
||||
|
||||
func (d *DuckDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
rows, err := d.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
@@ -80,7 +80,7 @@ func (d *DuckDB) QueryContext(ctx context.Context, query string) ([]map[string]i
|
||||
|
||||
func (d *DuckDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
rows, err := d.conn.Query(query)
|
||||
if err != nil {
|
||||
@@ -92,7 +92,7 @@ func (d *DuckDB) Query(query string) ([]map[string]interface{}, []string, error)
|
||||
|
||||
func (d *DuckDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if d.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := d.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
@@ -103,7 +103,7 @@ func (d *DuckDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
|
||||
func (d *DuckDB) Exec(query string) (int64, error) {
|
||||
if d.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := d.conn.Exec(query)
|
||||
if err != nil {
|
||||
@@ -174,7 +174,7 @@ ORDER BY table_schema, table_name`
|
||||
func (d *DuckDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
schema, pureTable := normalizeDuckDBSchemaAndTable(dbName, tableName)
|
||||
if pureTable == "" {
|
||||
return "", fmt.Errorf("table name required")
|
||||
return "", fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
escapedTable := escapeDuckDBLiteral(pureTable)
|
||||
@@ -204,13 +204,13 @@ func (d *DuckDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("create statement not found")
|
||||
return "", fmt.Errorf("未找到建表语句")
|
||||
}
|
||||
|
||||
func (d *DuckDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
schema, pureTable := normalizeDuckDBSchemaAndTable(dbName, tableName)
|
||||
if pureTable == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
@@ -303,7 +303,7 @@ func (d *DuckDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefi
|
||||
|
||||
func (d *DuckDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if d.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
tx, err := d.conn.Begin()
|
||||
@@ -346,7 +346,7 @@ func (d *DuckDB) ApplyChanges(tableName string, changes connection.ChangeSet) er
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
return fmt.Errorf("删除失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -367,12 +367,12 @@ func (d *DuckDB) ApplyChanges(tableName string, changes connection.ChangeSet) er
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
return fmt.Errorf("更新操作需要主键条件")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
return fmt.Errorf("更新失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -392,7 +392,7 @@ func (d *DuckDB) ApplyChanges(tableName string, changes connection.ChangeSet) er
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
return fmt.Errorf("插入失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ func (h *HighGoDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("sslmode", resolvePostgresSSLMode(config))
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
@@ -50,7 +50,7 @@ func (h *HighGoDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
runConfig := config
|
||||
|
||||
if config.UseSSH {
|
||||
logger.Infof("HighGo 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
@@ -76,23 +76,37 @@ func (h *HighGoDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = h.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("HighGo 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = h.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("highgo", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
h.conn = db
|
||||
h.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := h.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := h.getDSN(attempt)
|
||||
db, err := sql.Open("highgo", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
h.conn = db
|
||||
h.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := h.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
h.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("HighGo SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Close() error {
|
||||
@@ -111,7 +125,7 @@ func (h *HighGoDB) Close() error {
|
||||
|
||||
func (h *HighGoDB) Ping() error {
|
||||
if h.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
timeout := h.pingTimeout
|
||||
if timeout <= 0 {
|
||||
@@ -124,7 +138,7 @@ func (h *HighGoDB) Ping() error {
|
||||
|
||||
func (h *HighGoDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if h.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := h.conn.QueryContext(ctx, query)
|
||||
@@ -138,7 +152,7 @@ func (h *HighGoDB) QueryContext(ctx context.Context, query string) ([]map[string
|
||||
|
||||
func (h *HighGoDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if h.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := h.conn.Query(query)
|
||||
@@ -151,7 +165,7 @@ func (h *HighGoDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
func (h *HighGoDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if h.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := h.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
@@ -162,7 +176,7 @@ func (h *HighGoDB) ExecContext(ctx context.Context, query string) (int64, error)
|
||||
|
||||
func (h *HighGoDB) Exec(query string) (int64, error) {
|
||||
if h.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := h.conn.Exec(query)
|
||||
if err != nil {
|
||||
@@ -218,7 +232,7 @@ func (h *HighGoDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefi
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
@@ -288,7 +302,7 @@ func (h *HighGoDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefin
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
@@ -393,7 +407,7 @@ func (h *HighGoDB) GetForeignKeys(dbName, tableName string) ([]connection.Foreig
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
@@ -453,7 +467,7 @@ func (h *HighGoDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDe
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
@@ -517,7 +531,7 @@ ORDER BY table_schema, table_name, ordinal_position`
|
||||
|
||||
func (h *HighGoDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if h.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
tx, err := h.conn.Begin()
|
||||
@@ -565,7 +579,7 @@ func (h *HighGoDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
return fmt.Errorf("删除失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -593,12 +607,12 @@ func (h *HighGoDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
return fmt.Errorf("更新操作需要主键条件")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
return fmt.Errorf("更新失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -622,7 +636,7 @@ func (h *HighGoDB) ApplyChanges(tableName string, changes connection.ChangeSet)
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
return fmt.Errorf("插入失败:%v", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
53
internal/db/json_decode.go
Normal file
53
internal/db/json_decode.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
func decodeJSONWithUseNumber(data []byte, out interface{}) error {
|
||||
if out == nil {
|
||||
return nil
|
||||
}
|
||||
decoder := json.NewDecoder(bytes.NewReader(data))
|
||||
decoder.UseNumber()
|
||||
if err := decoder.Decode(out); err != nil {
|
||||
return err
|
||||
}
|
||||
normalizeDecodedJSONNumbers(out)
|
||||
return nil
|
||||
}
|
||||
|
||||
func normalizeDecodedJSONNumbers(out interface{}) {
|
||||
switch typed := out.(type) {
|
||||
case *[]map[string]interface{}:
|
||||
if typed == nil {
|
||||
return
|
||||
}
|
||||
for i := range *typed {
|
||||
row := (*typed)[i]
|
||||
for key, value := range row {
|
||||
row[key] = normalizeQueryValue(value)
|
||||
}
|
||||
}
|
||||
case *map[string]interface{}:
|
||||
if typed == nil || *typed == nil {
|
||||
return
|
||||
}
|
||||
for key, value := range *typed {
|
||||
(*typed)[key] = normalizeQueryValue(value)
|
||||
}
|
||||
case *[]interface{}:
|
||||
if typed == nil {
|
||||
return
|
||||
}
|
||||
for i, item := range *typed {
|
||||
(*typed)[i] = normalizeQueryValue(item)
|
||||
}
|
||||
case *interface{}:
|
||||
if typed == nil {
|
||||
return
|
||||
}
|
||||
*typed = normalizeQueryValue(*typed)
|
||||
}
|
||||
}
|
||||
58
internal/db/json_decode_test.go
Normal file
58
internal/db/json_decode_test.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestDecodeJSONWithUseNumber_QueryRowsPreserveUnsafeInteger(t *testing.T) {
|
||||
raw := []byte(`[{"id":9007199254740993,"safe":123,"nested":{"n":9007199254740992},"arr":[9007199254740992,1],"decimal":1.25}]`)
|
||||
var out []map[string]interface{}
|
||||
|
||||
if err := decodeJSONWithUseNumber(raw, &out); err != nil {
|
||||
t.Fatalf("解码失败: %v", err)
|
||||
}
|
||||
if len(out) != 1 {
|
||||
t.Fatalf("期望 1 行,实际 %d", len(out))
|
||||
}
|
||||
|
||||
row := out[0]
|
||||
if got, ok := row["id"].(string); !ok || got != "9007199254740993" {
|
||||
t.Fatalf("id 应为 string 且保持精度,实际=%v(%T)", row["id"], row["id"])
|
||||
}
|
||||
if got, ok := row["safe"].(int64); !ok || got != 123 {
|
||||
t.Fatalf("safe 应为 int64(123),实际=%v(%T)", row["safe"], row["safe"])
|
||||
}
|
||||
nested, ok := row["nested"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("nested 类型异常:%T", row["nested"])
|
||||
}
|
||||
if got, ok := nested["n"].(string); !ok || got != "9007199254740992" {
|
||||
t.Fatalf("nested.n 应为 string 且保持精度,实际=%v(%T)", nested["n"], nested["n"])
|
||||
}
|
||||
arr, ok := row["arr"].([]interface{})
|
||||
if !ok || len(arr) != 2 {
|
||||
t.Fatalf("arr 类型异常:%v(%T)", row["arr"], row["arr"])
|
||||
}
|
||||
if got, ok := arr[0].(string); !ok || got != "9007199254740992" {
|
||||
t.Fatalf("arr[0] 应为 string 且保持精度,实际=%v(%T)", arr[0], arr[0])
|
||||
}
|
||||
if got, ok := arr[1].(int64); !ok || got != 1 {
|
||||
t.Fatalf("arr[1] 应为 int64(1),实际=%v(%T)", arr[1], arr[1])
|
||||
}
|
||||
if got, ok := row["decimal"].(float64); !ok || got != 1.25 {
|
||||
t.Fatalf("decimal 应为 float64(1.25),实际=%v(%T)", row["decimal"], row["decimal"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeJSONWithUseNumber_TypedStruct(t *testing.T) {
|
||||
type item struct {
|
||||
ID int64 `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
var out []item
|
||||
if err := decodeJSONWithUseNumber([]byte(`[{"id":7,"name":"ok"}]`), &out); err != nil {
|
||||
t.Fatalf("解码失败: %v", err)
|
||||
}
|
||||
if len(out) != 1 || out[0].ID != 7 || out[0].Name != "ok" {
|
||||
t.Fatalf("结构体解码结果异常:%+v", out)
|
||||
}
|
||||
}
|
||||
206
internal/db/kingbase_identifier_utils.go
Normal file
206
internal/db/kingbase_identifier_utils.go
Normal file
@@ -0,0 +1,206 @@
|
||||
package db
|
||||
|
||||
import "strings"
|
||||
|
||||
func normalizeKingbaseIdentCommon(raw string) string {
|
||||
value := strings.TrimSpace(raw)
|
||||
if value == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// 兼容被多次 JSON 序列化后的转义引号:
|
||||
// \\\"schema\\\" -> \"schema\" -> "schema"
|
||||
for i := 0; i < 8; i++ {
|
||||
next := strings.TrimSpace(value)
|
||||
next = strings.ReplaceAll(next, `\\\"`, `\"`)
|
||||
next = strings.ReplaceAll(next, `\"`, `"`)
|
||||
if next == value {
|
||||
break
|
||||
}
|
||||
value = next
|
||||
}
|
||||
value = strings.TrimSpace(value)
|
||||
|
||||
stripWrapperOnce := func(text string) string {
|
||||
t := strings.TrimSpace(text)
|
||||
if strings.HasPrefix(t, `\`) && len(t) > 1 {
|
||||
t = strings.TrimSpace(strings.TrimPrefix(t, `\`))
|
||||
}
|
||||
if strings.HasSuffix(t, `\`) && len(t) > 1 {
|
||||
t = strings.TrimSpace(strings.TrimSuffix(t, `\`))
|
||||
}
|
||||
if len(t) >= 4 && strings.HasPrefix(t, `\"`) && strings.HasSuffix(t, `\"`) {
|
||||
return strings.TrimSpace(t[2 : len(t)-2])
|
||||
}
|
||||
if len(t) >= 2 && strings.HasPrefix(t, `"`) && strings.HasSuffix(t, `"`) {
|
||||
return strings.TrimSpace(t[1 : len(t)-1])
|
||||
}
|
||||
if len(t) >= 2 && strings.HasPrefix(t, "`") && strings.HasSuffix(t, "`") {
|
||||
return strings.TrimSpace(t[1 : len(t)-1])
|
||||
}
|
||||
if len(t) >= 2 && strings.HasPrefix(t, "[") && strings.HasSuffix(t, "]") {
|
||||
return strings.TrimSpace(t[1 : len(t)-1])
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
next := stripWrapperOnce(value)
|
||||
if next == value {
|
||||
break
|
||||
}
|
||||
value = next
|
||||
}
|
||||
value = strings.TrimSpace(value)
|
||||
|
||||
// 兼容错误的二次引用与残留反斜杠。
|
||||
value = strings.ReplaceAll(value, `\"`, `"`)
|
||||
value = strings.ReplaceAll(value, `""`, "")
|
||||
value = strings.TrimSpace(value)
|
||||
|
||||
for i := 0; i < 8; i++ {
|
||||
next := strings.TrimSpace(value)
|
||||
changed := false
|
||||
if strings.HasPrefix(next, `\`) && len(next) > 1 {
|
||||
next = strings.TrimSpace(strings.TrimPrefix(next, `\`))
|
||||
changed = true
|
||||
}
|
||||
if strings.HasSuffix(next, `\`) && len(next) > 1 {
|
||||
next = strings.TrimSpace(strings.TrimSuffix(next, `\`))
|
||||
changed = true
|
||||
}
|
||||
if !changed || next == value {
|
||||
break
|
||||
}
|
||||
value = next
|
||||
}
|
||||
|
||||
return strings.TrimSpace(value)
|
||||
}
|
||||
|
||||
func splitKingbaseQualifiedNameCommon(raw string) (schema string, table string) {
|
||||
text := strings.TrimSpace(raw)
|
||||
if text == "" {
|
||||
return "", ""
|
||||
}
|
||||
|
||||
sep := findKingbaseQualifiedSeparator(text)
|
||||
if sep < 0 {
|
||||
return "", normalizeKingbaseIdentCommon(text)
|
||||
}
|
||||
|
||||
schemaPart := normalizeKingbaseIdentCommon(text[:sep])
|
||||
tablePart := normalizeKingbaseIdentCommon(text[sep+1:])
|
||||
|
||||
if tablePart == "" {
|
||||
if schemaPart == "" {
|
||||
return "", normalizeKingbaseIdentCommon(text)
|
||||
}
|
||||
return "", schemaPart
|
||||
}
|
||||
if schemaPart == "" {
|
||||
return "", tablePart
|
||||
}
|
||||
return schemaPart, tablePart
|
||||
}
|
||||
|
||||
func findKingbaseQualifiedSeparator(raw string) int {
|
||||
inDouble := false
|
||||
inBacktick := false
|
||||
inBracket := false
|
||||
escaped := false
|
||||
|
||||
for i := 0; i < len(raw); i++ {
|
||||
ch := raw[i]
|
||||
if escaped {
|
||||
escaped = false
|
||||
continue
|
||||
}
|
||||
|
||||
if ch == '\\' {
|
||||
escaped = true
|
||||
continue
|
||||
}
|
||||
|
||||
if inDouble {
|
||||
if ch == '"' {
|
||||
// SQL 双引号转义:"" 代表字面量 "
|
||||
if i+1 < len(raw) && raw[i+1] == '"' {
|
||||
i++
|
||||
continue
|
||||
}
|
||||
inDouble = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if inBacktick {
|
||||
if ch == '`' {
|
||||
inBacktick = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if inBracket {
|
||||
if ch == ']' {
|
||||
inBracket = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
switch ch {
|
||||
case '"':
|
||||
inDouble = true
|
||||
case '`':
|
||||
inBacktick = true
|
||||
case '[':
|
||||
inBracket = true
|
||||
case '.':
|
||||
return i
|
||||
}
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
// buildKingbaseSearchPathCommon 统一构建 Kingbase search_path。
|
||||
// 返回 search_path SQL 片段和规范化后的 schema 列表(用于调试/扩展)。
|
||||
func buildKingbaseSearchPathCommon(rawSchemas []string) (string, []string) {
|
||||
if len(rawSchemas) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{}, len(rawSchemas)+1)
|
||||
quotedParts := make([]string, 0, len(rawSchemas)+1)
|
||||
normalizedSchemas := make([]string, 0, len(rawSchemas)+1)
|
||||
|
||||
appendSchema := func(raw string) {
|
||||
cleaned := normalizeKingbaseIdentCommon(raw)
|
||||
if cleaned == "" {
|
||||
return
|
||||
}
|
||||
if strings.EqualFold(cleaned, "public") {
|
||||
cleaned = "public"
|
||||
}
|
||||
key := strings.ToLower(cleaned)
|
||||
if _, ok := seen[key]; ok {
|
||||
return
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
normalizedSchemas = append(normalizedSchemas, cleaned)
|
||||
escaped := strings.ReplaceAll(cleaned, `"`, `""`)
|
||||
quotedParts = append(quotedParts, `"`+escaped+`"`)
|
||||
}
|
||||
|
||||
for _, raw := range rawSchemas {
|
||||
appendSchema(raw)
|
||||
}
|
||||
if _, ok := seen["public"]; !ok {
|
||||
appendSchema("public")
|
||||
}
|
||||
|
||||
if len(quotedParts) == 0 {
|
||||
return "", normalizedSchemas
|
||||
}
|
||||
return strings.Join(quotedParts, ", "), normalizedSchemas
|
||||
}
|
||||
92
internal/db/kingbase_identifier_utils_test.go
Normal file
92
internal/db/kingbase_identifier_utils_test.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNormalizeKingbaseIdentCommon(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
want string
|
||||
}{
|
||||
{name: "plain", in: "ldf_server", want: "ldf_server"},
|
||||
{name: "quoted", in: `"ldf_server"`, want: "ldf_server"},
|
||||
{name: "escaped quoted", in: `\"ldf_server\"`, want: "ldf_server"},
|
||||
{name: "double escaped quoted", in: `\\\"ldf_server\\\"`, want: "ldf_server"},
|
||||
{name: "double quoted", in: `""ldf_server""`, want: "ldf_server"},
|
||||
{name: "backtick quoted", in: "`ldf_server`", want: "ldf_server"},
|
||||
{name: "bracket quoted", in: "[ldf_server]", want: "ldf_server"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := normalizeKingbaseIdentCommon(tt.in); got != tt.want {
|
||||
t.Fatalf("normalizeKingbaseIdentCommon(%q)=%q,want=%q", tt.in, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitKingbaseQualifiedNameCommon(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
wantSchema string
|
||||
wantTable string
|
||||
}{
|
||||
{name: "plain", in: "ldf_server.andon_events", wantSchema: "ldf_server", wantTable: "andon_events"},
|
||||
{name: "quoted", in: `"ldf_server"."andon_events"`, wantSchema: "ldf_server", wantTable: "andon_events"},
|
||||
{name: "escaped quoted", in: `\"ldf_server\".\"andon_events\"`, wantSchema: "ldf_server", wantTable: "andon_events"},
|
||||
{name: "double escaped quoted", in: `\\\"ldf_server\\\".\\\"andon_events\\\"`, wantSchema: "ldf_server", wantTable: "andon_events"},
|
||||
{name: "space around dot", in: ` "ldf_server" . "andon_events" `, wantSchema: "ldf_server", wantTable: "andon_events"},
|
||||
{name: "table only", in: "andon_events", wantSchema: "", wantTable: "andon_events"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
gotSchema, gotTable := splitKingbaseQualifiedNameCommon(tt.in)
|
||||
if gotSchema != tt.wantSchema || gotTable != tt.wantTable {
|
||||
t.Fatalf("splitKingbaseQualifiedNameCommon(%q)=(%q,%q),want=(%q,%q)", tt.in, gotSchema, gotTable, tt.wantSchema, tt.wantTable)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildKingbaseSearchPathCommon(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in []string
|
||||
want string
|
||||
wantLen int
|
||||
}{
|
||||
{
|
||||
name: "normal schemas",
|
||||
in: []string{"ldf_server", "public"},
|
||||
want: `"ldf_server", "public"`,
|
||||
wantLen: 2,
|
||||
},
|
||||
{
|
||||
name: "quoted and escaped schemas should not be double quoted",
|
||||
in: []string{`"ldf_server"`, `""bcs_barcode""`, `\"public\"`},
|
||||
want: `"ldf_server", "bcs_barcode", "public"`,
|
||||
wantLen: 3,
|
||||
},
|
||||
{
|
||||
name: "dedupe ignoring case and keep public fallback",
|
||||
in: []string{"LDF_SERVER", "ldf_server", "PUBLIC"},
|
||||
want: `"LDF_SERVER", "public"`,
|
||||
wantLen: 2,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, parts := buildKingbaseSearchPathCommon(tt.in)
|
||||
if got != tt.want {
|
||||
t.Fatalf("buildKingbaseSearchPathCommon(%v)=%q,want=%q", tt.in, got, tt.want)
|
||||
}
|
||||
if len(parts) != tt.wantLen {
|
||||
t.Fatalf("buildKingbaseSearchPathCommon(%v) parts=%v, len=%d, wantLen=%d", tt.in, parts, len(parts), tt.wantLen)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -65,12 +66,13 @@ func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
port := config.Port
|
||||
|
||||
// Construct DSN
|
||||
dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable connect_timeout=%d",
|
||||
dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=%s connect_timeout=%d",
|
||||
quoteConnValue(address),
|
||||
port,
|
||||
quoteConnValue(config.User),
|
||||
quoteConnValue(config.Password),
|
||||
quoteConnValue(config.Database),
|
||||
quoteConnValue(resolvePostgresSSLMode(config)),
|
||||
getConnectTimeoutSeconds(config),
|
||||
)
|
||||
|
||||
@@ -78,8 +80,7 @@ func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
@@ -108,23 +109,109 @@ func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = k.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("人大金仓通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = k.getDSN(config)
|
||||
}
|
||||
|
||||
// Open using "kingbase" driver
|
||||
db, err := sql.Open("kingbase", dsn)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := k.getDSN(attempt)
|
||||
db, err := sql.Open("kingbase", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
k.conn = db
|
||||
k.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := k.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
k.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("人大金仓 SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
|
||||
// 获取 schema 列表以重构带有 search_path 的连接池
|
||||
searchPathStr := k.getSearchPathStr()
|
||||
if searchPathStr != "" {
|
||||
// 将 search_path 参数拼入 DSN
|
||||
finalDSN := dsn + " search_path=" + quoteConnValue(searchPathStr)
|
||||
if finalDB, err := sql.Open("kingbase", finalDSN); err == nil {
|
||||
k.pingTimeout = getConnectTimeout(attempt)
|
||||
finalDB.SetConnMaxLifetime(5 * time.Minute)
|
||||
|
||||
// 临时将 k.conn 指向 finalDB 来做 ping 测试
|
||||
oldConn := k.conn
|
||||
k.conn = finalDB
|
||||
if err := k.Ping(); err == nil {
|
||||
// 成功使用带 search_path 的连接池
|
||||
_ = oldConn.Close()
|
||||
logger.Infof("人大金仓已配置连接级 search_path:%s", searchPathStr)
|
||||
} else {
|
||||
_ = finalDB.Close()
|
||||
k.conn = oldConn
|
||||
}
|
||||
}
|
||||
}
|
||||
if searchPathStr != "" {
|
||||
timeout := k.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
if _, err := k.conn.ExecContext(ctx, fmt.Sprintf("SET search_path TO %s", searchPathStr)); err != nil {
|
||||
logger.Warnf("人大金仓显式设置 search_path 失败:%v", err)
|
||||
} else {
|
||||
logger.Infof("人大金仓已设置默认 search_path:%s", searchPathStr)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
// getSearchPathStr 查询当前数据库中所有用户 schema,配置 DSN 的 search_path。
|
||||
// KingBase 默认 search_path 为 "$user", public,对于自定义 schema 下的表不可见。
|
||||
func (k *KingbaseDB) getSearchPathStr() string {
|
||||
if k.conn == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
query := `SELECT nspname FROM pg_namespace
|
||||
WHERE nspname NOT IN ('pg_catalog', 'information_schema')
|
||||
AND nspname NOT LIKE 'pg_%'
|
||||
ORDER BY nspname`
|
||||
|
||||
rows, err := k.conn.Query(query)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
logger.Warnf("人大金仓查询用户 schema 失败,跳过 search_path 设置:%v", err)
|
||||
return ""
|
||||
}
|
||||
k.conn = db
|
||||
k.pingTimeout = getConnectTimeout(config)
|
||||
if err := k.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
defer rows.Close()
|
||||
|
||||
var rawSchemas []string
|
||||
for rows.Next() {
|
||||
var name string
|
||||
if err := rows.Scan(&name); err != nil {
|
||||
continue
|
||||
}
|
||||
name = strings.TrimSpace(name)
|
||||
if name != "" {
|
||||
rawSchemas = append(rawSchemas, name)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
||||
searchPath, _ := buildKingbaseSearchPathCommon(rawSchemas)
|
||||
return searchPath
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Close() error {
|
||||
@@ -145,7 +232,7 @@ func (k *KingbaseDB) Close() error {
|
||||
|
||||
func (k *KingbaseDB) Ping() error {
|
||||
if k.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
timeout := k.pingTimeout
|
||||
if timeout <= 0 {
|
||||
@@ -158,7 +245,7 @@ func (k *KingbaseDB) Ping() error {
|
||||
|
||||
func (k *KingbaseDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if k.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := k.conn.QueryContext(ctx, query)
|
||||
@@ -172,7 +259,7 @@ func (k *KingbaseDB) QueryContext(ctx context.Context, query string) ([]map[stri
|
||||
|
||||
func (k *KingbaseDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if k.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
return nil, nil, fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
rows, err := k.conn.Query(query)
|
||||
@@ -185,7 +272,7 @@ func (k *KingbaseDB) Query(query string) ([]map[string]interface{}, []string, er
|
||||
|
||||
func (k *KingbaseDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if k.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := k.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
@@ -196,7 +283,7 @@ func (k *KingbaseDB) ExecContext(ctx context.Context, query string) (int64, erro
|
||||
|
||||
func (k *KingbaseDB) Exec(query string) (int64, error) {
|
||||
if k.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
return 0, fmt.Errorf("连接未打开")
|
||||
}
|
||||
res, err := k.conn.Exec(query)
|
||||
if err != nil {
|
||||
@@ -280,7 +367,7 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
@@ -291,10 +378,30 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s' AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, esc(schema), esc(table))
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = '%s'
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(schema), esc(table))
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -307,11 +414,21 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if row["column_default"] != nil {
|
||||
def := fmt.Sprintf("%v", row["column_default"])
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
if v, ok := row["comment"]; ok && v != nil {
|
||||
col.Comment = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
@@ -323,7 +440,7 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
func (k *KingbaseDB) getColumnsWithCurrentSchema(tableName string) ([]connection.ColumnDefinition, error) {
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
// 转义函数
|
||||
@@ -333,10 +450,30 @@ func (k *KingbaseDB) getColumnsWithCurrentSchema(tableName string) ([]connection
|
||||
}
|
||||
|
||||
// 使用 current_schema() 获取当前schema
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = current_schema() AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, esc(table))
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = current_schema()
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(table))
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -349,11 +486,21 @@ func (k *KingbaseDB) getColumnsWithCurrentSchema(tableName string) ([]connection
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if row["column_default"] != nil {
|
||||
def := fmt.Sprintf("%v", row["column_default"])
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
if v, ok := row["comment"]; ok && v != nil {
|
||||
col.Comment = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
@@ -377,7 +524,7 @@ func (k *KingbaseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDef
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
@@ -475,7 +622,7 @@ func (k *KingbaseDB) GetForeignKeys(dbName, tableName string) ([]connection.Fore
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
@@ -557,7 +704,7 @@ func (k *KingbaseDB) GetTriggers(dbName, tableName string) ([]connection.Trigger
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
return nil, fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
@@ -600,7 +747,7 @@ func (k *KingbaseDB) GetTriggers(dbName, tableName string) ([]connection.Trigger
|
||||
|
||||
func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if k.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
return fmt.Errorf("连接未打开")
|
||||
}
|
||||
|
||||
tx, err := k.conn.Begin()
|
||||
@@ -609,28 +756,16 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
schema, table := splitKingbaseQualifiedTable(tableName)
|
||||
if table == "" {
|
||||
return fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteKingbaseIdent(schema), quoteKingbaseIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
qualifiedTable = quoteKingbaseIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
@@ -640,7 +775,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteKingbaseIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
@@ -648,7 +783,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
return fmt.Errorf("delete error: %v; sql=%s", err, query)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -660,7 +795,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteKingbaseIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
@@ -671,17 +806,17 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteKingbaseIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
return fmt.Errorf("更新操作需要主键条件")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
return fmt.Errorf("update error: %v; sql=%s", err, query)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -694,7 +829,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
cols = append(cols, quoteKingbaseIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf("$%d", idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
@@ -705,13 +840,73 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
return fmt.Errorf("插入失败:%v; sql=%s", err, query)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func normalizeKingbaseIdentifier(raw string) string {
|
||||
return normalizeKingbaseIdentCommon(raw)
|
||||
}
|
||||
|
||||
// kingbaseIdentNeedsQuote 判断标识符是否需要双引号包裹。
|
||||
// 与前端 sql.ts 中 needsQuote 逻辑保持一致。
|
||||
func kingbaseIdentNeedsQuote(ident string) bool {
|
||||
if ident == "" {
|
||||
return false
|
||||
}
|
||||
// 不是合法裸标识符格式(必须以字母或下划线开头,仅含字母、数字、下划线)
|
||||
if matched, _ := regexp.MatchString(`^[a-zA-Z_][a-zA-Z0-9_]*$`, ident); !matched {
|
||||
return true
|
||||
}
|
||||
// 包含大写字母时需要引号保护(KingbaseES/PostgreSQL 默认将未加引号的标识符折叠为小写)
|
||||
for _, r := range ident {
|
||||
if r >= 'A' && r <= 'Z' {
|
||||
return true
|
||||
}
|
||||
}
|
||||
// 是 SQL 保留字
|
||||
return isKingbaseReservedWord(ident)
|
||||
}
|
||||
|
||||
// isKingbaseReservedWord 检查是否为常见 SQL 保留字(简化版,与前端保持一致)。
|
||||
func isKingbaseReservedWord(ident string) bool {
|
||||
switch strings.ToLower(ident) {
|
||||
case "select", "from", "where", "table", "index", "user", "order", "group", "by",
|
||||
"limit", "offset", "and", "or", "not", "null", "true", "false", "key",
|
||||
"primary", "foreign", "references", "default", "constraint",
|
||||
"create", "drop", "alter", "insert", "update", "delete", "set", "values", "into",
|
||||
"join", "left", "right", "inner", "outer", "on", "as", "is", "in", "like",
|
||||
"between", "case", "when", "then", "else", "end", "having", "distinct",
|
||||
"all", "any", "exists", "union", "except", "intersect",
|
||||
"column", "check", "unique", "with", "grant", "revoke", "trigger",
|
||||
"begin", "commit", "rollback", "schema", "database", "view", "function",
|
||||
"procedure", "sequence", "type", "domain", "role", "session", "current",
|
||||
"authorization", "cross", "full", "natural", "some", "cast", "fetch",
|
||||
"for", "to", "do", "if", "return", "returns", "declare", "cursor", "server", "owner":
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func quoteKingbaseIdent(name string) string {
|
||||
n := normalizeKingbaseIdentifier(name)
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
if !kingbaseIdentNeedsQuote(n) {
|
||||
return n
|
||||
}
|
||||
n = strings.ReplaceAll(n, `"`, `""`)
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
func splitKingbaseQualifiedTable(tableName string) (schema string, table string) {
|
||||
return splitKingbaseQualifiedNameCommon(tableName)
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
// dbName 在本项目语义里是“数据库”,schema 由 table_schema 决定;这里返回全部用户 schema 的列用于查询提示。
|
||||
query := `
|
||||
|
||||
117
internal/db/kingbase_impl_test.go
Normal file
117
internal/db/kingbase_impl_test.go
Normal file
@@ -0,0 +1,117 @@
|
||||
//go:build gonavi_full_drivers || gonavi_kingbase_driver
|
||||
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNormalizeKingbaseIdentifier(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
want string
|
||||
}{
|
||||
{name: "plain", in: "ldf_server", want: "ldf_server"},
|
||||
{name: "quoted", in: `"ldf_server"`, want: "ldf_server"},
|
||||
{name: "double quoted", in: `""ldf_server""`, want: "ldf_server"},
|
||||
{name: "quad quoted", in: `""""ldf_server""""`, want: "ldf_server"},
|
||||
{name: "escaped quoted", in: `\"ldf_server\"`, want: "ldf_server"},
|
||||
{name: "double escaped quoted", in: `\\\"ldf_server\\\"`, want: "ldf_server"},
|
||||
{name: "backtick quoted", in: "`ldf_server`", want: "ldf_server"},
|
||||
{name: "bracket quoted", in: "[ldf_server]", want: "ldf_server"},
|
||||
{name: "embedded double quotes", in: `ldf""server`, want: "ldfserver"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := normalizeKingbaseIdentifier(tt.in); got != tt.want {
|
||||
t.Fatalf("normalizeKingbaseIdentifier(%q) = %q, want %q", tt.in, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuoteKingbaseIdent(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
want string
|
||||
}{
|
||||
// 纯小写+下划线:不加引号
|
||||
{name: "plain lowercase", in: "ldf_server", want: "ldf_server"},
|
||||
{name: "plain lowercase 2", in: "bcs_barcode", want: "bcs_barcode"},
|
||||
{name: "double quoted input", in: `""ldf_server""`, want: "ldf_server"},
|
||||
{name: "escaped quoted input", in: `\"ldf_server\"`, want: "ldf_server"},
|
||||
// 含大写字母:加引号
|
||||
{name: "uppercase", in: "LDF_Server", want: `"LDF_Server"`},
|
||||
{name: "mixed case", in: "myTable", want: `"myTable"`},
|
||||
// SQL 保留字:加引号
|
||||
{name: "reserved word order", in: "order", want: `"order"`},
|
||||
{name: "reserved word user", in: "user", want: `"user"`},
|
||||
{name: "reserved word table", in: "table", want: `"table"`},
|
||||
{name: "reserved word select", in: "select", want: `"select"`},
|
||||
// 含特殊字符:加引号
|
||||
{name: "with hyphen", in: "my-table", want: `"my-table"`},
|
||||
{name: "with space", in: "my table", want: `"my table"`},
|
||||
{name: "with embedded quote", in: `ab"cd`, want: `"ab""cd"`},
|
||||
// 空值
|
||||
{name: "empty", in: "", want: `""`},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := quoteKingbaseIdent(tt.in); got != tt.want {
|
||||
t.Fatalf("quoteKingbaseIdent(%q) = %q, want %q", tt.in, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestKingbaseIdentNeedsQuote(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
want bool
|
||||
}{
|
||||
{name: "plain lowercase", in: "ldf_server", want: false},
|
||||
{name: "starts with underscore", in: "_col", want: false},
|
||||
{name: "with digits", in: "col123", want: false},
|
||||
{name: "uppercase", in: "MyTable", want: true},
|
||||
{name: "reserved word", in: "order", want: true},
|
||||
{name: "with hyphen", in: "my-col", want: true},
|
||||
{name: "starts with digit", in: "123col", want: true},
|
||||
{name: "empty", in: "", want: false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := kingbaseIdentNeedsQuote(tt.in); got != tt.want {
|
||||
t.Fatalf("kingbaseIdentNeedsQuote(%q) = %v, want %v", tt.in, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitKingbaseQualifiedTable(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
wantSchema string
|
||||
wantTable string
|
||||
}{
|
||||
{name: "plain qualified", in: "ldf_server.t_user", wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "double quoted qualified", in: `""ldf_server"".""t_user""`, wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "escaped qualified", in: `\"ldf_server\".\"t_user\"`, wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "double escaped qualified", in: `\\\"ldf_server\\\".\\\"t_user\\\"`, wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "bracket qualified", in: "[ldf_server].[t_user]", wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "table only", in: `""t_user""`, wantSchema: "", wantTable: "t_user"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
gotSchema, gotTable := splitKingbaseQualifiedTable(tt.in)
|
||||
if gotSchema != tt.wantSchema || gotTable != tt.wantTable {
|
||||
t.Fatalf("splitKingbaseQualifiedTable(%q) = (%q, %q), want (%q, %q)", tt.in, gotSchema, gotTable, tt.wantSchema, tt.wantTable)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user