mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-12 20:29:43 +08:00
Compare commits
201 Commits
v0.3.2
...
release/0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
11b8e0f12a | ||
|
|
1dabac1a65 | ||
|
|
e013288967 | ||
|
|
ec05f518a9 | ||
|
|
2c9aa640fd | ||
|
|
d467322ebe | ||
|
|
9f7cc58fad | ||
|
|
97bf891df3 | ||
|
|
72a9692200 | ||
|
|
e26a456eae | ||
|
|
eaa45f17fd | ||
|
|
f101a59d32 | ||
|
|
501ad9e9a3 | ||
|
|
482a7fce2e | ||
|
|
e6af5f966b | ||
|
|
eef973b7fc | ||
|
|
d8b6b4ef8d | ||
|
|
4d58cc6e26 | ||
|
|
b0bdddad9b | ||
|
|
a73ca36a32 | ||
|
|
92e9381fcc | ||
|
|
c4c7e379d1 | ||
|
|
695713c779 | ||
|
|
6ad690cffc | ||
|
|
ca49b37dc7 | ||
|
|
c8c0c5f20a | ||
|
|
d61d7ec39b | ||
|
|
e964c8ecf8 | ||
|
|
7644462180 | ||
|
|
3bd02e2e09 | ||
|
|
22bd1c4c28 | ||
|
|
0daf702d25 | ||
|
|
058c74e49a | ||
|
|
b85c7529ec | ||
|
|
e521d2125f | ||
|
|
450fdfa59e | ||
|
|
c87b15b22a | ||
|
|
89c81823bc | ||
|
|
797ba27d20 | ||
|
|
ed1f40e04a | ||
|
|
2b190e564f | ||
|
|
1c050aefd0 | ||
|
|
75a5a322e0 | ||
|
|
61d6197fe3 | ||
|
|
6157161293 | ||
|
|
0f843a7dcf | ||
|
|
fb65b553e9 | ||
|
|
1a5bf79dd3 | ||
|
|
dea096d4c2 | ||
|
|
04f8b266d3 | ||
|
|
b53227cb15 | ||
|
|
0246d7fae5 | ||
|
|
4aa177ed37 | ||
|
|
4f5a7bd94b | ||
|
|
00c6f9871f | ||
|
|
6a4b397ecc | ||
|
|
3973038aea | ||
|
|
71b41459e7 | ||
|
|
69942bb77e | ||
|
|
f372b20a68 | ||
|
|
e6da986927 | ||
|
|
4570516678 | ||
|
|
8c91d8929b | ||
|
|
786835c9bc | ||
|
|
f2fc7cbd05 | ||
|
|
462ca57907 | ||
|
|
4bfdb2cb6c | ||
|
|
6918b56ed9 | ||
|
|
1afb8850ad | ||
|
|
3284eeba17 | ||
|
|
494484eb92 | ||
|
|
6156884455 | ||
|
|
a54b8906a3 | ||
|
|
f477feab2f | ||
|
|
e76e174bfe | ||
|
|
b904c0b107 | ||
|
|
c02e7c12e8 | ||
|
|
a87c801e66 | ||
|
|
7f00139847 | ||
|
|
78c5351399 | ||
|
|
e2acfa51eb | ||
|
|
9a684cd82c | ||
|
|
e3b142053f | ||
|
|
3ca898a950 | ||
|
|
84688e995a | ||
|
|
4d0940636d | ||
|
|
26b79adc5f | ||
|
|
90aa3561be | ||
|
|
ec59023736 | ||
|
|
4a96cb93d2 | ||
|
|
4c322db9d0 | ||
|
|
ed18c8285f | ||
|
|
5f8cedabd8 | ||
|
|
20923989b9 | ||
|
|
210106cde7 | ||
|
|
87aac277ec | ||
|
|
4de3f408c5 | ||
|
|
439625a49c | ||
|
|
884d72f3d3 | ||
|
|
98c1600e13 | ||
|
|
eb594b7741 | ||
|
|
587ed3444b | ||
|
|
e366a61910 | ||
|
|
5986b71c4d | ||
|
|
cb18bc3067 | ||
|
|
d676ac9084 | ||
|
|
7fcbcb2471 | ||
|
|
c680e50e74 | ||
|
|
9685102229 | ||
|
|
3505b4428a | ||
|
|
9ebdf7f053 | ||
|
|
9ad852c10b | ||
|
|
2a8fff4d93 | ||
|
|
eca560b4e5 | ||
|
|
2f475dddc0 | ||
|
|
ad9d8a12be | ||
|
|
095b22951e | ||
|
|
7350a011e3 | ||
|
|
53b5802add | ||
|
|
54e7077317 | ||
|
|
4cb5071b0b | ||
|
|
96de46cf1e | ||
|
|
7d5592d8d9 | ||
|
|
d0ba8822f3 | ||
|
|
140db73ef4 | ||
|
|
7ae5341c1c | ||
|
|
bec5013a44 | ||
|
|
66a3113fa8 | ||
|
|
a435d62d3b | ||
|
|
50d92d3184 | ||
|
|
91658848c9 | ||
|
|
01940e74b7 | ||
|
|
30210bc40e | ||
|
|
fda30539b6 | ||
|
|
1ba68fcbfe | ||
|
|
f0e1c7e72c | ||
|
|
e90a3e2db6 | ||
|
|
663717d738 | ||
|
|
5329f212f7 | ||
|
|
d6e967a0d0 | ||
|
|
7ca2d20c17 | ||
|
|
9307ca5e16 | ||
|
|
60a42e3c34 | ||
|
|
5df95730d8 | ||
|
|
26a7aacfec | ||
|
|
67a9c454d0 | ||
|
|
c17493952b | ||
|
|
dd258bd46c | ||
|
|
8df9ea717c | ||
|
|
505c89066b | ||
|
|
31f2a47d26 | ||
|
|
e01ecfc387 | ||
|
|
69d9a0b11e | ||
|
|
33f4208f39 | ||
|
|
0eeda1d137 | ||
|
|
17d174bc5b | ||
|
|
9320f524a2 | ||
|
|
e31dc4e7f1 | ||
|
|
ab92e94bf8 | ||
|
|
da5708b5bc | ||
|
|
189a2a1871 | ||
|
|
ecf47da81b | ||
|
|
21c8b9a102 | ||
|
|
a07b418b8f | ||
|
|
4bf10e5612 | ||
|
|
e6fe6eb026 | ||
|
|
b4f80f39df | ||
|
|
4d32dd2cb5 | ||
|
|
de8fb60a30 | ||
|
|
b3b77f490d | ||
|
|
52abed83e6 | ||
|
|
80dc863455 | ||
|
|
1a3b55ce19 | ||
|
|
fa318a9f0e | ||
|
|
8dafad7ce3 | ||
|
|
78e35a5be8 | ||
|
|
35ed555857 | ||
|
|
954a5d77d3 | ||
|
|
f3130ff517 | ||
|
|
012c99be9e | ||
|
|
c8575c315b | ||
|
|
601d69faeb | ||
|
|
fdb7781a9b | ||
|
|
087578693e | ||
|
|
aceabb63f5 | ||
|
|
8587f72f81 | ||
|
|
1b5a71d478 | ||
|
|
83ad3b09d9 | ||
|
|
72811092b4 | ||
|
|
b67135e2c1 | ||
|
|
f5e16b0b70 | ||
|
|
f8535dd272 | ||
|
|
5cd8681b80 | ||
|
|
4b381c82b5 | ||
|
|
820b064e7f | ||
|
|
70cb6148c6 | ||
|
|
0cb9cb8bc9 | ||
|
|
c2c88d743b | ||
|
|
e8ef6b0b38 | ||
|
|
257459f96a | ||
|
|
027115ab87 |
26
.github/release.yaml
vendored
Normal file
26
.github/release.yaml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
changelog:
|
||||
categories:
|
||||
- title: 新功能
|
||||
labels:
|
||||
- feature
|
||||
- enhancement
|
||||
- feat
|
||||
- title: 问题修复
|
||||
labels:
|
||||
- bug
|
||||
- fix
|
||||
- title: 文档与流程
|
||||
labels:
|
||||
- docs
|
||||
- documentation
|
||||
- ci
|
||||
- workflow
|
||||
- chore
|
||||
- title: 重构与优化
|
||||
labels:
|
||||
- refactor
|
||||
- perf
|
||||
- optimization
|
||||
- title: 其他更新
|
||||
labels:
|
||||
- '*'
|
||||
3
.github/workflows/release-winget.yml
vendored
3
.github/workflows/release-winget.yml
vendored
@@ -10,6 +10,9 @@ on:
|
||||
description: 'Tag of release you want to publish'
|
||||
type: string
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: windows-latest
|
||||
|
||||
428
.github/workflows/release.yml
vendored
428
.github/workflows/release.yml
vendored
@@ -8,6 +8,9 @@ on:
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
# Phase 1: Build in parallel and output artifacts
|
||||
build:
|
||||
@@ -19,23 +22,59 @@ jobs:
|
||||
include:
|
||||
- os: macos-latest
|
||||
platform: darwin/amd64
|
||||
artifact_name: GoNavi-mac-amd64
|
||||
asset_ext: .dmg
|
||||
os_name: MacOS
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-build-darwin-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: macos-latest
|
||||
platform: darwin/arm64
|
||||
artifact_name: GoNavi-mac-arm64
|
||||
asset_ext: .dmg
|
||||
os_name: MacOS
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-build-darwin-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/amd64
|
||||
artifact_name: GoNavi-windows-amd64
|
||||
asset_ext: .exe
|
||||
os_name: Windows
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-build-windows-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/arm64
|
||||
artifact_name: GoNavi-windows-arm64
|
||||
asset_ext: .exe
|
||||
os_name: Windows
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-build-windows-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: ubuntu-22.04
|
||||
platform: linux/amd64
|
||||
artifact_name: GoNavi-linux-amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-build-linux-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: "4.0"
|
||||
# Debian 13 (trixie) 默认仓库已切到 WebKitGTK 4.1:单独提供 4.1 变体产物
|
||||
- os: ubuntu-24.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-build-linux-amd64-webkit41
|
||||
wails_tags: "webkit2_41"
|
||||
artifact_suffix: "-WebKit41"
|
||||
build_optional_agents: false
|
||||
linux_webkit: "4.1"
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
@@ -52,12 +91,37 @@ jobs:
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install UPX (Windows)
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
choco install upx --no-progress -y
|
||||
$upxCmd = Get-Command upx -ErrorAction SilentlyContinue
|
||||
if ($null -eq $upxCmd) {
|
||||
Write-Error "❌ 未检测到 upx,无法保证 Windows 产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
& upx --version
|
||||
|
||||
# Linux Dependencies (GTK3, WebKit2GTK required by Wails)
|
||||
- name: Install Linux Dependencies
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libfuse2
|
||||
sudo apt-get install -y libgtk-3-dev
|
||||
|
||||
# WebKitGTK 4.1 需要 libsoup3;4.0 使用 libsoup2(通常由 webkit2gtk dev 包拉起)
|
||||
if [ "${{ matrix.linux_webkit }}" = "4.1" ]; then
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libsoup-3.0-dev
|
||||
else
|
||||
sudo apt-get install -y libwebkit2gtk-4.0-dev
|
||||
fi
|
||||
|
||||
sudo apt-get install -y upx-ucl || sudo apt-get install -y upx
|
||||
upx --version
|
||||
|
||||
# AppImage 运行/打包可能需要 FUSE2。不同发行版/版本包名不同,做兼容兜底。
|
||||
sudo apt-get install -y libfuse2 || sudo apt-get install -y libfuse2t64 || true
|
||||
|
||||
# Download linuxdeploy tools for AppImage packaging
|
||||
LINUXDEPLOY_URL="https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
|
||||
@@ -85,16 +149,144 @@ jobs:
|
||||
- name: Install Wails
|
||||
run: go install -v github.com/wailsapp/wails/v2/cmd/wails@latest
|
||||
|
||||
- name: Setup MSYS2 Toolchain For DuckDB (Windows AMD64)
|
||||
id: msys2_duckdb
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
continue-on-error: true
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: UCRT64
|
||||
update: true
|
||||
install: >-
|
||||
mingw-w64-ucrt-x86_64-gcc
|
||||
|
||||
- name: Configure DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
function Find-MingwBin([string[]]$candidates) {
|
||||
foreach ($bin in $candidates) {
|
||||
if ([string]::IsNullOrWhiteSpace($bin)) {
|
||||
continue
|
||||
}
|
||||
$gcc = Join-Path $bin 'gcc.exe'
|
||||
$gxx = Join-Path $bin 'g++.exe'
|
||||
if ((Test-Path $gcc) -and (Test-Path $gxx)) {
|
||||
return $bin
|
||||
}
|
||||
}
|
||||
return $null
|
||||
}
|
||||
|
||||
$msys2Outcome = "${{ steps.msys2_duckdb.outcome }}"
|
||||
$msys2Location = "${{ steps.msys2_duckdb.outputs['msys2-location'] }}"
|
||||
$candidateBins = @()
|
||||
if (-not [string]::IsNullOrWhiteSpace($msys2Location)) {
|
||||
$candidateBins += Join-Path $msys2Location 'ucrt64\bin'
|
||||
}
|
||||
$candidateBins += @(
|
||||
'C:\msys64\ucrt64\bin',
|
||||
'D:\a\_temp\msys64\ucrt64\bin'
|
||||
)
|
||||
$candidateBins = @($candidateBins | Select-Object -Unique)
|
||||
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
if (-not $mingwBin) {
|
||||
if ($msys2Outcome -ne 'success') {
|
||||
Write-Warning "⚠️ MSYS2 安装步骤结果为 $msys2Outcome,回退到 UCRT64 本机路径探测"
|
||||
} else {
|
||||
Write-Warning "⚠️ MSYS2 已执行,但未找到 UCRT64 gcc/g++,回退到本机路径探测"
|
||||
}
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
}
|
||||
|
||||
if (-not $mingwBin) {
|
||||
Write-Error "❌ 未找到可用的 DuckDB UCRT64 编译器。已检查:$($candidateBins -join ', ')"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$gcc = (Join-Path $mingwBin 'gcc.exe')
|
||||
$gxx = (Join-Path $mingwBin 'g++.exe')
|
||||
|
||||
if (!(Test-Path $gcc) -or !(Test-Path $gxx)) {
|
||||
Write-Error "❌ DuckDB 编译器缺失:gcc=$gcc g++=$gxx"
|
||||
exit 1
|
||||
}
|
||||
|
||||
"$mingwBin" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
"CC=$gcc" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
"CXX=$gxx" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
Write-Host "✅ 已配置 DuckDB cgo 编译器: gcc=$gcc g++=$gxx"
|
||||
|
||||
- name: Verify DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
& "$env:CC" --version
|
||||
& "$env:CXX" --version
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.artifact_name }} -ldflags "-X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
set -euo pipefail
|
||||
if [ -n "${{ matrix.wails_tags }}" ]; then
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} -tags "${{ matrix.wails_tags }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
else
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
fi
|
||||
|
||||
- name: Build Optional Driver Agents
|
||||
if: ${{ matrix.build_optional_agents }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TARGET_PLATFORM="${{ matrix.platform }}"
|
||||
GOOS="${TARGET_PLATFORM%%/*}"
|
||||
GOARCH="${TARGET_PLATFORM##*/}"
|
||||
DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
OUTDIR="drivers/${{ matrix.os_name }}"
|
||||
mkdir -p "$OUTDIR"
|
||||
|
||||
for DRIVER in "${DRIVERS[@]}"; do
|
||||
BUILD_DRIVER="$DRIVER"
|
||||
if [ "$DRIVER" = "doris" ]; then
|
||||
BUILD_DRIVER="diros"
|
||||
fi
|
||||
if [ "$DRIVER" = "duckdb" ] && [ "$GOOS" = "windows" ] && [ "$GOARCH" != "amd64" ]; then
|
||||
echo "⚠️ 跳过 DuckDB driver(当前平台 ${GOOS}/${GOARCH} 不受支持,仅支持 windows/amd64)"
|
||||
continue
|
||||
fi
|
||||
TAG="gonavi_${BUILD_DRIVER}_driver"
|
||||
OUTPUT="${DRIVER}-driver-agent-${GOOS}-${GOARCH}"
|
||||
if [ "$GOOS" = "windows" ]; then
|
||||
OUTPUT="${OUTPUT}.exe"
|
||||
fi
|
||||
OUTPUT_PATH="${OUTDIR}/${OUTPUT}"
|
||||
echo "🔧 构建 ${OUTPUT_PATH} (tag=${TAG})"
|
||||
if [ "$DRIVER" = "duckdb" ]; then
|
||||
CGO_ENABLED=1 GOOS="$GOOS" GOARCH="$GOARCH" go build \
|
||||
-tags "${TAG}" \
|
||||
-trimpath \
|
||||
-ldflags "-s -w" \
|
||||
-o "${OUTPUT_PATH}" \
|
||||
./cmd/optional-driver-agent
|
||||
else
|
||||
CGO_ENABLED=0 GOOS="$GOOS" GOARCH="$GOARCH" go build \
|
||||
-tags "${TAG}" \
|
||||
-trimpath \
|
||||
-ldflags "-s -w" \
|
||||
-o "${OUTPUT_PATH}" \
|
||||
./cmd/optional-driver-agent
|
||||
fi
|
||||
done
|
||||
|
||||
# macOS Packaging
|
||||
- name: Package macOS DMG
|
||||
if: contains(matrix.platform, 'darwin')
|
||||
run: |
|
||||
brew install create-dmg
|
||||
VERSION="${{ github.ref_name }}"
|
||||
VERSION="${VERSION#v}"
|
||||
cd build/bin
|
||||
|
||||
APP_PATH=$(find . -maxdepth 1 -name "*.app" | head -n 1)
|
||||
@@ -103,11 +295,21 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
|
||||
APP_BIN=$(find "$APP_PATH/Contents/MacOS" -maxdepth 1 -type f | head -n 1)
|
||||
if [ -z "$APP_BIN" ]; then
|
||||
echo "❌ 未找到 macOS 应用主程序!"
|
||||
exit 1
|
||||
fi
|
||||
echo "ℹ️ macOS 产物不执行 UPX 压缩,保留原始主程序。"
|
||||
|
||||
echo "🔏 正在进行 Ad-hoc 签名..."
|
||||
codesign --force --options runtime --deep --sign - "$APP_NAME"
|
||||
# 注意:Ad-hoc + hardened runtime(--options runtime)在未配置 entitlements 时,
|
||||
# 可能导致部分 macOS 机型上应用双击无响应。这里保持 Ad-hoc 深签名但禁用 runtime hardened。
|
||||
codesign --force --deep --sign - "$APP_NAME"
|
||||
|
||||
DMG_NAME="${{ matrix.artifact_name }}.dmg"
|
||||
DMG_NAME="${{ matrix.build_name }}.dmg"
|
||||
FINAL_NAME="GoNavi-$VERSION-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.dmg"
|
||||
echo "📦 正在生成 DMG: $DMG_NAME..."
|
||||
|
||||
create-dmg \
|
||||
@@ -121,35 +323,75 @@ jobs:
|
||||
"$DMG_NAME" \
|
||||
"$APP_NAME"
|
||||
|
||||
mv "$DMG_NAME" ../../
|
||||
mv "$DMG_NAME" "../../$FINAL_NAME"
|
||||
|
||||
# Windows Packaging
|
||||
- name: Prepare Windows Exe
|
||||
- name: Package Windows EXE
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: bash
|
||||
shell: pwsh
|
||||
run: |
|
||||
cd build/bin
|
||||
TARGET="${{ matrix.artifact_name }}"
|
||||
|
||||
if [ -f "$TARGET.exe" ]; then
|
||||
FINAL_EXE="$TARGET.exe"
|
||||
elif [ -f "$TARGET" ]; then
|
||||
mv "$TARGET" "$TARGET.exe"
|
||||
FINAL_EXE="$TARGET.exe"
|
||||
else
|
||||
echo "❌ 未找到构建产物 '$TARGET'!"
|
||||
Set-Location build/bin
|
||||
$version = "${{ github.ref_name }}"
|
||||
if ($version.StartsWith("v")) {
|
||||
$version = $version.Substring(1)
|
||||
}
|
||||
$target = "${{ matrix.build_name }}"
|
||||
$finalExeName = "GoNavi-$version-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.exe"
|
||||
|
||||
if (Test-Path "$target.exe") {
|
||||
$finalExe = "$target.exe"
|
||||
} elseif (Test-Path "$target") {
|
||||
Rename-Item -Path "$target" -NewName "$target.exe"
|
||||
$finalExe = "$target.exe"
|
||||
} else {
|
||||
Write-Error "❌ 未找到构建产物 '$target'!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "📦 正在移动 $FINAL_EXE 到根目录..."
|
||||
mv "$FINAL_EXE" "../../$FINAL_EXE"
|
||||
}
|
||||
|
||||
$isArm64Target = "${{ matrix.arch_name }}".ToLowerInvariant() -eq "arm64"
|
||||
if ($isArm64Target) {
|
||||
Write-Warning "⚠️ UPX 当前不支持 win64/arm64,跳过压缩并保留原始 EXE。"
|
||||
$LASTEXITCODE = 0
|
||||
} else {
|
||||
$upxCmd = Get-Command upx -ErrorAction SilentlyContinue
|
||||
if ($null -eq $upxCmd) {
|
||||
Write-Error "❌ 未找到 upx,无法保证 Windows 产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
$beforeBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
Write-Host "🗜️ 使用 UPX 压缩 $finalExe ..."
|
||||
& upx --best --lzma --force $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 压缩失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
& upx -t $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 校验失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
$afterBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
if ($afterBytes -lt $beforeBytes) {
|
||||
$savedBytes = $beforeBytes - $afterBytes
|
||||
Write-Host ("✅ UPX 压缩完成:{0:N2}MB -> {1:N2}MB,减少 {2:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB), ($savedBytes / 1MB))
|
||||
} else {
|
||||
Write-Host ("ℹ️ UPX 压缩完成:{0:N2}MB -> {1:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB))
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "📦 输出 Windows 可执行文件 $finalExeName..."
|
||||
Copy-Item -LiteralPath $finalExe -Destination "..\\..\\$finalExeName" -Force
|
||||
|
||||
# Linux Packaging (tar.gz and AppImage)
|
||||
- name: Package Linux
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
VERSION="${{ github.ref_name }}"
|
||||
VERSION="${VERSION#v}"
|
||||
cd build/bin
|
||||
TARGET="${{ matrix.artifact_name }}"
|
||||
TARGET="${{ matrix.build_name }}"
|
||||
TAR_NAME="GoNavi-$VERSION-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.tar.gz"
|
||||
APPIMAGE_NAME="GoNavi-$VERSION-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.AppImage"
|
||||
|
||||
if [ ! -f "$TARGET" ]; then
|
||||
echo "❌ 未找到构建产物 '$TARGET'!"
|
||||
@@ -157,11 +399,22 @@ jobs:
|
||||
fi
|
||||
|
||||
chmod +x "$TARGET"
|
||||
BEFORE_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
echo "🗜️ 正在使用 UPX 压缩 Linux 可执行文件: $TARGET ..."
|
||||
upx --best --lzma --force "$TARGET"
|
||||
upx -t "$TARGET"
|
||||
AFTER_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
if [ "$AFTER_BYTES" -lt "$BEFORE_BYTES" ]; then
|
||||
SAVED_BYTES=$((BEFORE_BYTES - AFTER_BYTES))
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" -v s="$SAVED_BYTES" 'BEGIN { printf "✅ Linux UPX 压缩完成:%.2fMB -> %.2fMB,减少 %.2fMB\n", b/1024/1024, a/1024/1024, s/1024/1024 }'
|
||||
else
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" 'BEGIN { printf "ℹ️ Linux UPX 压缩完成:%.2fMB -> %.2fMB\n", b/1024/1024, a/1024/1024 }'
|
||||
fi
|
||||
|
||||
# 1. Create tar.gz
|
||||
echo "📦 正在打包 $TARGET.tar.gz..."
|
||||
tar -czvf "$TARGET.tar.gz" "$TARGET"
|
||||
mv "$TARGET.tar.gz" ../../
|
||||
echo "📦 正在打包 $TAR_NAME..."
|
||||
tar -czvf "$TAR_NAME" "$TARGET"
|
||||
mv "$TAR_NAME" ../../
|
||||
|
||||
# 2. Create AppImage (skip for ARM64 or if tools unavailable)
|
||||
if [ -f /tmp/skip-appimage ]; then
|
||||
@@ -211,13 +464,13 @@ jobs:
|
||||
}
|
||||
|
||||
# Rename output
|
||||
mv GoNavi*.AppImage "$TARGET.AppImage" 2>/dev/null || {
|
||||
mv GoNavi*.AppImage "$APPIMAGE_NAME" 2>/dev/null || {
|
||||
echo "⚠️ AppImage 重命名失败"
|
||||
exit 0
|
||||
}
|
||||
|
||||
if [ -f "$TARGET.AppImage" ]; then
|
||||
mv "$TARGET.AppImage" ../../
|
||||
if [ -f "$APPIMAGE_NAME" ]; then
|
||||
mv "$APPIMAGE_NAME" ../../
|
||||
echo "✅ AppImage 生成成功"
|
||||
fi
|
||||
|
||||
@@ -231,6 +484,7 @@ jobs:
|
||||
GoNavi-*.exe
|
||||
GoNavi-*.tar.gz
|
||||
GoNavi-*.AppImage
|
||||
drivers/**
|
||||
retention-days: 1
|
||||
|
||||
# Phase 2: Collect all artifacts and Publish Release (Single Job)
|
||||
@@ -249,10 +503,107 @@ jobs:
|
||||
- name: List Assets
|
||||
run: ls -R release-assets
|
||||
|
||||
- name: Verify Optional Driver Assets
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cd release-assets
|
||||
|
||||
REQUIRED_FILES=(
|
||||
"drivers/Windows/duckdb-driver-agent-windows-amd64.exe"
|
||||
"drivers/MacOS/duckdb-driver-agent-darwin-amd64"
|
||||
"drivers/MacOS/duckdb-driver-agent-darwin-arm64"
|
||||
"drivers/Linux/duckdb-driver-agent-linux-amd64"
|
||||
"drivers/Windows/clickhouse-driver-agent-windows-amd64.exe"
|
||||
"drivers/MacOS/clickhouse-driver-agent-darwin-amd64"
|
||||
"drivers/MacOS/clickhouse-driver-agent-darwin-arm64"
|
||||
"drivers/Linux/clickhouse-driver-agent-linux-amd64"
|
||||
)
|
||||
|
||||
missing=0
|
||||
for file in "${REQUIRED_FILES[@]}"; do
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "❌ 缺少驱动资产:$file"
|
||||
missing=1
|
||||
else
|
||||
echo "✅ 已找到驱动资产:$file"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$missing" -ne 0 ]; then
|
||||
echo "❌ 可选驱动资产不完整,终止发布"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Package Driver Agents Bundle
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cd release-assets
|
||||
if [ ! -d drivers ]; then
|
||||
echo "⚠️ 未找到 drivers 目录,跳过驱动总包打包"
|
||||
exit 0
|
||||
fi
|
||||
if [ -z "$(find drivers -type f 2>/dev/null | head -n 1)" ]; then
|
||||
echo "⚠️ drivers 目录为空,跳过驱动总包打包"
|
||||
rm -rf drivers
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "📦 打包驱动总包:GoNavi-DriverAgents.zip"
|
||||
python3 - <<'PY'
|
||||
import json
|
||||
import os
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
out_name = "GoNavi-DriverAgents.zip"
|
||||
index_name = "GoNavi-DriverAgents-Index.json"
|
||||
base = Path("drivers")
|
||||
out_path = Path(out_name)
|
||||
index_path = Path(index_name)
|
||||
if out_path.exists():
|
||||
out_path.unlink()
|
||||
if index_path.exists():
|
||||
index_path.unlink()
|
||||
|
||||
size_index = {}
|
||||
with zipfile.ZipFile(out_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
|
||||
for p in base.rglob("*"):
|
||||
if not p.is_file():
|
||||
continue
|
||||
arcname = p.relative_to(base).as_posix()
|
||||
zf.write(p, arcname)
|
||||
size_index[p.name] = p.stat().st_size
|
||||
|
||||
index_path.write_text(
|
||||
json.dumps({"assets": size_index}, ensure_ascii=False, indent=2),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
print(f"created {out_name} size={out_path.stat().st_size} bytes")
|
||||
print(f"created {index_name} entries={len(size_index)}")
|
||||
PY
|
||||
|
||||
# Release 只发布一个驱动总包,避免大量平铺资产污染 Release 页面
|
||||
rm -rf drivers
|
||||
|
||||
- name: Generate SHA256SUMS
|
||||
shell: bash
|
||||
run: |
|
||||
cd release-assets
|
||||
sha256sum * > SHA256SUMS
|
||||
FILES=()
|
||||
while IFS= read -r file; do
|
||||
if [ -n "$file" ]; then
|
||||
FILES+=("$file")
|
||||
fi
|
||||
done < <(find . -maxdepth 1 -type f ! -name SHA256SUMS -exec basename {} \; | sort)
|
||||
if [ ${#FILES[@]} -eq 0 ]; then
|
||||
echo "⚠️ 未找到可签名资产,生成空 SHA256SUMS"
|
||||
: > SHA256SUMS
|
||||
else
|
||||
sha256sum "${FILES[@]}" > SHA256SUMS
|
||||
fi
|
||||
|
||||
- name: Create Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
@@ -261,5 +612,6 @@ jobs:
|
||||
files: release-assets/*
|
||||
draft: true
|
||||
make_latest: true
|
||||
generate_release_notes: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
404
.github/workflows/test-build-all-platforms.yml
vendored
Normal file
404
.github/workflows/test-build-all-platforms.yml
vendored
Normal file
@@ -0,0 +1,404 @@
|
||||
name: Test Build All Platforms (Manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
build_label:
|
||||
description: "测试包标识(仅用于文件名)"
|
||||
required: false
|
||||
default: "test"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
concurrency:
|
||||
group: test-build-${{ github.ref }}
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build ${{ matrix.platform }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
platform: darwin/amd64
|
||||
os_name: MacOS
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-darwin-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: macos-latest
|
||||
platform: darwin/arm64
|
||||
os_name: MacOS
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-test-darwin-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/amd64
|
||||
os_name: Windows
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-windows-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/arm64
|
||||
os_name: Windows
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-test-windows-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: ubuntu-22.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-linux-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: "4.0"
|
||||
- os: ubuntu-24.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-linux-amd64-webkit41
|
||||
wails_tags: "webkit2_41"
|
||||
artifact_suffix: "-WebKit41"
|
||||
build_optional_agents: false
|
||||
linux_webkit: "4.1"
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.24'
|
||||
check-latest: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install UPX (Windows)
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
choco install upx --no-progress -y
|
||||
$upxCmd = Get-Command upx -ErrorAction SilentlyContinue
|
||||
if ($null -eq $upxCmd) {
|
||||
Write-Error "❌ 未检测到 upx,无法保证 Windows 测试产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
& upx --version
|
||||
|
||||
- name: Install Linux Dependencies
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev
|
||||
|
||||
if [ "${{ matrix.linux_webkit }}" = "4.1" ]; then
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libsoup-3.0-dev
|
||||
else
|
||||
sudo apt-get install -y libwebkit2gtk-4.0-dev
|
||||
fi
|
||||
|
||||
sudo apt-get install -y upx-ucl || sudo apt-get install -y upx
|
||||
upx --version
|
||||
|
||||
sudo apt-get install -y libfuse2 || sudo apt-get install -y libfuse2t64 || true
|
||||
|
||||
LINUXDEPLOY_URL="https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
|
||||
PLUGIN_URL="https://github.com/linuxdeploy/linuxdeploy-plugin-gtk/releases/download/continuous/linuxdeploy-plugin-gtk-x86_64.AppImage"
|
||||
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 -O /tmp/linuxdeploy "$LINUXDEPLOY_URL" || {
|
||||
echo "skip-appimage=true" >> "$GITHUB_ENV"
|
||||
}
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 -O /tmp/linuxdeploy-plugin-gtk "$PLUGIN_URL" || {
|
||||
echo "skip-appimage=true" >> "$GITHUB_ENV"
|
||||
}
|
||||
|
||||
if [ "${skip-appimage:-false}" != "true" ]; then
|
||||
chmod +x /tmp/linuxdeploy /tmp/linuxdeploy-plugin-gtk
|
||||
fi
|
||||
|
||||
- name: Install Wails
|
||||
run: go install github.com/wailsapp/wails/v2/cmd/wails@v2.11.0
|
||||
|
||||
- name: Setup MSYS2 Toolchain For DuckDB (Windows AMD64)
|
||||
id: msys2_duckdb
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
continue-on-error: true
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: UCRT64
|
||||
update: true
|
||||
install: >-
|
||||
mingw-w64-ucrt-x86_64-gcc
|
||||
|
||||
- name: Configure DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
function Find-MingwBin([string[]]$candidates) {
|
||||
foreach ($bin in $candidates) {
|
||||
if ([string]::IsNullOrWhiteSpace($bin)) {
|
||||
continue
|
||||
}
|
||||
$gcc = Join-Path $bin 'gcc.exe'
|
||||
$gxx = Join-Path $bin 'g++.exe'
|
||||
if ((Test-Path $gcc) -and (Test-Path $gxx)) {
|
||||
return $bin
|
||||
}
|
||||
}
|
||||
return $null
|
||||
}
|
||||
|
||||
$msys2Location = "${{ steps.msys2_duckdb.outputs['msys2-location'] }}"
|
||||
$candidateBins = @()
|
||||
if (-not [string]::IsNullOrWhiteSpace($msys2Location)) {
|
||||
$candidateBins += Join-Path $msys2Location 'ucrt64\bin'
|
||||
}
|
||||
$candidateBins += @(
|
||||
'C:\msys64\ucrt64\bin',
|
||||
'D:\a\_temp\msys64\ucrt64\bin'
|
||||
)
|
||||
$candidateBins = @($candidateBins | Select-Object -Unique)
|
||||
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
if (-not $mingwBin) {
|
||||
Write-Error "❌ 未找到可用的 DuckDB UCRT64 编译器。"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$gcc = Join-Path $mingwBin 'gcc.exe'
|
||||
$gxx = Join-Path $mingwBin 'g++.exe'
|
||||
"$mingwBin" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
"CC=$gcc" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
"CXX=$gxx" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
|
||||
- name: Build App
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
BUILD_LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$BUILD_LABEL" ]; then
|
||||
BUILD_LABEL="test"
|
||||
fi
|
||||
APP_VERSION="${BUILD_LABEL}-${GITHUB_RUN_NUMBER}"
|
||||
if [ -n "${{ matrix.wails_tags }}" ]; then
|
||||
wails build -platform "${{ matrix.platform }}" -clean -o "${{ matrix.build_name }}" -tags "${{ matrix.wails_tags }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
else
|
||||
wails build -platform "${{ matrix.platform }}" -clean -o "${{ matrix.build_name }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
fi
|
||||
|
||||
- name: Build Optional Driver Agents
|
||||
if: ${{ matrix.build_optional_agents }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TARGET_PLATFORM="${{ matrix.platform }}"
|
||||
GOOS="${TARGET_PLATFORM%%/*}"
|
||||
GOARCH="${TARGET_PLATFORM##*/}"
|
||||
DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
OUTDIR="drivers/${{ matrix.os_name }}"
|
||||
mkdir -p "$OUTDIR"
|
||||
|
||||
for DRIVER in "${DRIVERS[@]}"; do
|
||||
BUILD_DRIVER="$DRIVER"
|
||||
if [ "$DRIVER" = "doris" ]; then
|
||||
BUILD_DRIVER="diros"
|
||||
fi
|
||||
if [ "$DRIVER" = "duckdb" ] && [ "$GOOS" = "windows" ] && [ "$GOARCH" != "amd64" ]; then
|
||||
echo "跳过 DuckDB driver: ${GOOS}/${GOARCH}"
|
||||
continue
|
||||
fi
|
||||
TAG="gonavi_${BUILD_DRIVER}_driver"
|
||||
OUTPUT="${DRIVER}-driver-agent-${GOOS}-${GOARCH}"
|
||||
if [ "$GOOS" = "windows" ]; then
|
||||
OUTPUT="${OUTPUT}.exe"
|
||||
fi
|
||||
OUTPUT_PATH="${OUTDIR}/${OUTPUT}"
|
||||
if [ "$DRIVER" = "duckdb" ]; then
|
||||
CGO_ENABLED=1 GOOS="$GOOS" GOARCH="$GOARCH" go build -tags "$TAG" -trimpath -ldflags "-s -w" -o "$OUTPUT_PATH" ./cmd/optional-driver-agent
|
||||
else
|
||||
CGO_ENABLED=0 GOOS="$GOOS" GOARCH="$GOARCH" go build -tags "$TAG" -trimpath -ldflags "-s -w" -o "$OUTPUT_PATH" ./cmd/optional-driver-agent
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Package macOS
|
||||
if: contains(matrix.platform, 'darwin')
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
brew install create-dmg
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
cd build/bin
|
||||
APP_PATH=$(find . -maxdepth 1 -name "*.app" | head -n 1)
|
||||
if [ -z "$APP_PATH" ]; then
|
||||
echo "未找到 .app 应用包"
|
||||
exit 1
|
||||
fi
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
APP_BIN=$(find "$APP_PATH/Contents/MacOS" -maxdepth 1 -type f | head -n 1)
|
||||
if [ -z "$APP_BIN" ]; then
|
||||
echo "未找到 macOS 应用主程序"
|
||||
exit 1
|
||||
fi
|
||||
echo "ℹ️ macOS 产物不执行 UPX 压缩,保留原始主程序。"
|
||||
codesign --force --deep --sign - "$APP_NAME"
|
||||
ZIP_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}-run${GITHUB_RUN_NUMBER}.zip"
|
||||
DMG_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}-run${GITHUB_RUN_NUMBER}.dmg"
|
||||
mkdir -p ../../artifacts
|
||||
ditto -c -k --sequesterRsrc --keepParent "$APP_NAME" "../../artifacts/$ZIP_NAME"
|
||||
create-dmg \
|
||||
--volname "GoNavi Test Installer" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_NAME" 200 190 \
|
||||
--hide-extension "$APP_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DMG_NAME" \
|
||||
"$APP_NAME"
|
||||
mv "$DMG_NAME" "../../artifacts/$DMG_NAME"
|
||||
shasum -a 256 "../../artifacts/$ZIP_NAME" > "../../artifacts/$ZIP_NAME.sha256"
|
||||
shasum -a 256 "../../artifacts/$DMG_NAME" > "../../artifacts/$DMG_NAME.sha256"
|
||||
|
||||
- name: Package Windows
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
$label = "${{ inputs.build_label }}"
|
||||
if ([string]::IsNullOrWhiteSpace($label)) { $label = 'test' }
|
||||
Set-Location build/bin
|
||||
$target = "${{ matrix.build_name }}"
|
||||
$finalExeName = "GoNavi-$label-${{ matrix.os_name }}-${{ matrix.arch_name }}-run$env:GITHUB_RUN_NUMBER.exe"
|
||||
if (Test-Path "$target.exe") {
|
||||
$finalExe = "$target.exe"
|
||||
} elseif (Test-Path "$target") {
|
||||
Rename-Item -Path "$target" -NewName "$target.exe"
|
||||
$finalExe = "$target.exe"
|
||||
} else {
|
||||
Write-Error "未找到构建产物 '$target'"
|
||||
exit 1
|
||||
}
|
||||
$isArm64Target = "${{ matrix.arch_name }}".ToLowerInvariant() -eq "arm64"
|
||||
if ($isArm64Target) {
|
||||
Write-Warning "⚠️ UPX 当前不支持 win64/arm64,跳过压缩并保留原始 EXE。"
|
||||
$LASTEXITCODE = 0
|
||||
} else {
|
||||
$upxCmd = Get-Command upx -ErrorAction SilentlyContinue
|
||||
if ($null -eq $upxCmd) {
|
||||
Write-Error "❌ 未找到 upx,无法保证 Windows 测试产物经过压缩"
|
||||
exit 1
|
||||
}
|
||||
$beforeBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
Write-Host "🗜️ 使用 UPX 压缩 $finalExe ..."
|
||||
& upx --best --lzma --force $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 压缩失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
& upx -t $finalExe | Out-Host
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "❌ UPX 校验失败($LASTEXITCODE)"
|
||||
exit 1
|
||||
}
|
||||
$afterBytes = (Get-Item -LiteralPath $finalExe).Length
|
||||
if ($afterBytes -lt $beforeBytes) {
|
||||
$savedBytes = $beforeBytes - $afterBytes
|
||||
Write-Host ("✅ UPX 压缩完成:{0:N2}MB -> {1:N2}MB,减少 {2:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB), ($savedBytes / 1MB))
|
||||
} else {
|
||||
Write-Host ("ℹ️ UPX 压缩完成:{0:N2}MB -> {1:N2}MB" -f ($beforeBytes / 1MB), ($afterBytes / 1MB))
|
||||
}
|
||||
}
|
||||
New-Item -ItemType Directory -Force -Path ..\..\artifacts | Out-Null
|
||||
Copy-Item -LiteralPath $finalExe -Destination "..\..\artifacts\$finalExeName" -Force
|
||||
Get-FileHash "..\..\artifacts\$finalExeName" -Algorithm SHA256 | ForEach-Object { "{0} *{1}" -f $_.Hash.ToLower(), (Split-Path $_.Path -Leaf) } | Out-File "..\..\artifacts\$finalExeName.sha256" -Encoding ascii
|
||||
|
||||
- name: Package Linux
|
||||
if: contains(matrix.platform, 'linux')
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
cd build/bin
|
||||
TARGET="${{ matrix.build_name }}"
|
||||
TAR_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}-run${GITHUB_RUN_NUMBER}.tar.gz"
|
||||
APPIMAGE_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}-run${GITHUB_RUN_NUMBER}.AppImage"
|
||||
mkdir -p ../../artifacts
|
||||
|
||||
if [ ! -f "$TARGET" ]; then
|
||||
echo "未找到构建产物 '$TARGET'"
|
||||
exit 1
|
||||
fi
|
||||
chmod +x "$TARGET"
|
||||
BEFORE_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
echo "🗜️ 使用 UPX 压缩 Linux 可执行文件: $TARGET ..."
|
||||
upx --best --lzma --force "$TARGET"
|
||||
upx -t "$TARGET"
|
||||
AFTER_BYTES=$(wc -c <"$TARGET" | tr -d '[:space:]')
|
||||
if [ "$AFTER_BYTES" -lt "$BEFORE_BYTES" ]; then
|
||||
SAVED_BYTES=$((BEFORE_BYTES - AFTER_BYTES))
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" -v s="$SAVED_BYTES" 'BEGIN { printf "✅ Linux UPX 压缩完成:%.2fMB -> %.2fMB,减少 %.2fMB\n", b/1024/1024, a/1024/1024, s/1024/1024 }'
|
||||
else
|
||||
awk -v b="$BEFORE_BYTES" -v a="$AFTER_BYTES" 'BEGIN { printf "ℹ️ Linux UPX 压缩完成:%.2fMB -> %.2fMB\n", b/1024/1024, a/1024/1024 }'
|
||||
fi
|
||||
tar -czvf "../../artifacts/$TAR_NAME" "$TARGET"
|
||||
sha256sum "../../artifacts/$TAR_NAME" > "../../artifacts/$TAR_NAME.sha256"
|
||||
|
||||
if [ "${skip_appimage:-false}" = "true" ]; then
|
||||
echo "跳过 AppImage 打包"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
mkdir -p AppDir/usr/bin AppDir/usr/share/applications AppDir/usr/share/icons/hicolor/256x256/apps
|
||||
cp "$TARGET" AppDir/usr/bin/gonavi
|
||||
printf '%s\n' '[Desktop Entry]' 'Name=GoNavi' 'Exec=gonavi' 'Icon=gonavi' 'Type=Application' 'Categories=Development;Database;' 'Comment=Database Management Tool' > AppDir/usr/share/applications/gonavi.desktop
|
||||
cp AppDir/usr/share/applications/gonavi.desktop AppDir/gonavi.desktop
|
||||
if [ -f "../../build/appicon.png" ]; then
|
||||
cp "../../build/appicon.png" AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
cp "../../build/appicon.png" AppDir/gonavi.png
|
||||
else
|
||||
touch AppDir/gonavi.png
|
||||
cp AppDir/gonavi.png AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
fi
|
||||
export DEPLOY_GTK_VERSION=3
|
||||
/tmp/linuxdeploy --appdir AppDir --plugin gtk --output appimage || exit 0
|
||||
mv GoNavi*.AppImage "$APPIMAGE_NAME" 2>/dev/null || exit 0
|
||||
mv "$APPIMAGE_NAME" "../../artifacts/$APPIMAGE_NAME"
|
||||
sha256sum "../../artifacts/$APPIMAGE_NAME" > "../../artifacts/$APPIMAGE_NAME.sha256"
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-build-${{ matrix.build_name }}-run${{ github.run_number }}
|
||||
path: |
|
||||
artifacts/*
|
||||
drivers/**
|
||||
if-no-files-found: error
|
||||
retention-days: 7
|
||||
94
.github/workflows/test-macos-build.yml
vendored
Normal file
94
.github/workflows/test-macos-build.yml
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
name: Test Build macOS (Manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
build_label:
|
||||
description: "测试包标识(仅用于文件名)"
|
||||
required: false
|
||||
default: "test"
|
||||
push:
|
||||
branches:
|
||||
- feature/kingbase_opt
|
||||
paths:
|
||||
- ".github/workflows/test-macos-build.yml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
build-macos:
|
||||
name: Build macOS ${{ matrix.arch }}
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: darwin/amd64
|
||||
arch: amd64
|
||||
- platform: darwin/arm64
|
||||
arch: arm64
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24.3"
|
||||
check-latest: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Install Wails
|
||||
run: go install github.com/wailsapp/wails/v2/cmd/wails@v2.11.0
|
||||
|
||||
- name: Build App
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OUTPUT_NAME="gonavi-test-${{ matrix.arch }}"
|
||||
BUILD_LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$BUILD_LABEL" ]; then
|
||||
BUILD_LABEL="test"
|
||||
fi
|
||||
APP_VERSION="${BUILD_LABEL}-${GITHUB_RUN_NUMBER}"
|
||||
wails build \
|
||||
-platform "${{ matrix.platform }}" \
|
||||
-clean \
|
||||
-o "$OUTPUT_NAME" \
|
||||
-ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
|
||||
- name: Package Zip
|
||||
run: |
|
||||
set -euo pipefail
|
||||
APP_PATH="build/bin/gonavi-test-${{ matrix.arch }}.app"
|
||||
if [ ! -d "$APP_PATH" ]; then
|
||||
APP_PATH=$(find build/bin -maxdepth 1 -name "*.app" | head -n 1 || true)
|
||||
fi
|
||||
if [ -z "$APP_PATH" ] || [ ! -d "$APP_PATH" ]; then
|
||||
echo "未找到 .app 产物"
|
||||
ls -la build/bin || true
|
||||
exit 1
|
||||
fi
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
ZIP_NAME="GoNavi-${LABEL}-macos-${{ matrix.arch }}-run${GITHUB_RUN_NUMBER}.zip"
|
||||
mkdir -p artifacts
|
||||
ditto -c -k --sequesterRsrc --keepParent "$APP_PATH" "artifacts/$ZIP_NAME"
|
||||
shasum -a 256 "artifacts/$ZIP_NAME" > "artifacts/$ZIP_NAME.sha256"
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: gonavi-macos-${{ matrix.arch }}-run${{ github.run_number }}
|
||||
path: artifacts/*
|
||||
if-no-files-found: error
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -19,3 +19,6 @@ GoNavi-Wails.exe
|
||||
.ace-tool/
|
||||
.claude/
|
||||
tmpclaude-*
|
||||
|
||||
CLAUDE.md
|
||||
**/CLAUDE.md
|
||||
|
||||
155
CONTRIBUTING.md
Normal file
155
CONTRIBUTING.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Contributing Guide
|
||||
|
||||
Thank you for contributing to this project.
|
||||
|
||||
This repository follows a release-first workflow: `main` is the default public branch, while releases are prepared through `release/*` branches.
|
||||
|
||||
---
|
||||
|
||||
## Branch Model
|
||||
|
||||
- `main`: stable release branch and default branch
|
||||
- `dev`: day-to-day integration branch for maintainers
|
||||
- `release/*`: release preparation branches for maintainers
|
||||
- Recommended branch names for external contributors:
|
||||
- `fix/*`: bug fixes
|
||||
- `feature/*`: new features or enhancements
|
||||
|
||||
Maintainer release flow:
|
||||
|
||||
```text
|
||||
feature/* / fix/* -> dev -> release/* -> main -> tag(vX.Y.Z)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## How External Contributors Should Open Pull Requests
|
||||
|
||||
Whether your branch is `fix/*` or `feature/*`, external contributors should **open pull requests directly against `main`**.
|
||||
|
||||
Reasons:
|
||||
|
||||
- `main` is the default branch, so the PR entry point is clearer
|
||||
- merged contributions are immediately visible on the default branch
|
||||
- maintainers can handle downstream sync and release preparation in one place
|
||||
|
||||
Recommended flow:
|
||||
|
||||
1. Fork this repository
|
||||
2. Create a branch in your fork (`fix/*` or `feature/*` is recommended)
|
||||
3. Make your changes and perform basic self-checks
|
||||
4. Push the branch to your fork
|
||||
5. Open a pull request against the `main` branch of this repository
|
||||
|
||||
---
|
||||
|
||||
## Pull Request Requirements
|
||||
|
||||
Please keep each pull request focused, reviewable, and easy to validate.
|
||||
|
||||
Recommended expectations:
|
||||
|
||||
- one pull request should address one logical change
|
||||
- use a clear title that explains the purpose
|
||||
- include the following in the description:
|
||||
- background and problem statement
|
||||
- key changes
|
||||
- impact scope
|
||||
- validation method
|
||||
- include screenshots or recordings for UI changes when helpful
|
||||
- explicitly mention risk and rollback notes for compatibility, data, or build-chain changes
|
||||
|
||||
---
|
||||
|
||||
## Merge Strategy for Maintainers
|
||||
|
||||
Pull requests merged into `main` should generally use **Squash and merge**.
|
||||
|
||||
Reasons:
|
||||
|
||||
- keeps `main` history clean and linear
|
||||
- maps each PR to a single commit on `main`
|
||||
- reduces release, audit, and rollback complexity
|
||||
|
||||
---
|
||||
|
||||
## Maintainer Sync Rules
|
||||
|
||||
Because external pull requests are merged directly into `main`, maintainers must sync `main` back to development and release branches to avoid branch drift.
|
||||
|
||||
### 1. Sync `main` -> `dev` (required)
|
||||
|
||||
The automatic GitHub Actions sync workflow has been removed.
|
||||
Maintainers should sync `main` back to `dev` manually when needed:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
### 2. Create `release/*` from `dev`
|
||||
|
||||
Before a release, create a release branch from `dev`, for example:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git checkout -b release/v0.6.0
|
||||
git push -u origin release/v0.6.0
|
||||
```
|
||||
|
||||
### 3. Release from `release/*` back to `main`
|
||||
|
||||
When release preparation is complete, merge the release branch back into `main` and create a tag:
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
git merge release/v0.6.0
|
||||
git push
|
||||
git tag v0.6.0
|
||||
git push origin v0.6.0
|
||||
```
|
||||
|
||||
### 4. Sync `main` back to `dev` after release
|
||||
|
||||
After the release, sync `main` back to `dev` again using the same manual commands, so the development line stays consistent with the release line:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Commit Message Recommendation
|
||||
|
||||
Keep commit messages clear and easy to audit.
|
||||
|
||||
Recommended format:
|
||||
|
||||
```text
|
||||
emoji type(scope): concise description
|
||||
```
|
||||
|
||||
Examples:
|
||||
|
||||
```text
|
||||
🔧 fix(ci): fix DuckDB driver toolchain on Windows AMD64
|
||||
✨ feat(redis): add Stream data browsing support
|
||||
♻️ refactor(datagrid): optimize large-table horizontal scrolling and rendering
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Additional Notes
|
||||
|
||||
- Please include validation results for documentation, build-chain, or driver compatibility changes
|
||||
- For larger changes, opening an issue or draft PR first is recommended
|
||||
- Maintainers may ask contributors to narrow the scope if the change conflicts with the current project direction
|
||||
|
||||
Thank you for contributing.
|
||||
155
CONTRIBUTING.zh-CN.md
Normal file
155
CONTRIBUTING.zh-CN.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# 贡献指南
|
||||
|
||||
感谢你对本项目的贡献。
|
||||
|
||||
本项目采用“发布优先(`main` 为默认分支)+ `release/*` 分支发版”的协作模型。为减少分支漂移与 PR 处理成本,请在提交贡献前先阅读本指南。
|
||||
|
||||
---
|
||||
|
||||
## 分支模型
|
||||
|
||||
- `main`:稳定发布分支,也是仓库默认分支
|
||||
- `dev`:日常开发集成分支,主要供维护者使用
|
||||
- `release/*`:发布准备分支,主要供维护者使用
|
||||
- 外部贡献者建议使用以下分支命名:
|
||||
- `fix/*`:问题修复
|
||||
- `feature/*`:功能新增或增强
|
||||
|
||||
维护者发布流转如下:
|
||||
|
||||
```text
|
||||
feature/* / fix/* -> dev -> release/* -> main -> tag(vX.Y.Z)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 外部贡献者如何提 Pull Request
|
||||
|
||||
无论是 `fix/*` 还是 `feature/*`,**外部贡献者统一直接向 `main` 发起 Pull Request**。
|
||||
|
||||
这样做的原因:
|
||||
|
||||
- `main` 是默认分支,PR 入口更直观
|
||||
- 合并后贡献会直接体现在默认分支
|
||||
- 便于维护者统一做后续同步与发版整理
|
||||
|
||||
建议流程:
|
||||
|
||||
1. Fork 本仓库
|
||||
2. 从你自己的仓库创建分支(建议命名为 `fix/*` 或 `feature/*`)
|
||||
3. 完成代码修改,并进行必要自检
|
||||
4. 推送到你的远程分支
|
||||
5. 向本仓库的 `main` 分支发起 Pull Request
|
||||
|
||||
---
|
||||
|
||||
## Pull Request 要求
|
||||
|
||||
请尽量保证 PR 单一、清晰、可审核。
|
||||
|
||||
建议遵循以下要求:
|
||||
|
||||
- 一个 PR 只解决一类问题,避免混入无关改动
|
||||
- 标题清晰说明改动目的
|
||||
- 描述中说明:
|
||||
- 背景与问题
|
||||
- 变更点
|
||||
- 影响范围
|
||||
- 验证方式
|
||||
- 如涉及 UI 调整,建议附截图或录屏
|
||||
- 如涉及兼容性、数据变更或构建链路调整,请明确说明风险和回滚方式
|
||||
|
||||
---
|
||||
|
||||
## PR 合并策略(维护者)
|
||||
|
||||
`main` 分支上的 PR 建议使用 **Squash and merge**。
|
||||
|
||||
原因:
|
||||
|
||||
- 保持 `main` 历史干净、线性
|
||||
- 每个 PR 在 `main` 上对应一个清晰提交
|
||||
- 降低发布排查与回滚成本
|
||||
|
||||
---
|
||||
|
||||
## 维护者同步规则
|
||||
|
||||
由于外部 PR 会直接合入 `main`,维护者必须及时将 `main` 的变更同步到开发与发布分支,避免分支漂移。
|
||||
|
||||
### 1. main → dev 同步(必做)
|
||||
|
||||
仓库已移除 GitHub Actions 自动回灌 workflow。
|
||||
当前统一采用手动方式将 `main` 同步回 `dev`:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
### 2. 发版前从 dev 切 release/*
|
||||
|
||||
发布前由维护者基于 `dev` 创建发布分支,例如:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git checkout -b release/v0.6.0
|
||||
git push -u origin release/v0.6.0
|
||||
```
|
||||
|
||||
### 3. release/* → main 发版
|
||||
|
||||
发布准备完成后,将 `release/*` 合并回 `main`,并打标签发布:
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
git merge release/v0.6.0
|
||||
git push
|
||||
git tag v0.6.0
|
||||
git push origin v0.6.0
|
||||
```
|
||||
|
||||
### 4. main 回流到 dev(发版后必做)
|
||||
|
||||
发布完成后,请再次按照第 1 步的手动方式,执行以下命令将 `main` 同步回 `dev`,确保开发线与发布线一致:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 提交建议
|
||||
|
||||
建议保持提交信息简洁、明确,便于维护者审查与后续追踪。
|
||||
|
||||
推荐格式:
|
||||
|
||||
```text
|
||||
emoji type(scope): 中文描述
|
||||
```
|
||||
|
||||
示例:
|
||||
|
||||
```text
|
||||
🔧 fix(ci): 修复 Windows AMD64 下 DuckDB 驱动构建工具链
|
||||
✨ feat(redis): 新增 Stream 类型数据浏览支持
|
||||
♻️ refactor(datagrid): 优化大表横向滚动与渲染结构
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 其他说明
|
||||
|
||||
- 文档、构建链路、驱动兼容性相关改动,请尽量附带验证结果
|
||||
- 若改动较大,建议先提 Issue 或 Draft PR,先对齐方案再实施
|
||||
- 如提交内容与项目当前架构方向冲突,维护者可能要求收敛范围后再合并
|
||||
|
||||
感谢你的贡献。
|
||||
236
README.md
236
README.md
@@ -1,4 +1,4 @@
|
||||
# GoNavi - 现代化的轻量级数据库管理工具
|
||||
# GoNavi - A Modern Lightweight Database Client
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
@@ -6,11 +6,51 @@
|
||||
[](LICENSE)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**GoNavi** 是一款基于 **Wails (Go)** 和 **React** 构建的现代化、高性能、跨平台数据库管理客户端。它旨在提供如原生应用般流畅的用户体验,同时保持极低的资源占用。
|
||||
**Language**: English | [简体中文](README.zh-CN.md)
|
||||
|
||||
相比于 Electron 应用,GoNavi 的体积更小(~10MB),启动速度更快,内存占用更低。
|
||||
GoNavi is a modern, high-performance, cross-platform database client built with **Wails (Go)** and **React**.
|
||||
It delivers native-like responsiveness with low resource usage.
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
Compared with many Electron-based clients, GoNavi is typically smaller in binary size (around 10MB class), starts faster, and uses less memory.
|
||||
|
||||
---
|
||||
|
||||
## Project Overview
|
||||
|
||||
GoNavi is designed for developers and DBAs who need a unified desktop experience across multiple databases.
|
||||
|
||||
- **Native-performance architecture**: Wails (Go + WebView) with lightweight runtime overhead.
|
||||
- **Large dataset usability**: virtualized rendering and optimized DataGrid workflows for high-volume tables.
|
||||
- **Unified connectivity**: URI build/parse, SSH tunnel, proxy support, and on-demand driver activation.
|
||||
- **Production-oriented workflow**: SQL editor, object management, batch export/backup, sync tools, execution logs, and update checks.
|
||||
|
||||
## Supported Data Sources
|
||||
|
||||
> `Built-in`: available out of the box.
|
||||
> `Optional driver agent`: install/enable via Driver Manager first.
|
||||
|
||||
| Category | Data Source | Driver Mode | Typical Capabilities |
|
||||
|---|---|---|---|
|
||||
| Relational | MySQL | Built-in | Schema browsing, SQL query, data editing, export/backup |
|
||||
| Relational | PostgreSQL | Built-in | Schema browsing, SQL query, data editing, object management |
|
||||
| Relational | Oracle | Built-in | Query execution, object browsing, data editing |
|
||||
| Cache | Redis | Built-in | Key browsing, command execution, encoding/view switch |
|
||||
| Relational | MariaDB | Optional driver agent | Querying, object management, data editing |
|
||||
| Relational | Doris | Optional driver agent | Querying, object browsing, SQL execution |
|
||||
| Search | Sphinx | Optional driver agent | SphinxQL querying and object browsing |
|
||||
| Relational | SQL Server | Optional driver agent | Schema browsing, SQL query, object management |
|
||||
| File-based | SQLite | Optional driver agent | Local DB browsing, editing, export |
|
||||
| File-based | DuckDB | Optional driver agent | Large-table query, pagination, file-DB workflow |
|
||||
| Domestic DB | Dameng | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | Kingbase | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | HighGo | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | Vastbase | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Document | MongoDB | Optional driver agent | Document query, collection browsing, connection management |
|
||||
| Time-series | TDengine | Optional driver agent | Time-series schema browsing and querying |
|
||||
| Columnar Analytics | ClickHouse | Optional driver agent | Analytical query, object browsing, SQL execution |
|
||||
| Extensibility | Custom Driver/DSN | Custom | Extend to more data sources via Driver + DSN |
|
||||
|
||||
<h2 align="center">📸 Screenshots</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
@@ -24,139 +64,149 @@
|
||||
|
||||
---
|
||||
|
||||
## ✨ 核心特性
|
||||
## Key Features
|
||||
|
||||
### 🚀 极致性能
|
||||
- **零卡顿交互**:采用独创的 "幽灵拖拽" (Ghost Resizing) 技术,在包含数万行数据的表格中调整列宽,依然保持 60fps+ 的丝滑体验。
|
||||
- **虚拟滚动**:轻松处理海量数据展示,拒绝卡顿。
|
||||
### Performance
|
||||
- **Smooth interaction under load**: optimized table interaction (including column resize workflow on large datasets).
|
||||
- **Virtualized rendering**: keeps large result sets responsive.
|
||||
|
||||
### 🔌 多数据库支持
|
||||
- **MySQL**:完整支持,涵盖数据编辑、结构管理与导入导出。
|
||||
- **PostgreSQL**:数据查看与编辑支持,事务提交能力持续完善。
|
||||
- **SQLite**:本地文件数据库支持。
|
||||
- **Oracle**:基础数据访问与编辑支持。
|
||||
- **Dameng(达梦)**:基础数据访问与编辑支持。
|
||||
- **Kingbase(人大金仓)**:基础数据访问与编辑支持。
|
||||
- **Redis**:Key/Value 浏览、命令执行、视图与编码切换。
|
||||
- **自定义驱动**:支持配置 Driver/DSN 接入更多数据源。
|
||||
- **SSH 隧道**:内置 SSH 隧道支持,安全连接内网数据库。
|
||||
### Data Management (DataGrid)
|
||||
- In-place cell editing.
|
||||
- Batch insert/update/delete with transaction-oriented submit/rollback.
|
||||
- Large-field popup editor.
|
||||
- Context actions (set NULL, copy/export, etc.).
|
||||
- Smart read/write mode switching based on query context.
|
||||
- Export formats: CSV, Excel (XLSX), JSON, Markdown.
|
||||
|
||||
### 📊 强大的数据管理 (DataGrid)
|
||||
- **所见即所得编辑**:直接在表格中双击单元格修改数据。
|
||||
- **批量事务操作**:支持批量新增、修改、删除,一键提交或回滚事务。
|
||||
- **大字段编辑**:双击大字段自动打开弹窗编辑器,避免卡顿。
|
||||
- **右键上下文菜单**:快速设置 NULL、复制/导出等操作。
|
||||
- **智能上下文**:自动识别单表查询,解锁编辑功能;复杂查询自动切换为只读模式。
|
||||
- **批量导出/备份**:支持表与数据库的批量导出/备份。
|
||||
- **数据导出**:支持 CSV、Excel (XLSX)、JSON、Markdown 等格式。
|
||||
### SQL Editor
|
||||
- Monaco Editor core.
|
||||
- Context-aware completion for databases/tables/columns.
|
||||
- Multi-tab query workflow.
|
||||
|
||||
### 🧰 批量导出/备份
|
||||
- **数据库批量导出**:支持结构导出与结构+数据备份。
|
||||
- **表批量导出**:支持多表一键导出/备份。
|
||||
- **智能上下文检测**:自动判断目标范围,避免误操作。
|
||||
### Batch Export / Backup
|
||||
- Database-level and table-level batch export/backup.
|
||||
- Scope-aware operation flow to reduce mistakes.
|
||||
|
||||
### 🧩 Redis 视图与编码
|
||||
- **视图模式切换**:自动/原始文本/UTF-8/十六进制多模式显示。
|
||||
- **智能解码**:针对二进制值进行 UTF-8 质量判定与中文字符识别。
|
||||
- **命令执行**:内置命令面板快速操作。
|
||||
### Connectivity
|
||||
- URI generation/parsing.
|
||||
- SSH tunnel support.
|
||||
- Proxy support.
|
||||
- Config import/export (JSON).
|
||||
- Optional driver management and activation.
|
||||
|
||||
### 🔄 数据同步与导入导出
|
||||
- **连接配置导入/导出**:支持配置 JSON 导入导出,便于团队共享。
|
||||
- **数据同步**:内置数据同步面板,支持跨库同步任务配置。
|
||||
### Redis Tools
|
||||
- Multi-view value rendering (auto/raw text/UTF-8/hex).
|
||||
- Built-in command execution panel.
|
||||
|
||||
### 🆙 在线更新
|
||||
- **自动更新**:启动/定时/手动检查更新,自动下载并提示重启完成更新。
|
||||
### Observability and Update
|
||||
- SQL execution logs with timing information.
|
||||
- Startup/scheduled/manual update checks.
|
||||
|
||||
### 🧾 可观测性
|
||||
- **SQL 执行日志**:实时查看 SQL 与执行耗时,便于排障与优化。
|
||||
|
||||
### 📝 智能 SQL 编辑器
|
||||
- **Monaco Editor 内核**:集成 VS Code 同款编辑器,体验极佳。
|
||||
- **智能补全**:自动感知当前连接上下文,提供数据库、表名、字段名的实时补全。
|
||||
- **多标签页**:支持多窗口并行操作,像浏览器一样管理你的查询会话。
|
||||
|
||||
### 🎨 现代化 UI
|
||||
- **Ant Design 5**:企业级 UI 设计语言。
|
||||
- **暗黑模式**:内置深色/浅色主题切换,适应不同光照环境。
|
||||
- **响应式布局**:灵活的侧边栏与布局调整。
|
||||
### UI/UX
|
||||
- Ant Design 5 based interface.
|
||||
- Light/Dark themes.
|
||||
- Flexible sidebar and layout behavior.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ 技术栈
|
||||
## Tech Stack
|
||||
|
||||
* **后端 (Backend)**: Go 1.24 + Wails v2
|
||||
* **前端 (Frontend)**: React 18 + TypeScript + Vite
|
||||
* **UI 框架**: Ant Design 5
|
||||
* **状态管理**: Zustand
|
||||
* **编辑器**: Monaco Editor
|
||||
- **Backend**: Go 1.24 + Wails v2
|
||||
- **Frontend**: React 18 + TypeScript + Vite
|
||||
- **UI**: Ant Design 5
|
||||
- **State Management**: Zustand
|
||||
- **Editor**: Monaco Editor
|
||||
|
||||
---
|
||||
|
||||
## 📦 安装与运行
|
||||
## Installation and Run
|
||||
|
||||
### 前置要求
|
||||
* [Go](https://go.dev/dl/) 1.21+
|
||||
* [Node.js](https://nodejs.org/) 18+
|
||||
* [Wails CLI](https://wails.io/docs/gettingstarted/installation): `go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
### Prerequisites
|
||||
- [Go](https://go.dev/dl/) 1.21+
|
||||
- [Node.js](https://nodejs.org/) 18+
|
||||
- [Wails CLI](https://wails.io/docs/gettingstarted/installation):
|
||||
`go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
|
||||
### 开发模式
|
||||
### Development Mode
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
# Clone
|
||||
git clone https://github.com/Syngnat/GoNavi.git
|
||||
cd GoNavi
|
||||
|
||||
# 启动开发服务器 (支持热重载)
|
||||
# Start development with hot reload
|
||||
wails dev
|
||||
```
|
||||
|
||||
### 编译构建
|
||||
### Build
|
||||
|
||||
```bash
|
||||
# 构建当前平台的可执行文件
|
||||
# Build for current platform
|
||||
wails build
|
||||
|
||||
# 清理并构建 (推荐发布前使用)
|
||||
# Clean build (recommended before release)
|
||||
wails build -clean
|
||||
```
|
||||
|
||||
构建产物将位于 `build/bin` 目录下。
|
||||
Artifacts are generated in `build/bin`.
|
||||
|
||||
### 跨平台编译 (GitHub Actions)
|
||||
### Cross-Platform Release (GitHub Actions)
|
||||
|
||||
本项目内置了 GitHub Actions 流水线,Push `v*` 格式的 Tag 即可自动触发构建并发布 Release。
|
||||
支持构建:
|
||||
* macOS (AMD64 / ARM64)
|
||||
* Windows (AMD64)
|
||||
The repository includes a release workflow.
|
||||
Push a `v*` tag to trigger automated build and release.
|
||||
Release notes are generated automatically from merged pull requests and categorized by `.github/release.yaml`.
|
||||
|
||||
Target artifacts include:
|
||||
- macOS (AMD64 / ARM64)
|
||||
- Windows (AMD64)
|
||||
- Linux (AMD64, WebKitGTK 4.0 and 4.1 variants)
|
||||
|
||||
---
|
||||
|
||||
## ❓ 常见问题 (Troubleshooting)
|
||||
## Troubleshooting
|
||||
|
||||
### macOS 提示 "应用已损坏,无法打开"
|
||||
### macOS: "App is damaged and can’t be opened"
|
||||
|
||||
由于本项目尚未购买 Apple 开发者证书进行签名(Notarization),macOS 的 Gatekeeper 安全机制可能会拦截应用的运行。请按照以下步骤解决:
|
||||
Without Apple notarization, Gatekeeper may block startup.
|
||||
|
||||
1. 将下载的 `GoNavi.app` 拖入 **应用程序** 文件夹。
|
||||
2. 打开 **终端 (Terminal)**。
|
||||
3. 复制并执行以下命令(输入密码时不会显示):
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
4. 或者:在 Finder 中右键点击应用图标,按住 `Control` 键选择 **打开**,然后在弹出的窗口中再次点击 **打开**。
|
||||
1. Move `GoNavi.app` to **Applications**.
|
||||
2. Open **Terminal**.
|
||||
3. Run:
|
||||
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
|
||||
Or right-click the app in Finder and choose **Open** with Control key flow.
|
||||
|
||||
### Linux: missing `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
|
||||
GoNavi depends on WebKitGTK runtime libraries.
|
||||
|
||||
```bash
|
||||
# Debian 13 / Ubuntu 24.04+
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.1-0 libjavascriptcoregtk-4.1-0
|
||||
|
||||
# Ubuntu 22.04 / Debian 12
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0-18
|
||||
```
|
||||
|
||||
If you use Linux artifacts with the `-WebKit41` suffix, prefer Debian 13 / Ubuntu 24.04+.
|
||||
|
||||
---
|
||||
|
||||
## 🤝 贡献指南
|
||||
## Contributing
|
||||
|
||||
欢迎提交 Issue 和 Pull Request!
|
||||
Issues and pull requests are welcome.
|
||||
|
||||
1. Fork 本仓库
|
||||
2. 创建你的特性分支 (`git checkout -b feature/AmazingFeature`)
|
||||
3. 提交你的改动 (`git commit -m 'feat: Add some AmazingFeature'`)
|
||||
4. 推送到分支 (`git push origin feature/AmazingFeature`)
|
||||
5. 开启一个 Pull Request
|
||||
For the full workflow, branch model, and maintainer sync rules, see:
|
||||
|
||||
## 📄 开源协议
|
||||
- [CONTRIBUTING.md](CONTRIBUTING.md)
|
||||
|
||||
本项目采用 [Apache-2.0 协议](LICENSE) 开源。
|
||||
External contributors should open pull requests directly against `main`.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under [Apache-2.0](LICENSE).
|
||||
|
||||
195
README.zh-CN.md
Normal file
195
README.zh-CN.md
Normal file
@@ -0,0 +1,195 @@
|
||||
# GoNavi - 现代化轻量级数据库客户端
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
[](https://reactjs.org/)
|
||||
[](LICENSE)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**语言**: [English](README.md) | 简体中文
|
||||
|
||||
GoNavi 是基于 **Wails (Go)** 与 **React** 构建的跨平台数据库管理工具,强调原生性能、低资源占用与多数据源统一工作流。
|
||||
|
||||
相比常见 Electron 客户端,GoNavi 在体积、启动速度和内存占用上更轻量。
|
||||
|
||||
---
|
||||
|
||||
## 项目简介
|
||||
|
||||
GoNavi 面向开发者与 DBA,核心目标是让数据库操作在桌面端做到“快、稳、统一”。
|
||||
|
||||
- **原生性能架构**:Wails(Go + WebView),降低运行时开销。
|
||||
- **大数据可用性**:虚拟滚动 + DataGrid 交互优化,提升大结果集可操作性。
|
||||
- **统一连接能力**:支持 URI 生成/解析、SSH 隧道、代理、驱动按需安装。
|
||||
- **工程化能力完整**:覆盖 SQL 编辑、对象管理、批量导出/备份、数据同步、执行日志、在线更新。
|
||||
|
||||
## 支持的数据源
|
||||
|
||||
> `内置`:主程序开箱即用。
|
||||
> `可选驱动代理`:需在驱动管理中安装启用后可用。
|
||||
|
||||
| 类别 | 数据源 | 驱动模式 | 典型能力 |
|
||||
|---|---|---|---|
|
||||
| 关系型 | MySQL | 内置 | 库表浏览、SQL 查询、数据编辑、导出/备份 |
|
||||
| 关系型 | PostgreSQL | 内置 | 库表浏览、SQL 查询、数据编辑、对象管理 |
|
||||
| 关系型 | Oracle | 内置 | 连接查询、对象浏览、数据编辑 |
|
||||
| 缓存 | Redis | 内置 | Key 浏览、命令执行、编码/视图切换 |
|
||||
| 关系型 | MariaDB | 可选驱动代理 | 连接查询、对象管理、数据编辑 |
|
||||
| 关系型 | Doris | 可选驱动代理 | 连接查询、对象浏览、SQL 执行 |
|
||||
| 搜索 | Sphinx | 可选驱动代理 | SphinxQL 查询与对象浏览 |
|
||||
| 关系型 | SQL Server | 可选驱动代理 | 库表浏览、SQL 查询、对象管理 |
|
||||
| 文件型 | SQLite | 可选驱动代理 | 本地文件库浏览、编辑、导出 |
|
||||
| 文件型 | DuckDB | 可选驱动代理 | 大表查询、分页浏览、文件库管理 |
|
||||
| 国产数据库 | Dameng | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | Kingbase | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | HighGo | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | Vastbase | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 文档型 | MongoDB | 可选驱动代理 | 文档查询、集合浏览、连接管理 |
|
||||
| 时序 | TDengine | 可选驱动代理 | 时序库表浏览、查询分析 |
|
||||
| 列式分析 | ClickHouse | 可选驱动代理 | 分析查询、对象浏览、SQL 执行 |
|
||||
| 扩展接入 | Custom Driver/DSN | 自定义 | 通过 Driver + DSN 接入更多数据源 |
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/224a74e7-65df-4aef-9710-d8e82e3a70c1" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/ec522145-5ceb-4481-ae46-a9251c89bdfc" />
|
||||
<br />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/330ce49b-45f1-4919-ae14-75f7d47e5f73" />
|
||||
<img width="14%" alt="image" src="https://github.com/user-attachments/assets/d15fa9e9-5486-423b-a0e9-53b467e45432" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/f0c57590-d987-4ecf-89b2-64efad60b6d7" />
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## 核心特性
|
||||
|
||||
### 性能与交互
|
||||
- 大数据场景下保持流畅交互(含 DataGrid 列宽拖拽、批量编辑流程优化)。
|
||||
- 虚拟滚动渲染,降低大结果集卡顿风险。
|
||||
|
||||
### 数据管理(DataGrid)
|
||||
- 单元格所见即所得编辑。
|
||||
- 批量新增/修改/删除,支持事务提交与回滚。
|
||||
- 大字段弹窗编辑。
|
||||
- 右键上下文操作(NULL、复制、导出等)。
|
||||
- 根据查询上下文智能切换读写模式。
|
||||
- 支持 CSV / XLSX / JSON / Markdown 导出。
|
||||
|
||||
### SQL 编辑器
|
||||
- 基于 Monaco Editor。
|
||||
- 上下文补全(数据库/表/字段)。
|
||||
- 多标签查询工作流。
|
||||
|
||||
### 连接与驱动
|
||||
- URI 生成与解析。
|
||||
- SSH 隧道、代理支持。
|
||||
- 连接配置 JSON 导入/导出。
|
||||
- 可选驱动安装与启用管理。
|
||||
|
||||
### Redis 工具
|
||||
- 自动/原始文本/UTF-8/十六进制等视图模式。
|
||||
- 内置命令执行面板。
|
||||
|
||||
### 可观测性与更新
|
||||
- SQL 执行日志(含耗时)。
|
||||
- 启动/定时/手动更新检查。
|
||||
|
||||
### UI 体验
|
||||
- Ant Design 5 体系。
|
||||
- 深色/浅色主题切换。
|
||||
- 灵活布局与侧边栏行为。
|
||||
|
||||
---
|
||||
|
||||
## 技术栈
|
||||
|
||||
- **后端**: Go 1.24 + Wails v2
|
||||
- **前端**: React 18 + TypeScript + Vite
|
||||
- **UI 框架**: Ant Design 5
|
||||
- **状态管理**: Zustand
|
||||
- **编辑器**: Monaco Editor
|
||||
|
||||
---
|
||||
|
||||
## 安装与运行
|
||||
|
||||
### 前置要求
|
||||
- [Go](https://go.dev/dl/) 1.21+
|
||||
- [Node.js](https://nodejs.org/) 18+
|
||||
- [Wails CLI](https://wails.io/docs/gettingstarted/installation):
|
||||
`go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
|
||||
### 开发模式
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
git clone https://github.com/Syngnat/GoNavi.git
|
||||
cd GoNavi
|
||||
|
||||
# 启动开发(热重载)
|
||||
wails dev
|
||||
```
|
||||
|
||||
### 编译构建
|
||||
|
||||
```bash
|
||||
# 构建当前平台
|
||||
wails build
|
||||
|
||||
# 清理后构建(发布前推荐)
|
||||
wails build -clean
|
||||
```
|
||||
|
||||
构建产物位于 `build/bin`。
|
||||
|
||||
### 跨平台发布(GitHub Actions)
|
||||
|
||||
仓库内置发布流水线,推送 `v*` Tag 可自动构建并发布 Release。
|
||||
Release 更新说明会基于已合并 Pull Request 自动生成,并按 `.github/release.yaml` 分类。
|
||||
|
||||
支持目标:
|
||||
- macOS (AMD64 / ARM64)
|
||||
- Windows (AMD64)
|
||||
- Linux (AMD64,含 WebKitGTK 4.0 / 4.1 变体)
|
||||
|
||||
---
|
||||
|
||||
## 常见问题
|
||||
|
||||
### macOS 提示“应用已损坏,无法打开”
|
||||
|
||||
在未进行 Apple Notarization 时,Gatekeeper 可能拦截应用。
|
||||
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
|
||||
### Linux 缺少 `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
|
||||
```bash
|
||||
# Debian 13 / Ubuntu 24.04+
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.1-0 libjavascriptcoregtk-4.1-0
|
||||
|
||||
# Ubuntu 22.04 / Debian 12
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0-18
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 贡献指南
|
||||
|
||||
欢迎提交 Issue 与 Pull Request。
|
||||
|
||||
完整流程、分支模型与维护者同步规则请查看:
|
||||
|
||||
- [CONTRIBUTING.zh-CN.md](CONTRIBUTING.zh-CN.md)
|
||||
|
||||
外部贡献者统一直接向 `main` 发起 Pull Request。
|
||||
|
||||
## 开源协议
|
||||
|
||||
本项目采用 [Apache-2.0 协议](LICENSE)。
|
||||
228
build-driver-agents.sh
Executable file
228
build-driver-agents.sh
Executable file
@@ -0,0 +1,228 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
DEFAULT_DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
|
||||
usage() {
|
||||
cat <<'EOF'
|
||||
用法:
|
||||
./build-driver-agents.sh [选项]
|
||||
|
||||
选项:
|
||||
--drivers <列表> 指定驱动列表(逗号分隔),例如:kingbase,mongodb
|
||||
--platform <GOOS/GOARCH>
|
||||
目标平台,默认使用当前 Go 环境(go env GOOS/GOARCH)
|
||||
--out-dir <目录> 输出目录根路径,默认:dist/driver-agents
|
||||
--bundle-name <文件名> 驱动总包 zip 名称,默认:GoNavi-DriverAgents.zip
|
||||
--strict 任一驱动构建失败即中断(默认失败后继续,最后汇总)
|
||||
-h, --help 显示帮助
|
||||
|
||||
示例:
|
||||
./build-driver-agents.sh
|
||||
./build-driver-agents.sh --drivers kingbase
|
||||
./build-driver-agents.sh --platform windows/amd64 --drivers kingbase,mongodb
|
||||
EOF
|
||||
}
|
||||
|
||||
normalize_driver() {
|
||||
local name
|
||||
name="$(echo "${1:-}" | tr '[:upper:]' '[:lower:]' | xargs)"
|
||||
case "$name" in
|
||||
doris|diros) echo "doris" ;;
|
||||
mariadb|sphinx|sqlserver|sqlite|duckdb|dameng|kingbase|highgo|vastbase|mongodb|tdengine|clickhouse)
|
||||
echo "$name"
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
build_driver_name() {
|
||||
case "$1" in
|
||||
doris) echo "diros" ;;
|
||||
*) echo "$1" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
platform_dir_name() {
|
||||
case "$1" in
|
||||
windows) echo "Windows" ;;
|
||||
darwin) echo "MacOS" ;;
|
||||
linux) echo "Linux" ;;
|
||||
*) echo "Unknown" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
driver_csv=""
|
||||
target_platform=""
|
||||
out_root="dist/driver-agents"
|
||||
bundle_name="GoNavi-DriverAgents.zip"
|
||||
strict_mode="false"
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--drivers)
|
||||
driver_csv="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--platform)
|
||||
target_platform="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--out-dir)
|
||||
out_root="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--bundle-name)
|
||||
bundle_name="${2:-}"
|
||||
shift 2
|
||||
;;
|
||||
--strict)
|
||||
strict_mode="true"
|
||||
shift
|
||||
;;
|
||||
-h|--help)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo "❌ 未知参数:$1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if ! command -v go >/dev/null 2>&1; then
|
||||
echo "❌ 未找到 Go,请先安装 Go 并确保 go 在 PATH 中。"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z "$target_platform" ]]; then
|
||||
target_platform="$(go env GOOS)/$(go env GOARCH)"
|
||||
fi
|
||||
|
||||
if [[ "$target_platform" != */* ]]; then
|
||||
echo "❌ --platform 参数格式错误,应为 GOOS/GOARCH,例如 darwin/arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
goos="${target_platform%%/*}"
|
||||
goarch="${target_platform##*/}"
|
||||
platform_key="${goos}-${goarch}"
|
||||
platform_dir="$(platform_dir_name "$goos")"
|
||||
|
||||
declare -a drivers=()
|
||||
if [[ -n "$driver_csv" ]]; then
|
||||
IFS=',' read -r -a raw_drivers <<<"$driver_csv"
|
||||
for item in "${raw_drivers[@]}"; do
|
||||
normalized="$(normalize_driver "$item")" || {
|
||||
echo "❌ 不支持的驱动:$item"
|
||||
exit 1
|
||||
}
|
||||
drivers+=("$normalized")
|
||||
done
|
||||
else
|
||||
drivers=("${DEFAULT_DRIVERS[@]}")
|
||||
fi
|
||||
|
||||
output_dir="${out_root%/}/${platform_key}"
|
||||
bundle_stage_dir="$(mktemp -d "${TMPDIR:-/tmp}/gonavi-driver-bundle.XXXXXX")"
|
||||
bundle_platform_dir="$bundle_stage_dir/$platform_dir"
|
||||
|
||||
cleanup() {
|
||||
rm -rf "$bundle_stage_dir"
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
mkdir -p "$output_dir" "$bundle_platform_dir"
|
||||
output_dir_abs="$(cd "$output_dir" && pwd)"
|
||||
bundle_zip_path="$output_dir_abs/$bundle_name"
|
||||
|
||||
declare -a built_assets=()
|
||||
declare -a failed_drivers=()
|
||||
declare -a skipped_drivers=()
|
||||
|
||||
echo "🚀 开始构建 optional-driver-agent"
|
||||
echo " 平台:$goos/$goarch"
|
||||
echo " 输出目录:$output_dir_abs"
|
||||
echo " 驱动列表:${drivers[*]}"
|
||||
|
||||
for driver in "${drivers[@]}"; do
|
||||
if [[ "$driver" == "duckdb" && "$goos" == "windows" && "$goarch" != "amd64" ]]; then
|
||||
echo "⚠️ 跳过 duckdb(仅支持 windows/amd64)"
|
||||
skipped_drivers+=("$driver")
|
||||
continue
|
||||
fi
|
||||
|
||||
build_driver="$(build_driver_name "$driver")"
|
||||
tag="gonavi_${build_driver}_driver"
|
||||
asset_name="${driver}-driver-agent-${goos}-${goarch}"
|
||||
if [[ "$goos" == "windows" ]]; then
|
||||
asset_name="${asset_name}.exe"
|
||||
fi
|
||||
output_path="$output_dir_abs/$asset_name"
|
||||
|
||||
cgo_enabled=0
|
||||
if [[ "$driver" == "duckdb" ]]; then
|
||||
cgo_enabled=1
|
||||
fi
|
||||
|
||||
echo "🔧 构建 $driver -> $asset_name (tag=$tag, CGO_ENABLED=$cgo_enabled)"
|
||||
set +e
|
||||
CGO_ENABLED="$cgo_enabled" GOOS="$goos" GOARCH="$goarch" GOTOOLCHAIN=auto \
|
||||
go build -tags "$tag" -trimpath -ldflags "-s -w" -o "$output_path" ./cmd/optional-driver-agent
|
||||
build_exit=$?
|
||||
set -e
|
||||
|
||||
if [[ $build_exit -ne 0 ]]; then
|
||||
echo "❌ 构建失败:$driver"
|
||||
failed_drivers+=("$driver")
|
||||
if [[ "$strict_mode" == "true" ]]; then
|
||||
exit $build_exit
|
||||
fi
|
||||
continue
|
||||
fi
|
||||
|
||||
cp "$output_path" "$bundle_platform_dir/$asset_name"
|
||||
built_assets+=("$asset_name")
|
||||
done
|
||||
|
||||
if [[ ${#built_assets[@]} -eq 0 ]]; then
|
||||
echo "❌ 未成功构建任何驱动代理。"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
rm -f "$bundle_zip_path"
|
||||
if command -v zip >/dev/null 2>&1; then
|
||||
(
|
||||
cd "$bundle_stage_dir"
|
||||
zip -qry "$bundle_zip_path" "$platform_dir"
|
||||
)
|
||||
elif command -v ditto >/dev/null 2>&1; then
|
||||
(
|
||||
cd "$bundle_stage_dir"
|
||||
ditto -c -k --sequesterRsrc --keepParent "$platform_dir" "$bundle_zip_path"
|
||||
)
|
||||
else
|
||||
echo "❌ 未找到 zip/ditto,无法生成驱动总包 zip。"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "✅ 构建完成"
|
||||
echo " 单文件输出目录:$output_dir_abs"
|
||||
echo " 驱动总包:$bundle_zip_path"
|
||||
echo " 已构建:${built_assets[*]}"
|
||||
if [[ ${#skipped_drivers[@]} -gt 0 ]]; then
|
||||
echo " 已跳过:${skipped_drivers[*]}"
|
||||
fi
|
||||
if [[ ${#failed_drivers[@]} -gt 0 ]]; then
|
||||
echo "⚠️ 构建失败驱动:${failed_drivers[*]}"
|
||||
exit 2
|
||||
fi
|
||||
353
build-release.sh
353
build-release.sh
@@ -12,7 +12,7 @@ if [ -z "$VERSION" ]; then
|
||||
VERSION="0.0.0"
|
||||
fi
|
||||
echo "ℹ️ 检测到版本号: $VERSION"
|
||||
LDFLAGS="-X GoNavi-Wails/internal/app.AppVersion=$VERSION"
|
||||
LDFLAGS="-s -w -X GoNavi-Wails/internal/app.AppVersion=$VERSION"
|
||||
|
||||
# 颜色配置
|
||||
GREEN='\033[0;32m'
|
||||
@@ -20,6 +20,75 @@ RED='\033[0;31m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
get_file_size_bytes() {
|
||||
local target="$1"
|
||||
if [ ! -f "$target" ]; then
|
||||
echo 0
|
||||
return
|
||||
fi
|
||||
if stat -f%z "$target" >/dev/null 2>&1; then
|
||||
stat -f%z "$target"
|
||||
return
|
||||
fi
|
||||
if stat -c%s "$target" >/dev/null 2>&1; then
|
||||
stat -c%s "$target"
|
||||
return
|
||||
fi
|
||||
wc -c <"$target" | tr -d '[:space:]'
|
||||
}
|
||||
|
||||
format_size_mb() {
|
||||
local bytes="${1:-0}"
|
||||
awk -v b="$bytes" 'BEGIN { printf "%.2fMB", b / 1024 / 1024 }'
|
||||
}
|
||||
|
||||
try_compress_binary_with_upx() {
|
||||
local exe_path="$1"
|
||||
local label="$2"
|
||||
if [ ! -f "$exe_path" ]; then
|
||||
echo -e "${RED} ❌ 未找到 ${label} 文件:$exe_path${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v upx >/dev/null 2>&1; then
|
||||
echo -e "${RED} ❌ 未找到 upx,${label} 必须进行压缩后才能继续打包。${NC}"
|
||||
case "$(uname -s)" in
|
||||
Darwin)
|
||||
echo " 安装命令: brew install upx"
|
||||
;;
|
||||
Linux)
|
||||
echo " 安装命令: sudo apt-get install -y upx-ucl (或对应发行版包管理器)"
|
||||
;;
|
||||
esac
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local before_bytes after_bytes
|
||||
before_bytes=$(get_file_size_bytes "$exe_path")
|
||||
echo " 🗜️ 正在使用 UPX 压缩 ${label}..."
|
||||
if upx --best --lzma --force "$exe_path" >/dev/null 2>&1; then
|
||||
if ! upx -t "$exe_path" >/dev/null 2>&1; then
|
||||
echo -e "${RED} ❌ UPX 校验失败:${label}${NC}"
|
||||
exit 1
|
||||
fi
|
||||
after_bytes=$(get_file_size_bytes "$exe_path")
|
||||
if [ "$after_bytes" -lt "$before_bytes" ]; then
|
||||
local saved_bytes=$((before_bytes - after_bytes))
|
||||
echo " ✅ UPX 压缩完成: $(format_size_mb "$before_bytes") -> $(format_size_mb "$after_bytes"),减少 $(format_size_mb "$saved_bytes")"
|
||||
else
|
||||
echo " ℹ️ UPX 压缩完成: $(format_size_mb "$before_bytes") -> $(format_size_mb "$after_bytes")"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ UPX 压缩失败:${label}${NC}"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
MAC_VOLICON_PATH="build/darwin/icon.icns"
|
||||
if [ ! -f "$MAC_VOLICON_PATH" ]; then
|
||||
MAC_VOLICON_PATH=""
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}🚀 开始构建 $APP_NAME $VERSION...${NC}"
|
||||
|
||||
# 清理并创建输出目录
|
||||
@@ -36,47 +105,101 @@ if [ $? -eq 0 ]; then
|
||||
|
||||
# 移动 .app 到 dist
|
||||
mv "$APP_SRC" "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
APP_BIN_PATH=$(find "$DIST_DIR/$APP_DEST_NAME/Contents/MacOS" -maxdepth 1 -type f -print -quit)
|
||||
if [ -n "$APP_BIN_PATH" ] && [ -f "$APP_BIN_PATH" ]; then
|
||||
echo -e "${YELLOW} ⚠️ macOS arm64 不再执行 UPX 压缩,保留原始主程序。${NC}"
|
||||
else
|
||||
echo -e "${RED} ❌ 未找到 macOS arm64 主程序文件。${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 创建 DMG
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (arm64)..."
|
||||
# 移除已存在的 DMG (以防万一)
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
|
||||
create-dmg \
|
||||
--volname "${APP_NAME} ${VERSION}" \
|
||||
--volicon "build/appicon.icns" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
# 检查是否生成了 rw.* 的临时文件并重命名 (create-dmg 有时会有此行为)
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到临时文件名,正在重命名...${NC}"
|
||||
mv "$RW_FILE" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
# Ad-hoc 代码签名(无 Apple Developer 账号时防止 Gatekeeper 报已损坏)
|
||||
echo " 🔏 正在对 .app 进行 ad-hoc 签名 (arm64)..."
|
||||
codesign --force --deep --sign - "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
# 创建 DMG
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (arm64)..."
|
||||
# 移除已存在的 DMG (以防万一)
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
# create-dmg 的 source 需要是“包含 .app 的目录”,不能直接传 .app 路径。
|
||||
STAGE_DIR=$(mktemp -d "$DIST_DIR/.dmg-stage-${APP_NAME}-${VERSION}-arm64.XXXXXX")
|
||||
if [ -z "$STAGE_DIR" ] || [ ! -d "$STAGE_DIR" ]; then
|
||||
echo -e "${RED} ❌ 创建 DMG 临时目录失败,跳过 DMG 打包。${NC}"
|
||||
else
|
||||
if command -v ditto &> /dev/null; then
|
||||
ditto "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
else
|
||||
cp -R "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
fi
|
||||
|
||||
# --sandbox-safe 会跳过 Finder 的 AppleScript 排版,避免打包过程中弹出/打开挂载窗口(CI/本地静默打包更友好)。
|
||||
CREATE_DMG_ARGS=(--volname "${APP_NAME} ${VERSION}" --format UDZO --sandbox-safe)
|
||||
if [ -n "$MAC_VOLICON_PATH" ]; then
|
||||
CREATE_DMG_ARGS+=(--volicon "$MAC_VOLICON_PATH")
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 macOS 卷图标 (build/darwin/icon.icns),跳过 --volicon。${NC}"
|
||||
fi
|
||||
|
||||
# 删除中间的 .app 文件,保持目录整洁
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
else
|
||||
echo -e "${RED} ❌ DMG 生成失败,请检查 create-dmg 输出。${NC}"
|
||||
create-dmg "${CREATE_DMG_ARGS[@]}" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$STAGE_DIR"
|
||||
|
||||
CREATE_DMG_EXIT_CODE=$?
|
||||
rm -rf "$STAGE_DIR"
|
||||
|
||||
if [ $CREATE_DMG_EXIT_CODE -ne 0 ]; then
|
||||
echo -e "${RED} ❌ create-dmg 执行失败 (exit=$CREATE_DMG_EXIT_CODE),保留 .app 以便排查。${NC}"
|
||||
else
|
||||
# create-dmg 可能会在失败时遗留 rw.*.dmg 中间产物;不要直接当作最终产物使用
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -maxdepth 1 -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 create-dmg 中间产物: $(basename "$RW_FILE"),正在转换为可分发 DMG...${NC}"
|
||||
hdiutil convert "$RW_FILE" -format UDZO -o "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
rm -f "$RW_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# 防御性:即使生成了目标文件,也要确保不是 UDRW(UDRW 在 Finder 下可能表现为“已损坏/无法打开”)
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
DMG_FORMAT=$(hdiutil imageinfo "$DIST_DIR/$DMG_NAME" 2>/dev/null | awk -F': ' '/^Format:/{print $2; exit}')
|
||||
if [ "$DMG_FORMAT" = "UDRW" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 UDRW(可写原始映像),正在转换为 UDZO...${NC}"
|
||||
TMP_UDZO="$DIST_DIR/.tmp.$DMG_NAME"
|
||||
rm -f "$TMP_UDZO"
|
||||
hdiutil convert "$DIST_DIR/$DMG_NAME" -format UDZO -o "$TMP_UDZO" >/dev/null 2>&1 && mv "$TMP_UDZO" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
hdiutil verify "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED} ❌ DMG 校验失败,保留 .app 以便排查。${NC}"
|
||||
else
|
||||
# 删除中间的 .app 文件,保持目录整洁
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具,跳过 DMG 打包,仅保留 .app。${NC}"
|
||||
echo " 安装命令: brew install create-dmg"
|
||||
fi
|
||||
else
|
||||
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo -e "${RED} ❌ DMG 生成失败,请检查 create-dmg 输出。${NC}"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具,跳过 DMG 打包,仅保留 .app。${NC}"
|
||||
echo " 安装命令: brew install create-dmg"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ macOS arm64 构建失败。${NC}"
|
||||
fi
|
||||
|
||||
@@ -89,44 +212,96 @@ if [ $? -eq 0 ]; then
|
||||
DMG_NAME="${APP_NAME}-${VERSION}-mac-amd64.dmg"
|
||||
|
||||
mv "$APP_SRC" "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (amd64)..."
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
|
||||
create-dmg \
|
||||
--volname "${APP_NAME} ${VERSION}" \
|
||||
--volicon "build/appicon.icns" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
# 检查是否生成了 rw.* 的临时文件并重命名
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到临时文件名,正在重命名...${NC}"
|
||||
mv "$RW_FILE" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
else
|
||||
echo -e "${RED} ❌ DMG 生成失败。${NC}"
|
||||
fi
|
||||
APP_BIN_PATH=$(find "$DIST_DIR/$APP_DEST_NAME/Contents/MacOS" -maxdepth 1 -type f -print -quit)
|
||||
if [ -n "$APP_BIN_PATH" ] && [ -f "$APP_BIN_PATH" ]; then
|
||||
echo -e "${YELLOW} ⚠️ macOS amd64 不再执行 UPX 压缩,保留原始主程序。${NC}"
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具。${NC}"
|
||||
echo -e "${RED} ❌ 未找到 macOS amd64 主程序文件。${NC}"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ macOS amd64 构建失败。${NC}"
|
||||
|
||||
# Ad-hoc 代码签名
|
||||
echo " 🔏 正在对 .app 进行 ad-hoc 签名 (amd64)..."
|
||||
codesign --force --deep --sign - "$DIST_DIR/$APP_DEST_NAME"
|
||||
|
||||
if command -v create-dmg &> /dev/null; then
|
||||
echo " 📦 正在打包 DMG (amd64)..."
|
||||
rm -f "$DIST_DIR/$DMG_NAME"
|
||||
# create-dmg 的 source 需要是“包含 .app 的目录”,不能直接传 .app 路径。
|
||||
STAGE_DIR=$(mktemp -d "$DIST_DIR/.dmg-stage-${APP_NAME}-${VERSION}-amd64.XXXXXX")
|
||||
if [ -z "$STAGE_DIR" ] || [ ! -d "$STAGE_DIR" ]; then
|
||||
echo -e "${RED} ❌ 创建 DMG 临时目录失败,跳过 DMG 打包。${NC}"
|
||||
else
|
||||
if command -v ditto &> /dev/null; then
|
||||
ditto "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
else
|
||||
cp -R "$DIST_DIR/$APP_DEST_NAME" "$STAGE_DIR/$APP_DEST_NAME"
|
||||
fi
|
||||
|
||||
# --sandbox-safe 会跳过 Finder 的 AppleScript 排版,避免打包过程中弹出/打开挂载窗口(CI/本地静默打包更友好)。
|
||||
CREATE_DMG_ARGS=(--volname "${APP_NAME} ${VERSION}" --format UDZO --sandbox-safe)
|
||||
if [ -n "$MAC_VOLICON_PATH" ]; then
|
||||
CREATE_DMG_ARGS+=(--volicon "$MAC_VOLICON_PATH")
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 macOS 卷图标 (build/darwin/icon.icns),跳过 --volicon。${NC}"
|
||||
fi
|
||||
|
||||
create-dmg "${CREATE_DMG_ARGS[@]}" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_DEST_NAME" 200 190 \
|
||||
--hide-extension "$APP_DEST_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DIST_DIR/$DMG_NAME" \
|
||||
"$STAGE_DIR"
|
||||
|
||||
CREATE_DMG_EXIT_CODE=$?
|
||||
rm -rf "$STAGE_DIR"
|
||||
|
||||
if [ $CREATE_DMG_EXIT_CODE -ne 0 ]; then
|
||||
echo -e "${RED} ❌ create-dmg 执行失败 (exit=$CREATE_DMG_EXIT_CODE),保留 .app 以便排查。${NC}"
|
||||
else
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
RW_FILE=$(find "$DIST_DIR" -maxdepth 1 -name "rw.*.dmg" -print -quit)
|
||||
if [ -n "$RW_FILE" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 create-dmg 中间产物: $(basename "$RW_FILE"),正在转换为可分发 DMG...${NC}"
|
||||
hdiutil convert "$RW_FILE" -format UDZO -o "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
rm -f "$RW_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
DMG_FORMAT=$(hdiutil imageinfo "$DIST_DIR/$DMG_NAME" 2>/dev/null | awk -F': ' '/^Format:/{print $2; exit}')
|
||||
if [ "$DMG_FORMAT" = "UDRW" ]; then
|
||||
echo -e "${YELLOW} ⚠️ 检测到 UDRW(可写原始映像),正在转换为 UDZO...${NC}"
|
||||
TMP_UDZO="$DIST_DIR/.tmp.$DMG_NAME"
|
||||
rm -f "$TMP_UDZO"
|
||||
hdiutil convert "$DIST_DIR/$DMG_NAME" -format UDZO -o "$TMP_UDZO" >/dev/null 2>&1 && mv "$TMP_UDZO" "$DIST_DIR/$DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$DIST_DIR/$DMG_NAME" ] && command -v hdiutil &> /dev/null; then
|
||||
hdiutil verify "$DIST_DIR/$DMG_NAME" >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${RED} ❌ DMG 校验失败,保留 .app 以便排查。${NC}"
|
||||
else
|
||||
rm -rf "$DIST_DIR/$APP_DEST_NAME"
|
||||
echo " ✅ 已生成 $DMG_NAME"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -f "$DIST_DIR/$DMG_NAME" ]; then
|
||||
echo -e "${RED} ❌ DMG 生成失败。${NC}"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 create-dmg 工具。${NC}"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED} ❌ macOS amd64 构建失败。${NC}"
|
||||
fi
|
||||
|
||||
# --- Windows AMD64 构建 ---
|
||||
@@ -134,7 +309,9 @@ echo -e "${GREEN}🪟 正在构建 Windows (amd64)...${NC}"
|
||||
if command -v x86_64-w64-mingw32-gcc &> /dev/null; then
|
||||
wails build -platform windows/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$DIST_DIR/${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
TARGET_EXE="$DIST_DIR/${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$TARGET_EXE"
|
||||
try_compress_binary_with_upx "$TARGET_EXE" "Windows amd64 可执行文件"
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
else
|
||||
echo -e "${RED} ❌ Windows amd64 构建失败。${NC}"
|
||||
@@ -148,7 +325,9 @@ echo -e "${GREEN}🪟 正在构建 Windows (arm64)...${NC}"
|
||||
if command -v aarch64-w64-mingw32-gcc &> /dev/null; then
|
||||
wails build -platform windows/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$DIST_DIR/${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
TARGET_EXE="$DIST_DIR/${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$TARGET_EXE"
|
||||
echo -e "${YELLOW} ⚠️ 当前 UPX 不支持 win64/arm64,跳过 Windows arm64 压缩。${NC}"
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
else
|
||||
echo -e "${RED} ❌ Windows arm64 构建失败。${NC}"
|
||||
@@ -168,8 +347,10 @@ if [ "$CURRENT_OS" = "Linux" ] && [ "$CURRENT_ARCH" = "x86_64" ]; then
|
||||
# 本机 Linux amd64,直接构建
|
||||
wails build -platform linux/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux amd64 可执行文件"
|
||||
# 打包为 tar.gz
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-amd64.tar.gz" "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
@@ -186,8 +367,10 @@ elif command -v x86_64-linux-gnu-gcc &> /dev/null; then
|
||||
export CGO_ENABLED=1
|
||||
wails build -platform linux/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux amd64 可执行文件"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-amd64.tar.gz" "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
@@ -208,8 +391,10 @@ if [ "$CURRENT_OS" = "Linux" ] && [ "$CURRENT_ARCH" = "aarch64" ]; then
|
||||
# 本机 Linux arm64,直接构建
|
||||
wails build -platform linux/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux arm64 可执行文件"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-arm64.tar.gz" "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
@@ -225,8 +410,10 @@ elif command -v aarch64-linux-gnu-gcc &> /dev/null; then
|
||||
export CGO_ENABLED=1
|
||||
wails build -platform linux/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
TARGET_LINUX_BIN="$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$TARGET_LINUX_BIN"
|
||||
chmod +x "$TARGET_LINUX_BIN"
|
||||
try_compress_binary_with_upx "$TARGET_LINUX_BIN" "Linux arm64 可执行文件"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-arm64.tar.gz" "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
|
||||
68
build/darwin/Info.dev.plist
Normal file
68
build/darwin/Info.dev.plist
Normal file
@@ -0,0 +1,68 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>{{.Info.ProductName}}</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>{{.OutputFilename}}</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.wails.{{.Name}}.dev</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>{{.Info.Comments}}</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>iconfile</string>
|
||||
<key>LSMinimumSystemVersion</key>
|
||||
<string>10.13.0</string>
|
||||
<key>NSHighResolutionCapable</key>
|
||||
<string>true</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>{{.Info.Copyright}}</string>
|
||||
{{if .Info.FileAssociations}}
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
{{range .Info.FileAssociations}}
|
||||
<dict>
|
||||
<key>CFBundleTypeExtensions</key>
|
||||
<array>
|
||||
<string>{{.Ext}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeName</key>
|
||||
<string>{{.Name}}</string>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
<key>CFBundleTypeIconFile</key>
|
||||
<string>{{.IconName}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
{{if .Info.Protocols}}
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
{{range .Info.Protocols}}
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>com.wails.{{.Scheme}}</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>{{.Scheme}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
<key>NSAppTransportSecurity</key>
|
||||
<dict>
|
||||
<key>NSAllowsLocalNetworking</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
63
build/darwin/Info.plist
Normal file
63
build/darwin/Info.plist
Normal file
@@ -0,0 +1,63 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>{{.Info.ProductName}}</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>{{.OutputFilename}}</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.wails.{{.Name}}</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>{{.Info.Comments}}</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>iconfile</string>
|
||||
<key>LSMinimumSystemVersion</key>
|
||||
<string>10.13.0</string>
|
||||
<key>NSHighResolutionCapable</key>
|
||||
<string>true</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>{{.Info.Copyright}}</string>
|
||||
{{if .Info.FileAssociations}}
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
{{range .Info.FileAssociations}}
|
||||
<dict>
|
||||
<key>CFBundleTypeExtensions</key>
|
||||
<array>
|
||||
<string>{{.Ext}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeName</key>
|
||||
<string>{{.Name}}</string>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
<key>CFBundleTypeIconFile</key>
|
||||
<string>{{.IconName}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
{{if .Info.Protocols}}
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
{{range .Info.Protocols}}
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>com.wails.{{.Scheme}}</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>{{.Scheme}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
</dict>
|
||||
</plist>
|
||||
BIN
build/darwin/icon.icns
Normal file
BIN
build/darwin/icon.icns
Normal file
Binary file not shown.
227
cmd/mysql-driver-agent/main.go
Normal file
227
cmd/mysql-driver-agent/main.go
Normal file
@@ -0,0 +1,227 @@
|
||||
//go:build gonavi_mysql_driver
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
)
|
||||
|
||||
// mysqlAgentRequest is one line-delimited JSON command read from stdin.
// Only the fields relevant to the given Method are populated by the host
// process; the rest stay at their zero values.
type mysqlAgentRequest struct {
	ID        int64                        `json:"id"`
	Method    string                       `json:"method"`
	Config    *connection.ConnectionConfig `json:"config,omitempty"`
	Query     string                       `json:"query,omitempty"`
	DBName    string                       `json:"dbName,omitempty"`
	TableName string                       `json:"tableName,omitempty"`
	Changes   *connection.ChangeSet        `json:"changes,omitempty"`
}

// mysqlAgentResponse echoes the request ID and carries either an error
// message or the result payload (rows plus field order, or a row count).
type mysqlAgentResponse struct {
	ID           int64       `json:"id"`
	Success      bool        `json:"success"`
	Error        string      `json:"error,omitempty"`
	Data         interface{} `json:"data,omitempty"`
	Fields       []string    `json:"fields,omitempty"`
	RowsAffected int64       `json:"rowsAffected,omitempty"`
}

// Wire method names accepted by handleRequest.
const (
	mysqlAgentMethodConnect       = "connect"
	mysqlAgentMethodClose         = "close"
	mysqlAgentMethodPing          = "ping"
	mysqlAgentMethodQuery         = "query"
	mysqlAgentMethodExec          = "exec"
	mysqlAgentMethodGetDatabases  = "getDatabases"
	mysqlAgentMethodGetTables     = "getTables"
	mysqlAgentMethodGetCreateStmt = "getCreateStatement"
	mysqlAgentMethodGetColumns    = "getColumns"
	mysqlAgentMethodGetAllColumns = "getAllColumns"
	mysqlAgentMethodGetIndexes    = "getIndexes"
	mysqlAgentMethodGetForeignKey = "getForeignKeys"
	mysqlAgentMethodGetTriggers   = "getTriggers"
	mysqlAgentMethodApplyChanges  = "applyChanges"
)
|
||||
|
||||
func main() {
|
||||
scanner := bufio.NewScanner(os.Stdin)
|
||||
scanner.Buffer(make([]byte, 0, 16<<10), 8<<20)
|
||||
writer := bufio.NewWriter(os.Stdout)
|
||||
defer writer.Flush()
|
||||
|
||||
var inst *db.MySQLDB
|
||||
for scanner.Scan() {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
var req mysqlAgentRequest
|
||||
if err := json.Unmarshal([]byte(line), &req); err != nil {
|
||||
_ = writeResponse(writer, mysqlAgentResponse{
|
||||
ID: req.ID,
|
||||
Success: false,
|
||||
Error: fmt.Sprintf("解析请求失败:%v", err),
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
resp := handleRequest(&inst, req)
|
||||
if err := writeResponse(writer, resp); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "写入响应失败:%v\n", err)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if inst != nil {
|
||||
_ = inst.Close()
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "读取请求失败:%v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
func handleRequest(inst **db.MySQLDB, req mysqlAgentRequest) mysqlAgentResponse {
|
||||
resp := mysqlAgentResponse{
|
||||
ID: req.ID,
|
||||
Success: true,
|
||||
}
|
||||
|
||||
switch strings.TrimSpace(req.Method) {
|
||||
case mysqlAgentMethodConnect:
|
||||
if req.Config == nil {
|
||||
return fail(resp, "连接配置为空")
|
||||
}
|
||||
if *inst != nil {
|
||||
_ = (*inst).Close()
|
||||
}
|
||||
next := &db.MySQLDB{}
|
||||
if err := next.Connect(*req.Config); err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
*inst = next
|
||||
return resp
|
||||
case mysqlAgentMethodClose:
|
||||
if *inst != nil {
|
||||
if err := (*inst).Close(); err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
*inst = nil
|
||||
}
|
||||
return resp
|
||||
}
|
||||
|
||||
if *inst == nil {
|
||||
return fail(resp, "connection not open")
|
||||
}
|
||||
|
||||
switch strings.TrimSpace(req.Method) {
|
||||
case mysqlAgentMethodPing:
|
||||
if err := (*inst).Ping(); err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
case mysqlAgentMethodQuery:
|
||||
data, fields, err := (*inst).Query(req.Query)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
resp.Fields = fields
|
||||
case mysqlAgentMethodExec:
|
||||
affected, err := (*inst).Exec(req.Query)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.RowsAffected = affected
|
||||
case mysqlAgentMethodGetDatabases:
|
||||
data, err := (*inst).GetDatabases()
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetTables:
|
||||
data, err := (*inst).GetTables(req.DBName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetCreateStmt:
|
||||
data, err := (*inst).GetCreateStatement(req.DBName, req.TableName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetColumns:
|
||||
data, err := (*inst).GetColumns(req.DBName, req.TableName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetAllColumns:
|
||||
data, err := (*inst).GetAllColumns(req.DBName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetIndexes:
|
||||
data, err := (*inst).GetIndexes(req.DBName, req.TableName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetForeignKey:
|
||||
data, err := (*inst).GetForeignKeys(req.DBName, req.TableName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodGetTriggers:
|
||||
data, err := (*inst).GetTriggers(req.DBName, req.TableName)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
case mysqlAgentMethodApplyChanges:
|
||||
if req.Changes == nil {
|
||||
return fail(resp, "变更集为空")
|
||||
}
|
||||
applier, ok := interface{}(*inst).(interface {
|
||||
ApplyChanges(tableName string, changes connection.ChangeSet) error
|
||||
})
|
||||
if !ok {
|
||||
return fail(resp, "当前驱动不支持 ApplyChanges")
|
||||
}
|
||||
if err := applier.ApplyChanges(req.TableName, *req.Changes); err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
default:
|
||||
return fail(resp, "不支持的方法")
|
||||
}
|
||||
|
||||
return resp
|
||||
}
|
||||
|
||||
func writeResponse(writer *bufio.Writer, resp mysqlAgentResponse) error {
|
||||
payload, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
payload = append(payload, '\n')
|
||||
if _, err := writer.Write(payload); err != nil {
|
||||
return err
|
||||
}
|
||||
return writer.Flush()
|
||||
}
|
||||
|
||||
func fail(resp mysqlAgentResponse, errText string) mysqlAgentResponse {
|
||||
resp.Success = false
|
||||
resp.Error = strings.TrimSpace(errText)
|
||||
return resp
|
||||
}
|
||||
330
cmd/optional-driver-agent/main.go
Normal file
330
cmd/optional-driver-agent/main.go
Normal file
@@ -0,0 +1,330 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
)
|
||||
|
||||
// agentRequest is one line-delimited JSON command read from stdin.
// Only the fields relevant to the given Method are populated by the host.
type agentRequest struct {
	ID        int64                        `json:"id"`
	Method    string                       `json:"method"`
	Config    *connection.ConnectionConfig `json:"config,omitempty"`
	Query     string                       `json:"query,omitempty"`
	TimeoutMs int64                        `json:"timeoutMs,omitempty"`
	DBName    string                       `json:"dbName,omitempty"`
	TableName string                       `json:"tableName,omitempty"`
	Changes   *connection.ChangeSet        `json:"changes,omitempty"`
}

// agentResponse echoes the request ID and carries either an error message
// or the result payload (rows plus field order, or an affected-row count).
type agentResponse struct {
	ID           int64       `json:"id"`
	Success      bool        `json:"success"`
	Error        string      `json:"error,omitempty"`
	Data         interface{} `json:"data,omitempty"`
	Fields       []string    `json:"fields,omitempty"`
	RowsAffected int64       `json:"rowsAffected,omitempty"`
}

// Wire method names accepted by handleRequest.
const (
	agentMethodConnect       = "connect"
	agentMethodClose         = "close"
	agentMethodPing          = "ping"
	agentMethodQuery         = "query"
	agentMethodExec          = "exec"
	agentMethodGetDatabases  = "getDatabases"
	agentMethodGetTables     = "getTables"
	agentMethodGetCreateStmt = "getCreateStatement"
	agentMethodGetColumns    = "getColumns"
	agentMethodGetAllColumns = "getAllColumns"
	agentMethodGetIndexes    = "getIndexes"
	agentMethodGetForeignKey = "getForeignKeys"
	agentMethodGetTriggers   = "getTriggers"
	agentMethodApplyChanges  = "applyChanges"
)

// legacyClickHouseDefaultTimeout is applied when a ClickHouse request
// supplies no timeout (see queryWithOptionalTimeout / execWithOptionalTimeout).
const legacyClickHouseDefaultTimeout = 2 * time.Hour

// Populated by the build-tag-selected provider_*.go init(); main refuses
// to start when either is unset.
var (
	agentDriverType      string
	agentDatabaseFactory func() db.Database
)
|
||||
|
||||
func main() {
|
||||
if agentDatabaseFactory == nil || strings.TrimSpace(agentDriverType) == "" {
|
||||
fmt.Fprintf(os.Stderr, "未配置驱动代理 provider,请使用 gonavi_<driver>_driver 标签构建\n")
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(os.Stdin)
|
||||
scanner.Buffer(make([]byte, 0, 16<<10), 8<<20)
|
||||
writer := bufio.NewWriter(os.Stdout)
|
||||
defer writer.Flush()
|
||||
|
||||
var inst db.Database
|
||||
for scanner.Scan() {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
var req agentRequest
|
||||
if err := json.Unmarshal([]byte(line), &req); err != nil {
|
||||
_ = writeResponse(writer, agentResponse{
|
||||
ID: req.ID,
|
||||
Success: false,
|
||||
Error: fmt.Sprintf("解析请求失败:%v", err),
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
resp := handleRequest(&inst, req)
|
||||
if err := writeResponse(writer, resp); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "写入响应失败:%v\n", err)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if inst != nil {
|
||||
_ = inst.Close()
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "读取请求失败:%v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
// handleRequest dispatches one agent request against the (possibly nil)
// driver connection held in *inst and returns the response to emit.
//
// connect/close are handled first and may replace or clear *inst; every
// other method requires an open connection and falls through to the
// second switch below.
func handleRequest(inst *db.Database, req agentRequest) agentResponse {
	resp := agentResponse{ID: req.ID, Success: true}
	method := strings.TrimSpace(req.Method)

	switch method {
	case agentMethodConnect:
		if req.Config == nil {
			return fail(resp, "连接配置为空")
		}
		// Replace any existing connection; best-effort close of the old one.
		if *inst != nil {
			_ = (*inst).Close()
		}
		next := agentDatabaseFactory()
		if next == nil {
			return fail(resp, "驱动代理初始化失败")
		}
		if err := next.Connect(*req.Config); err != nil {
			return fail(resp, err.Error())
		}
		*inst = next
		return resp
	case agentMethodClose:
		if *inst != nil {
			if err := (*inst).Close(); err != nil {
				return fail(resp, err.Error())
			}
			*inst = nil
		}
		return resp
	}

	// All remaining methods operate on an established connection.
	if *inst == nil {
		return fail(resp, "connection not open")
	}

	switch method {
	case agentMethodPing:
		if err := (*inst).Ping(); err != nil {
			return fail(resp, err.Error())
		}
	case agentMethodQuery:
		// Query/Exec honor the per-request timeout (with a ClickHouse
		// legacy default) via the *WithOptionalTimeout helpers.
		data, fields, err := queryWithOptionalTimeout(*inst, req.Query, req.TimeoutMs)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
		resp.Fields = fields
	case agentMethodExec:
		affected, err := execWithOptionalTimeout(*inst, req.Query, req.TimeoutMs)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.RowsAffected = affected
	case agentMethodGetDatabases:
		data, err := (*inst).GetDatabases()
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetTables:
		data, err := (*inst).GetTables(req.DBName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetCreateStmt:
		data, err := (*inst).GetCreateStatement(req.DBName, req.TableName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetColumns:
		data, err := (*inst).GetColumns(req.DBName, req.TableName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetAllColumns:
		data, err := (*inst).GetAllColumns(req.DBName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetIndexes:
		data, err := (*inst).GetIndexes(req.DBName, req.TableName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetForeignKey:
		data, err := (*inst).GetForeignKeys(req.DBName, req.TableName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodGetTriggers:
		data, err := (*inst).GetTriggers(req.DBName, req.TableName)
		if err != nil {
			return fail(resp, err.Error())
		}
		resp.Data = data
	case agentMethodApplyChanges:
		if req.Changes == nil {
			return fail(resp, "变更集为空")
		}
		// ApplyChanges is an optional capability; probe for it via a
		// one-off interface assertion on the driver.
		applier, ok := (*inst).(interface {
			ApplyChanges(tableName string, changes connection.ChangeSet) error
		})
		if !ok {
			return fail(resp, "当前驱动不支持 ApplyChanges")
		}
		if err := applier.ApplyChanges(req.TableName, *req.Changes); err != nil {
			return fail(resp, err.Error())
		}
	default:
		return fail(resp, "不支持的方法")
	}

	return resp
}
|
||||
|
||||
func writeResponse(writer *bufio.Writer, resp agentResponse) error {
|
||||
// 对响应数据做统一 JSON 安全归一化:
|
||||
// 将 map[any]any(如 duckdb.Map)递归转换为 map[string]any,避免序列化失败导致代理进程退出。
|
||||
safeResp := resp
|
||||
safeResp.Data = normalizeAgentResponseData(resp.Data)
|
||||
payload, err := json.Marshal(safeResp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
payload = append(payload, '\n')
|
||||
if _, err := writer.Write(payload); err != nil {
|
||||
return err
|
||||
}
|
||||
return writer.Flush()
|
||||
}
|
||||
|
||||
func fail(resp agentResponse, errText string) agentResponse {
|
||||
resp.Success = false
|
||||
resp.Error = strings.TrimSpace(errText)
|
||||
return resp
|
||||
}
|
||||
|
||||
// normalizeAgentResponseData recursively rewrites v into a JSON-safe
// shape: every map (whatever its key type) becomes map[string]interface{}
// with fmt.Sprint-ed keys, slices and arrays become []interface{}, and
// pointers/interfaces are dereferenced. Nil pointers, maps, and slices
// collapse to nil. []byte is returned untouched so binary columns keep
// their base64 JSON encoding. Everything else passes through unchanged.
func normalizeAgentResponseData(v interface{}) interface{} {
	if v == nil {
		return nil
	}

	val := reflect.ValueOf(v)
	switch val.Kind() {
	case reflect.Pointer, reflect.Interface:
		if val.IsNil() {
			return nil
		}
		return normalizeAgentResponseData(val.Elem().Interface())
	case reflect.Map:
		if val.IsNil() {
			return nil
		}
		converted := make(map[string]interface{}, val.Len())
		for it := val.MapRange(); it.Next(); {
			converted[fmt.Sprint(it.Key().Interface())] = normalizeAgentResponseData(it.Value().Interface())
		}
		return converted
	case reflect.Slice:
		if val.IsNil() {
			return nil
		}
		// Leave []byte alone to preserve the existing base64 encoding.
		if val.Type().Elem().Kind() == reflect.Uint8 {
			return v
		}
		return normalizeAgentSequence(val)
	case reflect.Array:
		return normalizeAgentSequence(val)
	default:
		return v
	}
}

// normalizeAgentSequence converts each element of a slice or array value.
func normalizeAgentSequence(val reflect.Value) []interface{} {
	converted := make([]interface{}, val.Len())
	for i := range converted {
		converted[i] = normalizeAgentResponseData(val.Index(i).Interface())
	}
	return converted
}
|
||||
|
||||
func queryWithOptionalTimeout(inst db.Database, query string, timeoutMs int64) ([]map[string]interface{}, []string, error) {
|
||||
effectiveTimeoutMs := timeoutMs
|
||||
if effectiveTimeoutMs <= 0 && strings.EqualFold(strings.TrimSpace(agentDriverType), "clickhouse") {
|
||||
effectiveTimeoutMs = int64(legacyClickHouseDefaultTimeout / time.Millisecond)
|
||||
}
|
||||
if effectiveTimeoutMs <= 0 {
|
||||
return inst.Query(query)
|
||||
}
|
||||
if q, ok := inst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(effectiveTimeoutMs)*time.Millisecond)
|
||||
defer cancel()
|
||||
return q.QueryContext(ctx, query)
|
||||
}
|
||||
return inst.Query(query)
|
||||
}
|
||||
|
||||
func execWithOptionalTimeout(inst db.Database, query string, timeoutMs int64) (int64, error) {
|
||||
effectiveTimeoutMs := timeoutMs
|
||||
if effectiveTimeoutMs <= 0 && strings.EqualFold(strings.TrimSpace(agentDriverType), "clickhouse") {
|
||||
effectiveTimeoutMs = int64(legacyClickHouseDefaultTimeout / time.Millisecond)
|
||||
}
|
||||
if effectiveTimeoutMs <= 0 {
|
||||
return inst.Exec(query)
|
||||
}
|
||||
if e, ok := inst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(effectiveTimeoutMs)*time.Millisecond)
|
||||
defer cancel()
|
||||
return e.ExecContext(ctx, query)
|
||||
}
|
||||
return inst.Exec(query)
|
||||
}
|
||||
172
cmd/optional-driver-agent/main_test.go
Normal file
172
cmd/optional-driver-agent/main_test.go
Normal file
@@ -0,0 +1,172 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
// duckMapLike mimics duckdb's MAP value type: a Go map with interface
// keys, which encoding/json cannot marshal directly.
type duckMapLike map[any]any

// TestWriteResponse_NormalizesMapAnyAny verifies that writeResponse
// rewrites map[any]any values (string-izing non-string keys) so the
// emitted JSON line decodes into ordinary map[string]interface{} rows.
func TestWriteResponse_NormalizesMapAnyAny(t *testing.T) {
	resp := agentResponse{
		ID:      1,
		Success: true,
		Data: []map[string]interface{}{
			{
				"id":   int64(7),
				"meta": duckMapLike{"k": "v", 2: "two"},
			},
		},
	}

	var out bytes.Buffer
	writer := bufio.NewWriter(&out)
	if err := writeResponse(writer, resp); err != nil {
		t.Fatalf("writeResponse 返回错误: %v", err)
	}

	var decoded struct {
		Data []map[string]interface{} `json:"data"`
	}
	if err := json.Unmarshal(bytes.TrimSpace(out.Bytes()), &decoded); err != nil {
		t.Fatalf("解码响应失败: %v", err)
	}

	if len(decoded.Data) != 1 {
		t.Fatalf("期望 1 行数据,实际 %d", len(decoded.Data))
	}
	meta, ok := decoded.Data[0]["meta"].(map[string]interface{})
	if !ok {
		t.Fatalf("meta 字段类型异常: %T", decoded.Data[0]["meta"])
	}
	if meta["k"] != "v" {
		t.Fatalf("字符串 key 转换异常: %v", meta["k"])
	}
	// The integer key 2 must have been stringified to "2".
	if meta["2"] != "two" {
		t.Fatalf("数字 key 未字符串化: %v", meta["2"])
	}
}

// TestNormalizeAgentResponseData_KeepByteSlice pins the []byte
// passthrough: binary columns must keep their base64 JSON encoding, so
// normalization returns byte slices untouched.
func TestNormalizeAgentResponseData_KeepByteSlice(t *testing.T) {
	raw := []byte{0x61, 0x62, 0x63}
	normalized := normalizeAgentResponseData(raw)
	out, ok := normalized.([]byte)
	if !ok {
		t.Fatalf("期望 []byte,实际 %T", normalized)
	}
	if !bytes.Equal(out, raw) {
		t.Fatalf("[]byte 内容被意外改写: %v", out)
	}
}
|
||||
|
||||
// fakeAgentTimeoutDB is a db.Database test double that records which
// call path (plain vs. context-aware) was taken and whether a deadline
// was attached. Its plain Query/Exec return errors so tests fail loudly
// if the timeout helpers pick the wrong path.
type fakeAgentTimeoutDB struct {
	queryCalled        bool // plain Query was invoked
	queryContextCalled bool // QueryContext was invoked
	execCalled         bool // plain Exec was invoked
	execContextCalled  bool // ExecContext was invoked
	deadlineSet        bool // a context deadline was observed
}

func (f *fakeAgentTimeoutDB) Connect(config connection.ConnectionConfig) error { return nil }
func (f *fakeAgentTimeoutDB) Close() error                                     { return nil }
func (f *fakeAgentTimeoutDB) Ping() error                                      { return nil }

// Query must never be reached when a timeout is in effect.
func (f *fakeAgentTimeoutDB) Query(query string) ([]map[string]interface{}, []string, error) {
	f.queryCalled = true
	return nil, nil, errors.New("query should not be called")
}

// QueryContext records the call and whether the context carried a deadline.
func (f *fakeAgentTimeoutDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
	f.queryContextCalled = true
	if _, ok := ctx.Deadline(); ok {
		f.deadlineSet = true
	}
	return []map[string]interface{}{{"ok": 1}}, []string{"ok"}, nil
}

// Exec must never be reached when a timeout is in effect.
func (f *fakeAgentTimeoutDB) Exec(query string) (int64, error) {
	f.execCalled = true
	return 0, errors.New("exec should not be called")
}

// ExecContext records the call and whether the context carried a deadline.
func (f *fakeAgentTimeoutDB) ExecContext(ctx context.Context, query string) (int64, error) {
	f.execContextCalled = true
	if _, ok := ctx.Deadline(); ok {
		f.deadlineSet = true
	}
	return 3, nil
}

// Remaining db.Database methods are inert stubs.
func (f *fakeAgentTimeoutDB) GetDatabases() ([]string, error) { return nil, nil }
func (f *fakeAgentTimeoutDB) GetTables(dbName string) ([]string, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetCreateStatement(dbName, tableName string) (string, error) {
	return "", nil
}
func (f *fakeAgentTimeoutDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	return nil, nil
}
func (f *fakeAgentTimeoutDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	return nil, nil
}
|
||||
|
||||
// TestQueryWithOptionalTimeout_UsesQueryContext verifies that a positive
// timeout routes through the driver's QueryContext with a deadline set,
// never the plain Query path, and passes the result through unchanged.
func TestQueryWithOptionalTimeout_UsesQueryContext(t *testing.T) {
	fake := &fakeAgentTimeoutDB{}
	data, fields, err := queryWithOptionalTimeout(fake, "SELECT 1", int64((2 * time.Second).Milliseconds()))
	if err != nil {
		t.Fatalf("queryWithOptionalTimeout 返回错误: %v", err)
	}
	if !fake.queryContextCalled || fake.queryCalled {
		t.Fatalf("query 调用路径异常,QueryContext=%v Query=%v", fake.queryContextCalled, fake.queryCalled)
	}
	if !fake.deadlineSet {
		t.Fatal("queryWithOptionalTimeout 未设置 deadline")
	}
	if len(data) != 1 || len(fields) != 1 || fields[0] != "ok" {
		t.Fatalf("queryWithOptionalTimeout 返回数据异常: data=%v fields=%v", data, fields)
	}
}

// TestExecWithOptionalTimeout_UsesExecContext is the Exec counterpart:
// a positive timeout must use ExecContext with a deadline and return the
// driver's affected-row count.
func TestExecWithOptionalTimeout_UsesExecContext(t *testing.T) {
	fake := &fakeAgentTimeoutDB{}
	affected, err := execWithOptionalTimeout(fake, "DELETE FROM t", int64((2 * time.Second).Milliseconds()))
	if err != nil {
		t.Fatalf("execWithOptionalTimeout 返回错误: %v", err)
	}
	if !fake.execContextCalled || fake.execCalled {
		t.Fatalf("exec 调用路径异常,ExecContext=%v Exec=%v", fake.execContextCalled, fake.execCalled)
	}
	if !fake.deadlineSet {
		t.Fatal("execWithOptionalTimeout 未设置 deadline")
	}
	if affected != 3 {
		t.Fatalf("受影响行数异常,want=3 got=%d", affected)
	}
}

// TestQueryWithOptionalTimeout_ClickHouseLegacyModeUsesQueryContext checks
// that with timeoutMs == 0 and the "clickhouse" driver type, the legacy
// default timeout kicks in and the context-aware path is still taken.
// The package-level agentDriverType is swapped in and restored.
func TestQueryWithOptionalTimeout_ClickHouseLegacyModeUsesQueryContext(t *testing.T) {
	old := agentDriverType
	agentDriverType = "clickhouse"
	defer func() { agentDriverType = old }()

	fake := &fakeAgentTimeoutDB{}
	_, _, err := queryWithOptionalTimeout(fake, "SELECT 1", 0)
	if err != nil {
		t.Fatalf("queryWithOptionalTimeout 返回错误: %v", err)
	}
	if !fake.queryContextCalled || fake.queryCalled {
		t.Fatalf("clickhouse legacy query 调用路径异常,QueryContext=%v Query=%v", fake.queryContextCalled, fake.queryCalled)
	}
}
|
||||
12
cmd/optional-driver-agent/provider_clickhouse.go
Normal file
12
cmd/optional-driver-agent/provider_clickhouse.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_clickhouse_driver

package main

import "GoNavi-Wails/internal/db"

// init wires this build's driver into the shared optional-driver agent:
// it records the driver label and the factory that yields a fresh
// db.Database for each "connect" request.
func init() {
	agentDriverType = "clickhouse"
	agentDatabaseFactory = func() db.Database {
		return &db.ClickHouseDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_dameng.go
Normal file
12
cmd/optional-driver-agent/provider_dameng.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_dameng_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the Dameng driver with the optional-driver agent.
func init() {
	agentDriverType = "dameng"
	agentDatabaseFactory = func() db.Database {
		return &db.DamengDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_diros.go
Normal file
12
cmd/optional-driver-agent/provider_diros.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_diros_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the Diros driver with the optional-driver agent.
func init() {
	agentDriverType = "diros"
	agentDatabaseFactory = func() db.Database {
		return &db.DirosDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_duckdb.go
Normal file
12
cmd/optional-driver-agent/provider_duckdb.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_duckdb_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the DuckDB driver with the optional-driver agent.
func init() {
	agentDriverType = "duckdb"
	agentDatabaseFactory = func() db.Database {
		return &db.DuckDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_highgo.go
Normal file
12
cmd/optional-driver-agent/provider_highgo.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_highgo_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the HighGo driver with the optional-driver agent.
func init() {
	agentDriverType = "highgo"
	agentDatabaseFactory = func() db.Database {
		return &db.HighGoDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_kingbase.go
Normal file
12
cmd/optional-driver-agent/provider_kingbase.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_kingbase_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the Kingbase driver with the optional-driver agent.
func init() {
	agentDriverType = "kingbase"
	agentDatabaseFactory = func() db.Database {
		return &db.KingbaseDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_mariadb.go
Normal file
12
cmd/optional-driver-agent/provider_mariadb.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_mariadb_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the MariaDB driver with the optional-driver agent.
func init() {
	agentDriverType = "mariadb"
	agentDatabaseFactory = func() db.Database {
		return &db.MariaDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_mongodb.go
Normal file
12
cmd/optional-driver-agent/provider_mongodb.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_mongodb_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the MongoDB driver with the optional-driver agent.
func init() {
	agentDriverType = "mongodb"
	agentDatabaseFactory = func() db.Database {
		return &db.MongoDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_mongodb_v1.go
Normal file
12
cmd/optional-driver-agent/provider_mongodb_v1.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_mongodb_driver_v1

package main

import "GoNavi-Wails/internal/db"

// init registers the legacy v1 MongoDB driver with the optional-driver
// agent. It reports the same "mongodb" driver type as the current driver;
// only the build tag selects which implementation is compiled in.
func init() {
	agentDriverType = "mongodb"
	agentDatabaseFactory = func() db.Database {
		return &db.MongoDBV1{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_mysql.go
Normal file
12
cmd/optional-driver-agent/provider_mysql.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_mysql_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the MySQL driver with the optional-driver agent.
func init() {
	agentDriverType = "mysql"
	agentDatabaseFactory = func() db.Database {
		return &db.MySQLDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_sphinx.go
Normal file
12
cmd/optional-driver-agent/provider_sphinx.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_sphinx_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the Sphinx driver with the optional-driver agent.
func init() {
	agentDriverType = "sphinx"
	agentDatabaseFactory = func() db.Database {
		return &db.SphinxDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_sqlite.go
Normal file
12
cmd/optional-driver-agent/provider_sqlite.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_sqlite_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the SQLite driver with the optional-driver agent.
func init() {
	agentDriverType = "sqlite"
	agentDatabaseFactory = func() db.Database {
		return &db.SQLiteDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_sqlserver.go
Normal file
12
cmd/optional-driver-agent/provider_sqlserver.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_sqlserver_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the SQL Server driver with the optional-driver agent.
func init() {
	agentDriverType = "sqlserver"
	agentDatabaseFactory = func() db.Database {
		return &db.SqlServerDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_tdengine.go
Normal file
12
cmd/optional-driver-agent/provider_tdengine.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_tdengine_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the TDengine driver with the optional-driver agent.
func init() {
	agentDriverType = "tdengine"
	agentDatabaseFactory = func() db.Database {
		return &db.TDengineDB{}
	}
}
|
||||
12
cmd/optional-driver-agent/provider_vastbase.go
Normal file
12
cmd/optional-driver-agent/provider_vastbase.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_vastbase_driver

package main

import "GoNavi-Wails/internal/db"

// init registers the Vastbase driver with the optional-driver agent.
func init() {
	agentDriverType = "vastbase"
	agentDatabaseFactory = func() db.Database {
		return &db.VastbaseDB{}
	}
}
|
||||
89
docs/driver-manifest.json
Normal file
89
docs/driver-manifest.json
Normal file
@@ -0,0 +1,89 @@
|
||||
{
|
||||
"engine": "go",
|
||||
"drivers": {
|
||||
"mariadb": {
|
||||
"engine": "go",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/mariadb"
|
||||
},
|
||||
"doris": {
|
||||
"engine": "go",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/doris"
|
||||
},
|
||||
"sphinx": {
|
||||
"engine": "go",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/sphinx"
|
||||
},
|
||||
"sqlserver": {
|
||||
"engine": "go",
|
||||
"version": "1.9.6",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/sqlserver"
|
||||
},
|
||||
"sqlite": {
|
||||
"engine": "go",
|
||||
"version": "1.44.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/sqlite"
|
||||
},
|
||||
"duckdb": {
|
||||
"engine": "go",
|
||||
"version": "2.5.6",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/duckdb"
|
||||
},
|
||||
"dameng": {
|
||||
"engine": "go",
|
||||
"version": "1.8.22",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/dameng"
|
||||
},
|
||||
"kingbase": {
|
||||
"engine": "go",
|
||||
"version": "0.0.0-20201021123113-29bd62a876c3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/kingbase"
|
||||
},
|
||||
"highgo": {
|
||||
"engine": "go",
|
||||
"version": "0.0.0-local",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/highgo"
|
||||
},
|
||||
"vastbase": {
|
||||
"engine": "go",
|
||||
"version": "1.11.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/vastbase"
|
||||
},
|
||||
"mongodb": {
|
||||
"engine": "go",
|
||||
"version": "2.5.0",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/mongodb"
|
||||
},
|
||||
"tdengine": {
|
||||
"engine": "go",
|
||||
"version": "3.7.8",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/tdengine"
|
||||
},
|
||||
"clickhouse": {
|
||||
"engine": "go",
|
||||
"version": "2.43.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/clickhouse"
|
||||
},
|
||||
"postgres": {
|
||||
"engine": "go",
|
||||
"version": "1.11.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/postgres"
|
||||
}
|
||||
}
|
||||
}
|
||||
1
frontend/.gitignore
vendored
Normal file
1
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.ace-tool/
|
||||
@@ -5,6 +5,23 @@
|
||||
<link rel="icon" type="image/svg+xml" href="/logo.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>GoNavi</title>
|
||||
<script>
|
||||
if (typeof window !== 'undefined' && !window.go) {
|
||||
window.go = {
|
||||
app: {
|
||||
App: new Proxy({}, { get: () => async () => ({ success: false }) })
|
||||
}
|
||||
};
|
||||
}
|
||||
if (typeof window !== 'undefined' && !window.runtime) {
|
||||
window.runtime = new Proxy({}, {
|
||||
get: (target, prop) => {
|
||||
if (prop === 'Environment') return async () => ({ platform: 'darwin' });
|
||||
return typeof prop === 'string' && prop.startsWith('WindowIs') ? () => false : () => {};
|
||||
}
|
||||
});
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
|
||||
@@ -6,6 +6,10 @@ html, body, #root {
|
||||
background-color: transparent !important; /* CRITICAL: Allow Wails window transparency */
|
||||
}
|
||||
|
||||
body, #root {
|
||||
border-radius: 14px; /* Slightly rounded app window corners */
|
||||
}
|
||||
|
||||
/* 侧边栏 Tree 样式优化 */
|
||||
.ant-tree .ant-tree-treenode {
|
||||
width: 100%;
|
||||
@@ -53,14 +57,102 @@ body[data-theme='dark'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: #666;
|
||||
}
|
||||
|
||||
/* Scrollbar styling for light mode (transparent-friendly) */
|
||||
body[data-theme='light'] ::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-thumb {
|
||||
background: rgba(0, 0, 0, 0.18);
|
||||
border-radius: 4px;
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(0, 0, 0, 0.30);
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
}
|
||||
|
||||
/* Ensure body background matches theme to avoid white flashes, but kept transparent for window composition */
|
||||
body {
|
||||
transition: color 0.3s;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] {
|
||||
/* Improve contrast on transparent backgrounds */
|
||||
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.8);
|
||||
/* 移除全局 text-shadow:对每个文本元素增加 GPU compositing 成本,
|
||||
在透明窗口环境下会显著加剧 GPU 负载 */
|
||||
}
|
||||
|
||||
/* 暗色 + 透明:提升选中/焦点可读性,避免默认蓝色在半透明背景下发灰 */
|
||||
body[data-theme='dark'] .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected,
|
||||
body[data-theme='dark'] .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected:hover {
|
||||
background: rgba(246, 196, 83, 0.24) !important;
|
||||
color: rgba(255, 236, 179, 0.98) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox-checked .ant-checkbox-inner {
|
||||
background-color: #f6c453 !important;
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox-indeterminate .ant-checkbox-inner::after {
|
||||
background-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox:hover .ant-checkbox-inner,
|
||||
body[data-theme='dark'] .ant-checkbox-wrapper:hover .ant-checkbox-inner {
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-radio-checked .ant-radio-inner {
|
||||
border-color: #f6c453 !important;
|
||||
background-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-radio-wrapper:hover .ant-radio-inner,
|
||||
body[data-theme='dark'] .ant-radio:hover .ant-radio-inner {
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-switch.ant-switch-checked {
|
||||
background: #d8a93b !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-table-tbody > tr.ant-table-row-selected > td,
|
||||
body[data-theme='dark'] .ant-table-tbody .ant-table-row.ant-table-row-selected > .ant-table-cell {
|
||||
background: rgba(246, 196, 83, 0.18) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-table-tbody > tr.ant-table-row-selected:hover > td,
|
||||
body[data-theme='dark'] .ant-table-tbody .ant-table-row.ant-table-row-selected:hover > .ant-table-cell {
|
||||
background: rgba(246, 196, 83, 0.26) !important;
|
||||
}
|
||||
|
||||
/* 连接配置弹窗:滚动仅在弹窗 body 内部,不使用外层 wrap 滚动条 */
|
||||
.connection-modal-wrap {
|
||||
overflow: hidden !important;
|
||||
}
|
||||
|
||||
.connection-modal-wrap .ant-modal-content {
|
||||
max-height: calc(100vh - 72px);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.connection-modal-wrap .ant-modal-body {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.connection-modal-wrap .ant-modal-footer {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
/* Custom Title Bar Close Button Hover */
|
||||
@@ -68,3 +160,53 @@ body[data-theme='dark'] {
|
||||
background-color: #ff4d4f !important;
|
||||
color: #fff !important;
|
||||
}
|
||||
|
||||
/* 驱动管理:统一关闭 antd sticky 横向条,仅保留自定义独立横向条 */
|
||||
.driver-manager-table .ant-table-sticky-scroll {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* 仅在独立横向条激活时隐藏表格自身横向滚动条,避免出现双横向条 */
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-content,
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-body {
|
||||
overflow-x: auto !important;
|
||||
-ms-overflow-style: none;
|
||||
scrollbar-width: none;
|
||||
}
|
||||
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-content::-webkit-scrollbar:horizontal,
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-body::-webkit-scrollbar:horizontal {
|
||||
height: 0 !important;
|
||||
}
|
||||
|
||||
.driver-manager-table-wrap {
|
||||
width: 100%;
|
||||
max-width: 100%;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.driver-manager-footer {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.driver-manager-footer-actions {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.driver-manager-hscroll {
|
||||
width: 100%;
|
||||
height: 12px;
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
scrollbar-gutter: stable;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.driver-manager-hscroll-inner {
|
||||
height: 1px;
|
||||
}
|
||||
|
||||
1810
frontend/src/App.tsx
1810
frontend/src/App.tsx
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,11 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs } from 'antd';
|
||||
import React, { useState, useEffect, useMemo, useRef } from 'react';
|
||||
import { Modal, Form, Select, Input, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs, theme as antdTheme } from 'antd';
|
||||
import { DatabaseOutlined, RocketOutlined, SwapOutlined, TableOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, DBGetTables, DataSync, DataSyncAnalyze, DataSyncPreview } from '../../wailsjs/go/app/App';
|
||||
import { SavedConnection } from '../types';
|
||||
import { EventsOn } from '../../wailsjs/runtime/runtime';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
|
||||
const { Title, Text } = Typography;
|
||||
const { Step } = Steps;
|
||||
@@ -21,6 +23,12 @@ type TableDiffSummary = {
|
||||
deletes?: number;
|
||||
same?: number;
|
||||
message?: string;
|
||||
targetTableExists?: boolean;
|
||||
plannedAction?: string;
|
||||
warnings?: string[];
|
||||
unsupportedObjects?: string[];
|
||||
indexesToCreate?: number;
|
||||
indexesSkipped?: number;
|
||||
};
|
||||
type TableOps = {
|
||||
insert: boolean;
|
||||
@@ -31,10 +39,135 @@ type TableOps = {
|
||||
selectedDeletePks?: string[];
|
||||
};
|
||||
|
||||
type WorkflowType = 'sync' | 'migration';
|
||||
|
||||
const quoteSqlIdent = (dbType: string, ident: string): string => {
|
||||
const raw = String(ident || '').trim();
|
||||
if (!raw) return raw;
|
||||
const t = String(dbType || '').toLowerCase();
|
||||
if (t === 'mysql' || t === 'mariadb' || t === 'diros' || t === 'sphinx' || t === 'clickhouse' || t === 'tdengine') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
if (t === 'sqlserver') {
|
||||
return `[${raw.replace(/]/g, ']]')}]`;
|
||||
}
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
};
|
||||
|
||||
const quoteSqlTable = (dbType: string, tableName: string): string => {
|
||||
const raw = String(tableName || '').trim();
|
||||
if (!raw) return raw;
|
||||
if (!raw.includes('.')) return quoteSqlIdent(dbType, raw);
|
||||
return raw
|
||||
.split('.')
|
||||
.map((part) => quoteSqlIdent(dbType, part))
|
||||
.join('.');
|
||||
};
|
||||
|
||||
const toSqlLiteral = (value: any, dbType: string): string => {
|
||||
if (value === null || value === undefined) return 'NULL';
|
||||
if (typeof value === 'number') return Number.isFinite(value) ? String(value) : 'NULL';
|
||||
if (typeof value === 'bigint') return value.toString();
|
||||
if (typeof value === 'boolean') {
|
||||
const t = String(dbType || '').toLowerCase();
|
||||
if (t === 'sqlserver') return value ? '1' : '0';
|
||||
return value ? 'TRUE' : 'FALSE';
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return `'${value.toISOString().replace(/'/g, "''")}'`;
|
||||
}
|
||||
if (typeof value === 'object') {
|
||||
try {
|
||||
return `'${JSON.stringify(value).replace(/'/g, "''")}'`;
|
||||
} catch {
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
}
|
||||
}
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
};
|
||||
|
||||
const resolveRedisDbIndex = (raw?: string): number => {
|
||||
const value = Number(String(raw || '').trim());
|
||||
return Number.isInteger(value) && value >= 0 && value <= 15 ? value : 0;
|
||||
};
|
||||
|
||||
const buildSqlPreview = (
|
||||
previewData: any,
|
||||
tableName: string,
|
||||
dbType: string,
|
||||
ops?: TableOps,
|
||||
): { sqlText: string; statementCount: number } => {
|
||||
if (!previewData || !tableName) return { sqlText: '', statementCount: 0 };
|
||||
const tableExpr = quoteSqlTable(dbType, tableName);
|
||||
const pkCol = String(previewData.pkColumn || 'id');
|
||||
const statements: string[] = [];
|
||||
|
||||
const insertRows = Array.isArray(previewData.inserts) ? previewData.inserts : [];
|
||||
const updateRows = Array.isArray(previewData.updates) ? previewData.updates : [];
|
||||
const deleteRows = Array.isArray(previewData.deletes) ? previewData.deletes : [];
|
||||
|
||||
const selectedInsert = new Set((ops?.selectedInsertPks || []).map((v) => String(v)));
|
||||
const selectedUpdate = new Set((ops?.selectedUpdatePks || []).map((v) => String(v)));
|
||||
const selectedDelete = new Set((ops?.selectedDeletePks || []).map((v) => String(v)));
|
||||
|
||||
if (ops?.insert !== false) {
|
||||
insertRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedInsert.size > 0 && !selectedInsert.has(pk)) return;
|
||||
const row = rowWrap?.row || {};
|
||||
const columns = Object.keys(row);
|
||||
if (columns.length === 0) return;
|
||||
const colExpr = columns.map((c) => quoteSqlIdent(dbType, c)).join(', ');
|
||||
const valExpr = columns.map((c) => toSqlLiteral(row[c], dbType)).join(', ');
|
||||
statements.push(`INSERT INTO ${tableExpr} (${colExpr}) VALUES (${valExpr});`);
|
||||
});
|
||||
}
|
||||
|
||||
if (ops?.update !== false) {
|
||||
updateRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedUpdate.size > 0 && !selectedUpdate.has(pk)) return;
|
||||
const source = rowWrap?.source || {};
|
||||
const changedColumns = Array.isArray(rowWrap?.changedColumns)
|
||||
? rowWrap.changedColumns
|
||||
: Object.keys(source).filter((k) => k !== pkCol);
|
||||
const setCols = changedColumns.filter((c: string) => String(c) !== pkCol);
|
||||
if (setCols.length === 0) return;
|
||||
const setExpr = setCols
|
||||
.map((c: string) => `${quoteSqlIdent(dbType, c)} = ${toSqlLiteral(source[c], dbType)}`)
|
||||
.join(', ');
|
||||
statements.push(
|
||||
`UPDATE ${tableExpr} SET ${setExpr} WHERE ${quoteSqlIdent(dbType, pkCol)} = ${toSqlLiteral(pk, dbType)};`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
if (ops?.delete) {
|
||||
deleteRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedDelete.size > 0 && !selectedDelete.has(pk)) return;
|
||||
statements.push(
|
||||
`DELETE FROM ${tableExpr} WHERE ${quoteSqlIdent(dbType, pkCol)} = ${toSqlLiteral(pk, dbType)};`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
sqlText: statements.join('\n'),
|
||||
statementCount: statements.length,
|
||||
};
|
||||
};
|
||||
|
||||
const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open, onClose }) => {
|
||||
const connections = useStore((state) => state.connections);
|
||||
const themeMode = useStore((state) => state.theme);
|
||||
const appearance = useStore((state) => state.appearance);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const { token } = antdTheme.useToken();
|
||||
const darkMode = themeMode === 'dark';
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const effectiveOpacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
|
||||
// Step 1: Config
|
||||
const [sourceConnId, setSourceConnId] = useState<string>('');
|
||||
@@ -50,9 +183,13 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const [selectedTables, setSelectedTables] = useState<string[]>([]);
|
||||
|
||||
// Options
|
||||
const [workflowType, setWorkflowType] = useState<WorkflowType>('sync');
|
||||
const [syncContent, setSyncContent] = useState<'data' | 'schema' | 'both'>('data');
|
||||
const [syncMode, setSyncMode] = useState<string>('insert_update');
|
||||
const [autoAddColumns, setAutoAddColumns] = useState<boolean>(true);
|
||||
const [targetTableStrategy, setTargetTableStrategy] = useState<'existing_only' | 'auto_create_if_missing' | 'smart'>('existing_only');
|
||||
const [createIndexes, setCreateIndexes] = useState<boolean>(false);
|
||||
const [mongoCollectionName, setMongoCollectionName] = useState<string>('');
|
||||
const [showSameTables, setShowSameTables] = useState<boolean>(false);
|
||||
const [analyzing, setAnalyzing] = useState<boolean>(false);
|
||||
const [diffTables, setDiffTables] = useState<TableDiffSummary[]>([]);
|
||||
@@ -128,9 +265,12 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setSourceDb('');
|
||||
setTargetDb('');
|
||||
setSelectedTables([]);
|
||||
setWorkflowType('sync');
|
||||
setSyncContent('data');
|
||||
setSyncMode('insert_update');
|
||||
setAutoAddColumns(true);
|
||||
setTargetTableStrategy('existing_only');
|
||||
setCreateIndexes(false);
|
||||
setShowSameTables(false);
|
||||
setAnalyzing(false);
|
||||
setDiffTables([]);
|
||||
@@ -148,36 +288,66 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
useEffect(() => {
|
||||
if (workflowType === 'migration') {
|
||||
if (syncMode === 'insert_update') {
|
||||
setSyncMode('insert_only');
|
||||
}
|
||||
if (syncContent === 'schema') {
|
||||
setSyncContent('both');
|
||||
}
|
||||
if (targetTableStrategy === 'existing_only') {
|
||||
setTargetTableStrategy('smart');
|
||||
}
|
||||
if (!createIndexes) {
|
||||
setCreateIndexes(true);
|
||||
}
|
||||
} else {
|
||||
if (targetTableStrategy !== 'existing_only') {
|
||||
setTargetTableStrategy('existing_only');
|
||||
}
|
||||
if (createIndexes) {
|
||||
setCreateIndexes(false);
|
||||
}
|
||||
}
|
||||
}, [workflowType]);
|
||||
|
||||
const handleSourceConnChange = async (connId: string) => {
|
||||
setSourceConnId(connId);
|
||||
setSourceDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setSourceDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
const dbRows = Array.isArray(res.data) ? res.data : [];
|
||||
setSourceDbs(dbRows
|
||||
.map((r: any) => r?.Database || r?.database || r?.username)
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== ''));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleTargetConnChange = async (connId: string) => {
|
||||
setTargetConnId(connId);
|
||||
setTargetDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setTargetDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
const dbRows = Array.isArray(res.data) ? res.data : [];
|
||||
setTargetDbs(dbRows
|
||||
.map((r: any) => r?.Database || r?.database || r?.username)
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== ''));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const nextToTables = async () => {
|
||||
@@ -189,14 +359,17 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
try {
|
||||
const conn = connections.find(c => c.id === sourceConnId);
|
||||
if (conn) {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tables = (res.data as any[]).map((row: any) => row.Table || row.table || row.TABLE_NAME || Object.values(row)[0]);
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tableRows = Array.isArray(res.data) ? res.data : [];
|
||||
const tables = tableRows
|
||||
.map((row: any) => row?.Table || row?.table || row?.TABLE_NAME || Object.values(row || {})[0])
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== '');
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
message.error(res.message);
|
||||
}
|
||||
}
|
||||
@@ -236,6 +409,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
content: syncContent,
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
targetTableStrategy,
|
||||
createIndexes,
|
||||
mongoCollectionName: mongoCollectionName.trim(),
|
||||
jobId,
|
||||
};
|
||||
|
||||
@@ -286,6 +462,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
content: "data",
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
targetTableStrategy,
|
||||
createIndexes,
|
||||
mongoCollectionName: mongoCollectionName.trim(),
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -362,6 +541,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
content: syncContent,
|
||||
mode: syncMode,
|
||||
autoAddColumns,
|
||||
targetTableStrategy,
|
||||
createIndexes,
|
||||
mongoCollectionName: mongoCollectionName.trim(),
|
||||
tableOptions,
|
||||
jobId,
|
||||
};
|
||||
@@ -402,10 +584,139 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
);
|
||||
};
|
||||
|
||||
const previewSql = useMemo(() => {
|
||||
if (!previewData || !previewTable) return { sqlText: '', statementCount: 0 };
|
||||
const targetType = String(connections.find(c => c.id === targetConnId)?.config?.type || '');
|
||||
const ops = tableOptions[previewTable] || { insert: true, update: true, delete: false };
|
||||
return buildSqlPreview(previewData, previewTable, targetType, ops);
|
||||
}, [previewData, previewTable, targetConnId, connections, tableOptions]);
|
||||
|
||||
const analysisWarnings = useMemo(() => {
|
||||
const items: string[] = [];
|
||||
diffTables.forEach((table) => {
|
||||
(table.warnings || []).forEach((warning) => items.push(`${table.table}: ${warning}`));
|
||||
(table.unsupportedObjects || []).forEach((warning) => items.push(`${table.table}: ${warning}`));
|
||||
});
|
||||
return Array.from(new Set(items));
|
||||
}, [diffTables]);
|
||||
|
||||
const isMigrationWorkflow = workflowType === 'migration';
|
||||
const sourceConn = useMemo(() => connections.find(c => c.id === sourceConnId), [connections, sourceConnId]);
|
||||
const targetConn = useMemo(() => connections.find(c => c.id === targetConnId), [connections, targetConnId]);
|
||||
const sourceType = String(sourceConn?.config?.type || '').toLowerCase();
|
||||
const targetType = String(targetConn?.config?.type || '').toLowerCase();
|
||||
const isRedisMongoKeyspaceMigration = isMigrationWorkflow && (
|
||||
(sourceType === 'redis' && targetType === 'mongodb') ||
|
||||
(sourceType === 'mongodb' && targetType === 'redis')
|
||||
);
|
||||
const defaultMongoCollectionName = useMemo(() => {
|
||||
if (sourceType === 'redis' && targetType === 'mongodb') {
|
||||
return `redis_db_${resolveRedisDbIndex(sourceDb || sourceConn?.config?.database)}_keys`;
|
||||
}
|
||||
if (sourceType === 'mongodb' && targetType === 'redis') {
|
||||
return selectedTables[0] || `redis_db_${resolveRedisDbIndex(targetDb || targetConn?.config?.database)}_keys`;
|
||||
}
|
||||
return '';
|
||||
}, [sourceType, targetType, sourceDb, targetDb, sourceConn, targetConn, selectedTables]);
|
||||
|
||||
const modalPanelStyle = useMemo(() => ({
|
||||
background: darkMode
|
||||
? 'linear-gradient(180deg, rgba(16,22,34,0.96) 0%, rgba(10,14,24,0.98) 100%)'
|
||||
: 'linear-gradient(180deg, rgba(255,255,255,0.98) 0%, rgba(246,248,252,0.98) 100%)',
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(16,24,40,0.08)',
|
||||
boxShadow: darkMode ? '0 24px 56px rgba(0,0,0,0.36)' : '0 18px 44px rgba(15,23,42,0.14)',
|
||||
backdropFilter: darkMode ? 'blur(18px)' : 'none',
|
||||
}), [darkMode]);
|
||||
|
||||
const shellCardStyle = useMemo<React.CSSProperties>(() => ({
|
||||
borderRadius: 18,
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.08)',
|
||||
background: darkMode ? 'rgba(255,255,255,0.03)' : `rgba(255,255,255,${Math.max(effectiveOpacity, 0.88)})`,
|
||||
boxShadow: darkMode ? '0 12px 32px rgba(0,0,0,0.22)' : '0 10px 24px rgba(15,23,42,0.08)',
|
||||
overflow: 'hidden',
|
||||
}), [darkMode, effectiveOpacity]);
|
||||
|
||||
const heroPanelStyle = useMemo<React.CSSProperties>(() => ({
|
||||
padding: 18,
|
||||
borderRadius: 18,
|
||||
border: darkMode ? '1px solid rgba(255,214,102,0.12)' : '1px solid rgba(24,144,255,0.12)',
|
||||
background: darkMode
|
||||
? 'linear-gradient(135deg, rgba(255,214,102,0.10) 0%, rgba(255,255,255,0.03) 100%)'
|
||||
: 'linear-gradient(135deg, rgba(24,144,255,0.10) 0%, rgba(255,255,255,0.95) 100%)',
|
||||
marginBottom: 18,
|
||||
}), [darkMode]);
|
||||
|
||||
const badgeStyle = useMemo<React.CSSProperties>(() => ({
|
||||
display: 'inline-flex',
|
||||
alignItems: 'center',
|
||||
gap: 6,
|
||||
padding: '6px 10px',
|
||||
borderRadius: 999,
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.10)' : '1px solid rgba(15,23,42,0.08)',
|
||||
background: darkMode ? 'rgba(255,255,255,0.04)' : 'rgba(255,255,255,0.86)',
|
||||
color: darkMode ? 'rgba(255,255,255,0.88)' : '#334155',
|
||||
fontSize: 12,
|
||||
fontWeight: 600,
|
||||
}), [darkMode]);
|
||||
|
||||
const quietPanelStyle = useMemo<React.CSSProperties>(() => ({
|
||||
padding: 14,
|
||||
borderRadius: 16,
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.08)',
|
||||
background: darkMode ? 'rgba(255,255,255,0.025)' : 'rgba(248,250,252,0.92)',
|
||||
}), [darkMode]);
|
||||
|
||||
const modalWorkspaceStyle = useMemo<React.CSSProperties>(() => ({
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
height: '100%',
|
||||
minHeight: 0,
|
||||
}), []);
|
||||
|
||||
const modalScrollableContentStyle = useMemo<React.CSSProperties>(() => ({
|
||||
flex: 1,
|
||||
minHeight: 0,
|
||||
overflowY: 'auto',
|
||||
overflowX: 'hidden',
|
||||
paddingRight: 4,
|
||||
overscrollBehavior: 'contain',
|
||||
}), []);
|
||||
|
||||
const modalFooterBarStyle = useMemo<React.CSSProperties>(() => ({
|
||||
marginTop: 18,
|
||||
display: 'flex',
|
||||
justifyContent: 'flex-end',
|
||||
gap: 8,
|
||||
paddingTop: 12,
|
||||
borderTop: darkMode ? '1px solid rgba(255,255,255,0.06)' : '1px solid rgba(15,23,42,0.06)',
|
||||
flex: '0 0 auto',
|
||||
}), [darkMode]);
|
||||
|
||||
const renderModalTitle = (title: string, description: string) => (
|
||||
<div style={{ display: 'flex', alignItems: 'flex-start', gap: 12 }}>
|
||||
<div style={{
|
||||
width: 38,
|
||||
height: 38,
|
||||
borderRadius: 14,
|
||||
display: 'grid',
|
||||
placeItems: 'center',
|
||||
background: darkMode ? 'rgba(255,214,102,0.12)' : 'rgba(24,144,255,0.10)',
|
||||
color: darkMode ? '#ffd666' : token.colorPrimary,
|
||||
flexShrink: 0,
|
||||
}}>
|
||||
{isMigrationWorkflow ? <RocketOutlined /> : <SwapOutlined />}
|
||||
</div>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontSize: 16, fontWeight: 700, color: darkMode ? '#f8fafc' : '#0f172a' }}>{title}</div>
|
||||
<div style={{ marginTop: 4, fontSize: 12, lineHeight: 1.6, color: darkMode ? 'rgba(255,255,255,0.56)' : 'rgba(15,23,42,0.58)' }}>{description}</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal
|
||||
title="数据同步"
|
||||
title={renderModalTitle(isMigrationWorkflow ? '跨库迁移工作台' : '数据同步工作台', isMigrationWorkflow ? '按源库 → 目标库完成建表、导入与风险预检。' : '按已有目标表完成差异对比、同步执行与结果确认。')}
|
||||
open={open}
|
||||
onCancel={() => {
|
||||
if (syncing) {
|
||||
@@ -414,23 +725,61 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
}
|
||||
onClose();
|
||||
}}
|
||||
width={800}
|
||||
width={920}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
closable={!syncing}
|
||||
maskClosable={!syncing}
|
||||
styles={{
|
||||
content: modalPanelStyle,
|
||||
header: { background: 'transparent', borderBottom: 'none', paddingBottom: 10 },
|
||||
body: {
|
||||
paddingTop: 8,
|
||||
height: 760,
|
||||
maxHeight: 'calc(100vh - 120px)',
|
||||
overflow: 'hidden',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
},
|
||||
footer: { background: 'transparent', borderTop: 'none', paddingTop: 12 },
|
||||
}}
|
||||
>
|
||||
<div style={modalWorkspaceStyle}>
|
||||
<div style={{ flex: '0 0 auto' }}>
|
||||
<div style={heroPanelStyle}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', gap: 12, alignItems: 'flex-start', flexWrap: 'wrap' }}>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontSize: 18, fontWeight: 700, color: darkMode ? '#f8fafc' : '#0f172a' }}>{isMigrationWorkflow ? '跨数据源迁移' : '数据同步'}</div>
|
||||
<div style={{ marginTop: 6, fontSize: 13, lineHeight: 1.7, color: darkMode ? 'rgba(255,255,255,0.62)' : 'rgba(15,23,42,0.62)' }}>
|
||||
{isMigrationWorkflow
|
||||
? '适合把源表迁移到另一套数据库,可按策略自动建表、导入数据并补建可兼容索引。'
|
||||
: '适合目标表已存在的场景,先做差异分析,再按勾选执行插入、更新或删除。'}
|
||||
</div>
|
||||
</div>
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap', gap: 8 }}>
|
||||
<span style={badgeStyle}>{isMigrationWorkflow ? <RocketOutlined /> : <SwapOutlined />} {isMigrationWorkflow ? '迁移模式' : '同步模式'}</span>
|
||||
<span style={badgeStyle}><DatabaseOutlined /> {sourceConnId ? '已选源连接' : '待选源连接'}</span>
|
||||
<span style={badgeStyle}><TableOutlined /> {selectedTables.length || 0} 张表</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<Steps current={currentStep} style={{ marginBottom: 24 }}>
|
||||
<Step title="配置源与目标" />
|
||||
<Step title="选择表" />
|
||||
<Step title="执行结果" />
|
||||
</Steps>
|
||||
</div>
|
||||
|
||||
<div style={modalScrollableContentStyle}>
|
||||
{/* STEP 1: CONFIG */}
|
||||
{currentStep === 0 && (
|
||||
<div>
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<div style={{ display: 'grid', gridTemplateColumns: 'minmax(0, 1fr) 44px minmax(0, 1fr)', gap: 18, alignItems: 'stretch' }}>
|
||||
<Card
|
||||
title="源数据库"
|
||||
style={shellCardStyle}
|
||||
styles={{ header: { borderBottom: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)', fontWeight: 700 }, body: { padding: 18 } }}
|
||||
>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
@@ -444,8 +793,16 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<div style={{ display: 'grid', placeItems: 'center' }}>
|
||||
<div style={{ ...badgeStyle, width: 44, height: 44, borderRadius: 14, justifyContent: 'center', padding: 0 }}>
|
||||
<SwapOutlined />
|
||||
</div>
|
||||
</div>
|
||||
<Card
|
||||
title="目标数据库"
|
||||
style={shellCardStyle}
|
||||
styles={{ header: { borderBottom: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)', fontWeight: 700 }, body: { padding: 18 } }}
|
||||
>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
@@ -461,27 +818,94 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<Card title="同步选项" style={{ marginTop: 16 }}>
|
||||
<Card
|
||||
title={isMigrationWorkflow ? '迁移选项' : '同步选项'}
|
||||
style={{ ...shellCardStyle, marginTop: 18 }}
|
||||
styles={{ header: { borderBottom: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)', fontWeight: 700 }, body: { padding: 18 } }}
|
||||
>
|
||||
<div style={{ ...quietPanelStyle, marginBottom: 14 }}>
|
||||
<Text style={{ color: darkMode ? 'rgba(255,255,255,0.72)' : 'rgba(15,23,42,0.68)', lineHeight: 1.7 }}>
|
||||
先明确当前要做的是“已有目标表同步”还是“跨库迁移”,页面会按功能类型自动给出更安全的默认策略。
|
||||
</Text>
|
||||
</div>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="同步内容">
|
||||
<Form.Item label="功能类型">
|
||||
<Select value={workflowType} onChange={setWorkflowType}>
|
||||
<Option value="sync">数据同步(基于已有目标表做差异同步)</Option>
|
||||
<Option value="migration">跨库迁移(可自动建表后导入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Alert
|
||||
type={isMigrationWorkflow ? 'info' : 'success'}
|
||||
showIcon
|
||||
style={{ marginBottom: 12 }}
|
||||
message={isMigrationWorkflow
|
||||
? '当前为“跨库迁移”模式:适合将表迁移到另一数据源,可自动建表并导入数据。'
|
||||
: '当前为“数据同步”模式:适合目标表已存在时做增量同步或覆盖导入。'}
|
||||
/>
|
||||
<Form.Item label={isMigrationWorkflow ? '迁移内容' : '同步内容'}>
|
||||
<Select value={syncContent} onChange={setSyncContent}>
|
||||
<Option value="data">仅同步数据</Option>
|
||||
<Option value="schema">仅同步结构</Option>
|
||||
<Option value="both">同步结构 + 数据</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="同步模式">
|
||||
<Form.Item label={isMigrationWorkflow ? '迁移模式' : '同步模式'}>
|
||||
<Select value={syncMode} onChange={setSyncMode} disabled={syncContent === 'schema'}>
|
||||
<Option value="insert_update">增量同步(对比差异,按插入/更新/删除勾选执行)</Option>
|
||||
<Option value="insert_only">仅插入(不对比目标;无主键表将跳过)</Option>
|
||||
<Option value="full_overwrite">全量覆盖(清空目标表后插入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label={isMigrationWorkflow ? '目标表处理策略' : '目标表要求'}>
|
||||
<Select value={targetTableStrategy} onChange={setTargetTableStrategy} disabled={!isMigrationWorkflow}>
|
||||
<Option value="existing_only">仅使用已有目标表</Option>
|
||||
<Option value="auto_create_if_missing">目标表不存在时自动建表后导入</Option>
|
||||
<Option value="smart">智能模式(存在则直接导入,不存在则自动建表)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
{isRedisMongoKeyspaceMigration && (
|
||||
<Form.Item
|
||||
label="Mongo 集合名(可选)"
|
||||
extra={sourceType === 'redis'
|
||||
? '为空时沿用默认集合名;填写后本次 Redis 键空间会统一写入该 Mongo 集合。'
|
||||
: 'MongoDB → Redis 场景下通常直接选择源集合;这里留空即可,未显式选集合时才会回退使用该名称。'}
|
||||
>
|
||||
<Input
|
||||
value={mongoCollectionName}
|
||||
onChange={(e) => setMongoCollectionName(e.target.value)}
|
||||
placeholder={defaultMongoCollectionName || '请输入 Mongo 集合名'}
|
||||
allowClear
|
||||
maxLength={128}
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
<Form.Item>
|
||||
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)}>
|
||||
自动补齐目标表缺失字段(仅 MySQL 目标)
|
||||
自动补齐目标表缺失字段(当前支持 MySQL 目标及 MySQL → Kingbase)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
<Form.Item>
|
||||
<Checkbox checked={createIndexes} onChange={(e) => setCreateIndexes(e.target.checked)} disabled={!isMigrationWorkflow || targetTableStrategy === 'existing_only'}>
|
||||
自动迁移可兼容的普通索引/唯一索引(仅自动建表模式生效)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
{isMigrationWorkflow && targetTableStrategy !== 'existing_only' && (
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="自动建表模式首期仅支持 MySQL → Kingbase;将迁移字段、主键、普通/唯一/联合索引,并显式跳过全文、空间、前缀、函数类索引。"
|
||||
style={{ marginBottom: 12 }}
|
||||
/>
|
||||
)}
|
||||
{!isMigrationWorkflow && (
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="数据同步模式默认基于已有目标表执行;如需跨数据源建表导入,请切换到“跨库迁移”。"
|
||||
style={{ marginBottom: 12 }}
|
||||
/>
|
||||
)}
|
||||
{syncContent !== 'schema' && syncMode === 'full_overwrite' && (
|
||||
<Alert
|
||||
type="warning"
|
||||
@@ -496,26 +920,42 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 2: TABLES */}
|
||||
{currentStep === 1 && (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 12 }}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 14 }}>
|
||||
<div style={quietPanelStyle}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: 10 }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
|
||||
显示相同表
|
||||
</Checkbox>
|
||||
</div>
|
||||
<Transfer
|
||||
</div>
|
||||
<Transfer
|
||||
dataSource={allTables.map(t => ({ key: t, title: t }))}
|
||||
titles={['源表', '已选表']}
|
||||
targetKeys={selectedTables}
|
||||
onChange={(keys) => setSelectedTables(keys as string[])}
|
||||
render={item => item.title}
|
||||
listStyle={{ width: 350, height: 280, marginTop: 0 }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表', notFoundContent: '暂无数据' }}
|
||||
listStyle={{ width: 390, height: 320, marginTop: 0, borderRadius: 14, overflow: 'hidden' }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表…', notFoundContent: '暂无数据' }}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{diffTables.length > 0 && (
|
||||
<div>
|
||||
<Divider orientation="left">对比结果</Divider>
|
||||
<div style={quietPanelStyle}>
|
||||
<Divider orientation="left" style={{ marginTop: 0 }}>对比结果</Divider>
|
||||
{analysisWarnings.length > 0 && (
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="预检发现风险或降级项,请在执行前确认"
|
||||
description={
|
||||
<ul style={{ margin: 0, paddingLeft: 18 }}>
|
||||
{analysisWarnings.slice(0, 8).map((item) => <li key={item}>{item}</li>)}
|
||||
{analysisWarnings.length > 8 && <li>还有 {analysisWarnings.length - 8} 项未展开</li>}
|
||||
</ul>
|
||||
}
|
||||
style={{ marginBottom: 12 }}
|
||||
/>
|
||||
)}
|
||||
<Table
|
||||
size="small"
|
||||
pagination={false}
|
||||
@@ -527,13 +967,29 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const same = Number(t.same || 0);
|
||||
const msg = String(t.message || '').trim();
|
||||
const can = !!t.canSync;
|
||||
const warns = Array.isArray(t.warnings) ? t.warnings.length : 0;
|
||||
const unsupported = Array.isArray(t.unsupportedObjects) ? t.unsupportedObjects.length : 0;
|
||||
if (showSameTables) return true;
|
||||
if (!can) return true;
|
||||
if (msg) return true;
|
||||
if (msg || warns > 0 || unsupported > 0) return true;
|
||||
return ins > 0 || upd > 0 || del > 0 || same === 0;
|
||||
})}
|
||||
columns={[
|
||||
{ title: '表名', dataIndex: 'table', key: 'table', ellipsis: true },
|
||||
{
|
||||
title: '目标表',
|
||||
key: 'targetTableExists',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => r.targetTableExists ? '已存在' : '不存在'
|
||||
},
|
||||
{
|
||||
title: '计划',
|
||||
dataIndex: 'plannedAction',
|
||||
key: 'plannedAction',
|
||||
width: 220,
|
||||
ellipsis: true,
|
||||
render: (v: any) => String(v || '')
|
||||
},
|
||||
{
|
||||
title: '插入',
|
||||
key: 'inserts',
|
||||
@@ -542,11 +998,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.inserts || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.insert}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}
|
||||
>
|
||||
<Checkbox checked={!!ops.insert} disabled={disabled} onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}>
|
||||
{Number(r.inserts || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
@@ -560,11 +1012,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.updates || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.update}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}
|
||||
>
|
||||
<Checkbox checked={!!ops.update} disabled={disabled} onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}>
|
||||
{Number(r.updates || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
@@ -578,18 +1026,28 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.deletes || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.delete}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}
|
||||
>
|
||||
<Checkbox checked={!!ops.delete} disabled={disabled} onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}>
|
||||
{Number(r.deletes || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{ title: '相同', dataIndex: 'same', key: 'same', width: 70, render: (v: any) => Number(v || 0) },
|
||||
{ title: '消息', dataIndex: 'message', key: 'message', ellipsis: true, render: (v: any) => (v ? String(v) : '') },
|
||||
{
|
||||
title: '风险',
|
||||
key: 'warnings',
|
||||
width: 220,
|
||||
render: (_: any, r: any) => {
|
||||
const warns = [...(Array.isArray(r.warnings) ? r.warnings : []), ...(Array.isArray(r.unsupportedObjects) ? r.unsupportedObjects : [])];
|
||||
if (warns.length === 0) return '-';
|
||||
return (
|
||||
<div style={{ color: '#d48806', fontSize: 12, lineHeight: 1.5 }}>
|
||||
{warns.slice(0, 2).map((item: string) => <div key={item}>{item}</div>)}
|
||||
{warns.length > 2 && <div>还有 {warns.length - 2} 项</div>}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '预览',
|
||||
key: 'preview',
|
||||
@@ -613,7 +1071,8 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 3: RESULT */}
|
||||
{currentStep === 2 && (
|
||||
<div>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 14 }}>
|
||||
<div style={quietPanelStyle}>
|
||||
<Alert
|
||||
message={syncing ? "正在同步" : (syncResult?.success ? "同步完成" : "同步失败")}
|
||||
description={
|
||||
@@ -625,7 +1084,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
showIcon
|
||||
/>
|
||||
|
||||
<div style={{ marginTop: 12 }}>
|
||||
<div style={{ marginTop: 14 }}>
|
||||
<Progress
|
||||
percent={syncProgress.percent}
|
||||
status={syncing ? "active" : (syncResult?.success ? "success" : "exception")}
|
||||
@@ -633,7 +1092,9 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Divider orientation="left">日志</Divider>
|
||||
</div>
|
||||
<div style={quietPanelStyle}>
|
||||
<Divider orientation="left" style={{ marginTop: 0 }}>执行日志</Divider>
|
||||
<div
|
||||
ref={logBoxRef}
|
||||
onScroll={() => {
|
||||
@@ -642,14 +1103,25 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const nearBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
|
||||
autoScrollRef.current = nearBottom;
|
||||
}}
|
||||
style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}
|
||||
style={{
|
||||
background: darkMode ? 'rgba(255,255,255,0.03)' : 'rgba(248,250,252,0.92)',
|
||||
border: darkMode ? '1px solid rgba(255,255,255,0.08)' : '1px solid rgba(15,23,42,0.06)',
|
||||
borderRadius: 14,
|
||||
padding: 12,
|
||||
height: 300,
|
||||
overflowY: 'auto',
|
||||
fontFamily: 'SFMono-Regular, ui-monospace, Menlo, Consolas, monospace'
|
||||
}}
|
||||
>
|
||||
{syncLogs.map((item, i: number) => <div key={i}>{renderSyncLogItem(item)}</div>)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ marginTop: 24, textAlign: 'right' }}>
|
||||
</div>
|
||||
|
||||
<div style={modalFooterBarStyle}>
|
||||
{currentStep === 0 && (
|
||||
<Button type="primary" onClick={nextToTables} loading={loading}>下一步</Button>
|
||||
)}
|
||||
@@ -676,14 +1148,16 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
<Drawer
|
||||
title={`差异预览:${previewTable}`}
|
||||
styles={{ body: { background: darkMode ? 'rgba(9,13,20,0.98)' : '#f8fafc' } }}
|
||||
open={previewOpen}
|
||||
onClose={() => { setPreviewOpen(false); setPreviewTable(''); setPreviewData(null); }}
|
||||
width={900}
|
||||
>
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览..." />}
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览…" />}
|
||||
{!previewLoading && previewData && (
|
||||
<div>
|
||||
<Alert
|
||||
@@ -794,6 +1268,51 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'sql',
|
||||
label: `SQL(${previewSql.statementCount})`,
|
||||
children: (
|
||||
<div>
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="SQL 预览会按当前勾选的插入/更新/删除与行选择范围生成,用于审核确认。"
|
||||
/>
|
||||
<div style={{ marginTop: 8, marginBottom: 8, display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">共 {previewSql.statementCount} 条语句(预览数据最多 200 条/类型)</Text>
|
||||
<Button
|
||||
size="small"
|
||||
disabled={!previewSql.sqlText}
|
||||
onClick={async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(previewSql.sqlText || '');
|
||||
message.success('SQL 已复制');
|
||||
} catch {
|
||||
message.error('复制失败,请手动复制');
|
||||
}
|
||||
}}
|
||||
>
|
||||
复制 SQL
|
||||
</Button>
|
||||
</div>
|
||||
<pre
|
||||
style={{
|
||||
margin: 0,
|
||||
padding: 10,
|
||||
border: '1px solid #f0f0f0',
|
||||
borderRadius: 6,
|
||||
background: '#fafafa',
|
||||
maxHeight: 420,
|
||||
overflow: 'auto',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-word'
|
||||
}}
|
||||
>
|
||||
{previewSql.sqlText || '-- 当前勾选范围下无 SQL 可预览'}
|
||||
</pre>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
|
||||
@@ -1,12 +1,207 @@
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef, useMemo } from 'react';
|
||||
import { message } from 'antd';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { buildWhereSQL, quoteIdentPart, quoteQualifiedIdent } from '../utils/sql';
|
||||
import { buildOrderBySQL, buildPaginatedSelectSQL, buildWhereSQL, quoteIdentPart, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
import { buildMongoCountCommand, buildMongoFilter, buildMongoFindCommand, buildMongoSort } from '../utils/mongodb';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
|
||||
type ViewerPaginationState = {
|
||||
current: number;
|
||||
pageSize: number;
|
||||
total: number;
|
||||
totalKnown: boolean;
|
||||
totalApprox: boolean;
|
||||
totalCountLoading: boolean;
|
||||
totalCountCancelled: boolean;
|
||||
};
|
||||
|
||||
const JS_MAX_SAFE_INTEGER_BIGINT = BigInt(Number.MAX_SAFE_INTEGER);
|
||||
|
||||
const isIntegerText = (text: string): boolean => /^[+-]?\d+$/.test(text);
|
||||
|
||||
const toNonNegativeFiniteNumber = (value: unknown): number | null => {
|
||||
if (typeof value === 'number') {
|
||||
return Number.isFinite(value) && value >= 0 && value <= Number.MAX_SAFE_INTEGER ? value : null;
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return value >= 0n && value <= JS_MAX_SAFE_INTEGER_BIGINT ? Number(value) : null;
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
const text = value.trim();
|
||||
if (!text) return null;
|
||||
if (isIntegerText(text)) {
|
||||
try {
|
||||
const parsedBigInt = BigInt(text);
|
||||
if (parsedBigInt < 0n || parsedBigInt > JS_MAX_SAFE_INTEGER_BIGINT) {
|
||||
return null;
|
||||
}
|
||||
return Number(parsedBigInt);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
const parsed = Number(text);
|
||||
return Number.isFinite(parsed) && parsed >= 0 && parsed <= Number.MAX_SAFE_INTEGER ? parsed : null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const parseTotalFromCountRow = (row: any): number | null => {
|
||||
if (!row || typeof row !== 'object') return null;
|
||||
const entries = Object.entries(row as Record<string, unknown>);
|
||||
if (entries.length === 0) return null;
|
||||
|
||||
for (const [key, raw] of entries) {
|
||||
const normalized = String(key || '').trim().toLowerCase();
|
||||
if (normalized === 'total' || normalized === 'count' || normalized.includes('count')) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [, raw] of entries) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
const parseDuckDBApproxTotalRow = (row: any): number | null => {
|
||||
if (!row || typeof row !== 'object') return null;
|
||||
const entries = Object.entries(row as Record<string, unknown>);
|
||||
if (entries.length === 0) return null;
|
||||
|
||||
const preferredKeys = ['approx_total', 'estimated_size', 'estimated_rows', 'row_count', 'count', 'total'];
|
||||
for (const preferred of preferredKeys) {
|
||||
for (const [key, raw] of entries) {
|
||||
if (String(key || '').trim().toLowerCase() !== preferred) continue;
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [key, raw] of entries) {
|
||||
const normalized = String(key || '').trim().toLowerCase();
|
||||
if (normalized.includes('estimate') || normalized.includes('row') || normalized.includes('count') || normalized.includes('total')) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const normalizeDuckDBIdentifier = (raw: string): string => {
|
||||
const text = String(raw || '').trim();
|
||||
if (text.length >= 2) {
|
||||
const first = text[0];
|
||||
const last = text[text.length - 1];
|
||||
if ((first === '"' && last === '"') || (first === '`' && last === '`')) {
|
||||
return text.slice(1, -1).trim();
|
||||
}
|
||||
}
|
||||
return text;
|
||||
};
|
||||
|
||||
const resolveDuckDBSchemaAndTable = (dbName: string, tableName: string) => {
|
||||
const rawTable = String(tableName || '').trim();
|
||||
if (!rawTable) return { schemaName: 'main', pureTableName: '' };
|
||||
|
||||
const parts = rawTable.split('.');
|
||||
if (parts.length >= 2) {
|
||||
const pureTableName = normalizeDuckDBIdentifier(parts[parts.length - 1]);
|
||||
const schemaName = normalizeDuckDBIdentifier(parts[parts.length - 2]);
|
||||
if (schemaName && pureTableName) {
|
||||
return { schemaName, pureTableName };
|
||||
}
|
||||
}
|
||||
|
||||
const fallbackSchema = normalizeDuckDBIdentifier(String(dbName || '').trim()) || 'main';
|
||||
return { schemaName: fallbackSchema, pureTableName: normalizeDuckDBIdentifier(rawTable) };
|
||||
};
|
||||
|
||||
const escapeSQLLiteral = (value: string): string => String(value || '').replace(/'/g, "''");
|
||||
|
||||
const isDuckDBUnsupportedTypeError = (msg: string): boolean => /unsupported\s*type:\s*duckdb\./i.test(String(msg || ''));
|
||||
|
||||
const isDuckDBComplexColumnType = (columnType?: string): boolean => {
|
||||
const raw = String(columnType || '').trim().toLowerCase();
|
||||
if (!raw) return false;
|
||||
return raw.includes('map') || raw.includes('struct') || raw.includes('union') || raw.includes('array') || raw.includes('list');
|
||||
};
|
||||
|
||||
const reverseOrderBySQL = (orderBySQL: string): string => {
|
||||
const raw = String(orderBySQL || '').trim();
|
||||
if (!raw) return '';
|
||||
const body = raw.replace(/^order\s+by\s+/i, '').trim();
|
||||
if (!body) return '';
|
||||
|
||||
const parts = body
|
||||
.split(',')
|
||||
.map((part) => part.trim())
|
||||
.filter(Boolean)
|
||||
.map((part) => {
|
||||
if (/\s+asc$/i.test(part)) return part.replace(/\s+asc$/i, ' DESC');
|
||||
if (/\s+desc$/i.test(part)) return part.replace(/\s+desc$/i, ' ASC');
|
||||
return `${part} DESC`;
|
||||
});
|
||||
if (parts.length === 0) return '';
|
||||
return ` ORDER BY ${parts.join(', ')}`;
|
||||
};
|
||||
|
||||
type ViewerFilterSnapshot = {
|
||||
showFilter: boolean;
|
||||
conditions: FilterCondition[];
|
||||
currentPage: number;
|
||||
pageSize: number;
|
||||
sortInfo: { columnKey: string, order: string } | null;
|
||||
scrollTop: number;
|
||||
scrollLeft: number;
|
||||
};
|
||||
|
||||
type ViewerScrollSnapshot = {
|
||||
top: number;
|
||||
left: number;
|
||||
};
|
||||
|
||||
const viewerFilterSnapshotsByTab = new Map<string, ViewerFilterSnapshot>();
|
||||
|
||||
const normalizeViewerFilterConditions = (conditions: FilterCondition[] | undefined): FilterCondition[] => {
|
||||
if (!Array.isArray(conditions)) return [];
|
||||
return conditions.map((cond) => ({
|
||||
id: Number.isFinite(Number(cond?.id)) ? Number(cond?.id) : undefined,
|
||||
enabled: cond?.enabled !== false,
|
||||
logic: String(cond?.logic || '').trim().toUpperCase() === 'OR' ? 'OR' : 'AND',
|
||||
column: String(cond?.column || ''),
|
||||
op: String(cond?.op || '='),
|
||||
value: String(cond?.value ?? ''),
|
||||
value2: String(cond?.value2 ?? ''),
|
||||
}));
|
||||
};
|
||||
|
||||
const getViewerFilterSnapshot = (tabId: string): ViewerFilterSnapshot => {
|
||||
const cached = viewerFilterSnapshotsByTab.get(String(tabId || '').trim());
|
||||
if (!cached) {
|
||||
return { showFilter: false, conditions: [], currentPage: 1, pageSize: 100, sortInfo: null, scrollTop: 0, scrollLeft: 0 };
|
||||
}
|
||||
return {
|
||||
showFilter: cached.showFilter === true,
|
||||
conditions: normalizeViewerFilterConditions(cached.conditions),
|
||||
currentPage: Number.isFinite(Number(cached.currentPage)) && Number(cached.currentPage) > 0 ? Number(cached.currentPage) : 1,
|
||||
pageSize: Number.isFinite(Number(cached.pageSize)) && Number(cached.pageSize) > 0 ? Number(cached.pageSize) : 100,
|
||||
sortInfo: cached.sortInfo && cached.sortInfo.columnKey && (cached.sortInfo.order === 'ascend' || cached.sortInfo.order === 'descend')
|
||||
? { columnKey: String(cached.sortInfo.columnKey), order: cached.sortInfo.order }
|
||||
: null,
|
||||
scrollTop: Number.isFinite(Number(cached.scrollTop)) ? Number(cached.scrollTop) : 0,
|
||||
scrollLeft: Number.isFinite(Number(cached.scrollLeft)) ? Number(cached.scrollLeft) : 0,
|
||||
};
|
||||
};
|
||||
|
||||
const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const initialViewerSnapshot = useMemo(() => getViewerFilterSnapshot(tab.id), [tab.id]);
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
const [columnNames, setColumnNames] = useState<string[]>([]);
|
||||
const [pkColumns, setPkColumns] = useState<string[]>([]);
|
||||
@@ -16,27 +211,174 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const fetchSeqRef = useRef(0);
|
||||
const countSeqRef = useRef(0);
|
||||
const countKeyRef = useRef<string>('');
|
||||
const duckdbApproxSeqRef = useRef(0);
|
||||
const duckdbApproxKeyRef = useRef<string>('');
|
||||
const manualCountSeqRef = useRef(0);
|
||||
const manualCountKeyRef = useRef<string>('');
|
||||
const pkSeqRef = useRef(0);
|
||||
const pkKeyRef = useRef<string>('');
|
||||
const latestConfigRef = useRef<any>(null);
|
||||
const latestDbTypeRef = useRef<string>('');
|
||||
const latestDbNameRef = useRef<string>('');
|
||||
const latestCountSqlRef = useRef<string>('');
|
||||
const latestCountKeyRef = useRef<string>('');
|
||||
const scrollSnapshotRef = useRef<ViewerScrollSnapshot>({
|
||||
top: initialViewerSnapshot.scrollTop,
|
||||
left: initialViewerSnapshot.scrollLeft,
|
||||
});
|
||||
const initialLoadRef = useRef(false);
|
||||
|
||||
const [pagination, setPagination] = useState({
|
||||
current: 1,
|
||||
pageSize: 100,
|
||||
const [pagination, setPagination] = useState<ViewerPaginationState>({
|
||||
current: initialViewerSnapshot.currentPage,
|
||||
pageSize: initialViewerSnapshot.pageSize,
|
||||
total: 0,
|
||||
totalKnown: false
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
});
|
||||
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(null);
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(initialViewerSnapshot.sortInfo);
|
||||
|
||||
const [showFilter, setShowFilter] = useState(false);
|
||||
const [filterConditions, setFilterConditions] = useState<any[]>([]);
|
||||
const [showFilter, setShowFilter] = useState<boolean>(initialViewerSnapshot.showFilter);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>(initialViewerSnapshot.conditions);
|
||||
const duckdbSafeSelectCacheRef = useRef<Record<string, string>>({});
|
||||
const currentConnConfig = connections.find(c => c.id === tab.connectionId)?.config;
|
||||
const currentConnCaps = getDataSourceCapabilities(currentConnConfig);
|
||||
const currentConnType = currentConnCaps.type;
|
||||
const forceReadOnly = currentConnCaps.forceReadOnlyQueryResult;
|
||||
|
||||
useEffect(() => {
|
||||
const snapshot = getViewerFilterSnapshot(tab.id);
|
||||
setShowFilter(snapshot.showFilter);
|
||||
setFilterConditions(snapshot.conditions);
|
||||
setSortInfo(snapshot.sortInfo);
|
||||
scrollSnapshotRef.current = { top: snapshot.scrollTop, left: snapshot.scrollLeft };
|
||||
initialLoadRef.current = false;
|
||||
}, [tab.id]);
|
||||
|
||||
useEffect(() => {
|
||||
viewerFilterSnapshotsByTab.set(tab.id, {
|
||||
showFilter,
|
||||
conditions: normalizeViewerFilterConditions(filterConditions),
|
||||
currentPage: pagination.current,
|
||||
pageSize: pagination.pageSize,
|
||||
sortInfo,
|
||||
scrollTop: scrollSnapshotRef.current.top,
|
||||
scrollLeft: scrollSnapshotRef.current.left,
|
||||
});
|
||||
}, [tab.id, showFilter, filterConditions, pagination.current, pagination.pageSize, sortInfo]);
|
||||
|
||||
useEffect(() => {
|
||||
const snapshot = getViewerFilterSnapshot(tab.id);
|
||||
setPkColumns([]);
|
||||
pkKeyRef.current = '';
|
||||
countKeyRef.current = '';
|
||||
setPagination(prev => ({ ...prev, current: 1, total: 0, totalKnown: false }));
|
||||
}, [tab.connectionId, tab.dbName, tab.tableName]);
|
||||
duckdbApproxKeyRef.current = '';
|
||||
manualCountKeyRef.current = '';
|
||||
duckdbSafeSelectCacheRef.current = {};
|
||||
latestConfigRef.current = null;
|
||||
latestDbTypeRef.current = '';
|
||||
latestDbNameRef.current = '';
|
||||
latestCountSqlRef.current = '';
|
||||
latestCountKeyRef.current = '';
|
||||
scrollSnapshotRef.current = { top: snapshot.scrollTop, left: snapshot.scrollLeft };
|
||||
initialLoadRef.current = false;
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
current: snapshot.currentPage,
|
||||
pageSize: snapshot.pageSize,
|
||||
total: 0,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
}, [tab.id, tab.connectionId, tab.dbName, tab.tableName]);
|
||||
|
||||
const handleTableScrollSnapshotChange = useCallback((snapshot: ViewerScrollSnapshot) => {
|
||||
scrollSnapshotRef.current = snapshot;
|
||||
const cached = getViewerFilterSnapshot(tab.id);
|
||||
viewerFilterSnapshotsByTab.set(tab.id, {
|
||||
...cached,
|
||||
scrollTop: snapshot.top,
|
||||
scrollLeft: snapshot.left,
|
||||
});
|
||||
}, [tab.id]);
|
||||
|
||||
const handleDuckDBManualCount = useCallback(async () => {
|
||||
if (latestDbTypeRef.current !== 'duckdb') {
|
||||
return;
|
||||
}
|
||||
const config = latestConfigRef.current;
|
||||
const dbName = latestDbNameRef.current;
|
||||
const countSql = latestCountSqlRef.current;
|
||||
const countKey = latestCountKeyRef.current;
|
||||
|
||||
if (!config || !countSql || !countKey) {
|
||||
message.warning('当前结果集尚未就绪,请先执行一次加载');
|
||||
return;
|
||||
}
|
||||
|
||||
manualCountKeyRef.current = countKey;
|
||||
const countSeq = ++manualCountSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: true, totalCountCancelled: false }));
|
||||
const countConfig: any = { ...(config as any), timeout: 120 };
|
||||
|
||||
try {
|
||||
const resCount = await DBQuery(countConfig as any, dbName, countSql);
|
||||
const countDuration = Date.now() - countStart;
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-duckdb-manual-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount?.success ? 'success' : 'error',
|
||||
duration: countDuration,
|
||||
message: resCount?.success ? '' : String(resCount?.message || '统计失败'),
|
||||
dbName
|
||||
});
|
||||
|
||||
if (manualCountSeqRef.current !== countSeq) return;
|
||||
if (manualCountKeyRef.current !== countKey) return;
|
||||
|
||||
if (!resCount?.success) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error(String(resCount?.message || '统计总数失败'));
|
||||
return;
|
||||
}
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
return;
|
||||
}
|
||||
|
||||
const total = parseTotalFromCountRow(resCount.data[0]);
|
||||
if (total === null) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error('统计结果解析失败');
|
||||
return;
|
||||
}
|
||||
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
total,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
} catch (e: any) {
|
||||
if (manualCountSeqRef.current !== countSeq) return;
|
||||
if (manualCountKeyRef.current !== countKey) return;
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error(`统计总数失败: ${String(e?.message || e)}`);
|
||||
}
|
||||
}, [addSqlLog]);
|
||||
|
||||
const handleDuckDBCancelManualCount = useCallback(() => {
|
||||
manualCountSeqRef.current++;
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false, totalCountCancelled: true }));
|
||||
}, []);
|
||||
|
||||
const fetchData = useCallback(async (page = pagination.current, size = pagination.pageSize) => {
|
||||
const seq = ++fetchSeqRef.current;
|
||||
@@ -58,40 +400,163 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
};
|
||||
|
||||
const dbType = config.type || '';
|
||||
const dbTypeLower = String(dbType || '').trim().toLowerCase();
|
||||
const isMySQLFamily = dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros';
|
||||
|
||||
const dbName = tab.dbName || '';
|
||||
const tableName = tab.tableName || '';
|
||||
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
if (sortInfo && sortInfo.order) {
|
||||
sql += ` ORDER BY ${quoteIdentPart(dbType, sortInfo.columnKey)} ${sortInfo.order === 'ascend' ? 'ASC' : 'DESC'}`;
|
||||
const isMongoDB = dbTypeLower === 'mongodb';
|
||||
let mongoFilter: Record<string, unknown> | undefined;
|
||||
if (isMongoDB) {
|
||||
try {
|
||||
mongoFilter = buildMongoFilter(filterConditions);
|
||||
} catch (e: any) {
|
||||
message.error(`Mongo 筛选条件无效:${String(e?.message || e || '解析失败')}`);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
}
|
||||
const offset = (page - 1) * size;
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
|
||||
const startTime = Date.now();
|
||||
try {
|
||||
const pData = DBQuery(config as any, dbName, sql);
|
||||
|
||||
const resData = await pData;
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
// Log Execution
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: sql,
|
||||
status: resData.success ? 'success' : 'error',
|
||||
duration: duration,
|
||||
message: resData.success ? '' : resData.message,
|
||||
affectedRows: Array.isArray(resData.data) ? resData.data.length : undefined,
|
||||
dbName
|
||||
const whereSQL = isMongoDB
|
||||
? JSON.stringify(mongoFilter || {})
|
||||
: buildWhereSQL(dbType, filterConditions);
|
||||
const countSql = isMongoDB
|
||||
? buildMongoCountCommand(tableName, mongoFilter || {})
|
||||
: `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
const orderBySQL = isMongoDB ? '' : buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const totalRows = Number(pagination.total);
|
||||
const hasFiniteTotal = Number.isFinite(totalRows) && totalRows >= 0;
|
||||
const totalKnown = pagination.totalKnown && hasFiniteTotal;
|
||||
const totalPages = hasFiniteTotal ? Math.max(1, Math.ceil(totalRows / size)) : 0;
|
||||
const currentPage = totalPages > 0 ? Math.min(Math.max(1, page), totalPages) : Math.max(1, page);
|
||||
const offset = (currentPage - 1) * size;
|
||||
const isClickHouse = !isMongoDB && dbTypeLower === 'clickhouse';
|
||||
const reverseOrderSQL = isClickHouse ? reverseOrderBySQL(orderBySQL) : '';
|
||||
let useClickHouseReversePagination = false;
|
||||
let clickHouseReverseLimit = 0;
|
||||
let clickHouseReverseHasMore = false;
|
||||
let sql = '';
|
||||
if (isMongoDB) {
|
||||
const mongoSort = buildMongoSort(sortInfo, pkColumns);
|
||||
sql = buildMongoFindCommand({
|
||||
collection: tableName,
|
||||
filter: mongoFilter || {},
|
||||
sort: mongoSort,
|
||||
limit: size + 1,
|
||||
skip: offset,
|
||||
});
|
||||
} else {
|
||||
const baseSql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql = `${baseSql}${orderBySQL}`;
|
||||
// ClickHouse 深分页在超大 OFFSET 下容易超时。对于总数已知且存在 ORDER BY 的场景,
|
||||
// 当“尾部偏移”小于“头部偏移”时,改为反向 ORDER BY + 小 OFFSET,并在前端翻转结果。
|
||||
if (isClickHouse && totalKnown && offset > 0 && reverseOrderSQL) {
|
||||
const pageRowCount = Math.max(0, Math.min(size, totalRows - offset));
|
||||
if (pageRowCount > 0) {
|
||||
const tailOffset = Math.max(0, totalRows - (offset + pageRowCount));
|
||||
if (tailOffset < offset) {
|
||||
sql = buildPaginatedSelectSQL(dbType, baseSql, reverseOrderSQL, pageRowCount, tailOffset);
|
||||
useClickHouseReversePagination = true;
|
||||
clickHouseReverseLimit = pageRowCount;
|
||||
clickHouseReverseHasMore = currentPage < totalPages;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!useClickHouseReversePagination) {
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql = buildPaginatedSelectSQL(dbType, baseSql, orderBySQL, size + 1, offset);
|
||||
}
|
||||
}
|
||||
|
||||
const requestStartTime = Date.now();
|
||||
let executedSql = sql;
|
||||
try {
|
||||
const executeDataQuery = async (querySql: string, attemptLabel: string) => {
|
||||
const startTime = Date.now();
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, querySql);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: result.success ? 'success' : 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: result.success ? '' : `${attemptLabel}: ${result.message}`,
|
||||
affectedRows: Array.isArray(result.data) ? result.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
return result;
|
||||
} catch (e: any) {
|
||||
const errMessage = String(e?.message || e || 'query failed');
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: `${attemptLabel}: ${errMessage}`,
|
||||
dbName
|
||||
});
|
||||
return { success: false, message: errMessage, data: [], fields: [] };
|
||||
}
|
||||
};
|
||||
|
||||
const hasSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
const isSortMemoryErr = (msg: string) => /error\s*1038|out of sort memory/i.test(String(msg || ''));
|
||||
let resData = await executeDataQuery(sql, '主查询');
|
||||
|
||||
if (!resData.success && dbTypeLower === 'duckdb' && isDuckDBUnsupportedTypeError(String(resData.message || ''))) {
|
||||
const cacheKey = `${tab.connectionId}|${dbName}|${tableName}`;
|
||||
let safeSelect = duckdbSafeSelectCacheRef.current[cacheKey] || '';
|
||||
if (!safeSelect) {
|
||||
try {
|
||||
const resCols = await DBGetColumns(config as any, dbName, tableName);
|
||||
if (resCols?.success && Array.isArray(resCols.data)) {
|
||||
const columnDefs = resCols.data as ColumnDefinition[];
|
||||
const selectParts = columnDefs.map((col) => {
|
||||
const colName = String(col?.name || '').trim();
|
||||
if (!colName) return '';
|
||||
const quotedCol = quoteIdentPart(dbType, colName);
|
||||
if (isDuckDBComplexColumnType(col?.type)) {
|
||||
return `CAST(${quotedCol} AS VARCHAR) AS ${quotedCol}`;
|
||||
}
|
||||
return quotedCol;
|
||||
}).filter(Boolean);
|
||||
if (selectParts.length > 0) {
|
||||
safeSelect = selectParts.join(', ');
|
||||
duckdbSafeSelectCacheRef.current[cacheKey] = safeSelect;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// ignore and keep original error path
|
||||
}
|
||||
}
|
||||
|
||||
if (safeSelect) {
|
||||
let fallbackSql = `SELECT ${safeSelect} FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
fallbackSql = buildPaginatedSelectSQL(dbType, fallbackSql, buildOrderBySQL(dbType, sortInfo, pkColumns), size + 1, offset);
|
||||
executedSql = fallbackSql;
|
||||
resData = await executeDataQuery(fallbackSql, '复杂类型降级重试');
|
||||
}
|
||||
}
|
||||
|
||||
if (!resData.success && isMySQLFamily && hasSort && isSortMemoryErr(resData.message)) {
|
||||
const retrySql32MB = withSortBufferTuningSQL(dbType, sql, 32 * 1024 * 1024);
|
||||
if (retrySql32MB !== sql) {
|
||||
executedSql = retrySql32MB;
|
||||
resData = await executeDataQuery(retrySql32MB, '重试(32MB sort_buffer)');
|
||||
}
|
||||
if (!resData.success && isSortMemoryErr(resData.message)) {
|
||||
const retrySql128MB = withSortBufferTuningSQL(dbType, sql, 128 * 1024 * 1024);
|
||||
if (retrySql128MB !== executedSql) {
|
||||
executedSql = retrySql128MB;
|
||||
resData = await executeDataQuery(retrySql128MB, '重试(128MB sort_buffer)');
|
||||
}
|
||||
}
|
||||
if (resData.success) {
|
||||
message.warning('已自动提升排序缓冲并重试成功。');
|
||||
}
|
||||
}
|
||||
|
||||
if (pkColumns.length === 0) {
|
||||
const pkKey = `${tab.connectionId}|${dbName}|${tableName}`;
|
||||
@@ -117,7 +582,12 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
let resultData = resData.data as any[];
|
||||
if (!Array.isArray(resultData)) resultData = [];
|
||||
|
||||
const hasMore = resultData.length > size;
|
||||
if (useClickHouseReversePagination) {
|
||||
// 反向查询后恢复为原排序方向,保证用户看到的仍是“最后一页正序数据”。
|
||||
resultData = resultData.slice(0, clickHouseReverseLimit).reverse();
|
||||
}
|
||||
|
||||
const hasMore = useClickHouseReversePagination ? clickHouseReverseHasMore : resultData.length > size;
|
||||
if (hasMore) resultData = resultData.slice(0, size);
|
||||
|
||||
let fieldNames = resData.fields || [];
|
||||
@@ -132,26 +602,71 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setData(resultData);
|
||||
const countKey = `${tab.connectionId}|${dbName}|${tableName}|${whereSQL}`;
|
||||
const derivedTotalKnown = !hasMore;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : page * size + 1;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : currentPage * size + 1;
|
||||
const isDuckDB = dbTypeLower === 'duckdb';
|
||||
const minExpectedTotal = hasMore ? offset + resultData.length + 1 : offset + resultData.length;
|
||||
if (derivedTotalKnown) countKeyRef.current = countKey;
|
||||
latestConfigRef.current = config;
|
||||
latestDbTypeRef.current = dbTypeLower;
|
||||
latestDbNameRef.current = dbName;
|
||||
latestCountSqlRef.current = countSql;
|
||||
latestCountKeyRef.current = countKey;
|
||||
|
||||
setPagination(prev => {
|
||||
if (derivedTotalKnown) {
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: true };
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
total: derivedTotal,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
}
|
||||
if (prev.totalKnown && countKeyRef.current === countKey) {
|
||||
return { ...prev, current: page, pageSize: size };
|
||||
if (!isDuckDB) {
|
||||
return { ...prev, current: currentPage, pageSize: size };
|
||||
}
|
||||
// 当当前页存在“下一页”信号时,已知总数至少应大于当前页末尾。
|
||||
// 若旧总数不满足该条件(例如历史统计值为 0),降级为未知总数并回退到 derivedTotal。
|
||||
if (Number.isFinite(prev.total) && prev.total >= minExpectedTotal) {
|
||||
return { ...prev, current: currentPage, pageSize: size };
|
||||
}
|
||||
}
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: false };
|
||||
const keepManualCounting = prev.totalCountLoading && manualCountKeyRef.current === countKey;
|
||||
if (isDuckDB && prev.totalApprox && duckdbApproxKeyRef.current === countKey && Number.isFinite(prev.total) && prev.total >= minExpectedTotal) {
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
totalKnown: false,
|
||||
totalApprox: true,
|
||||
totalCountLoading: keepManualCounting,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
total: derivedTotal,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: keepManualCounting,
|
||||
totalCountCancelled: keepManualCounting ? false : prev.totalCountCancelled,
|
||||
};
|
||||
});
|
||||
|
||||
if (!derivedTotalKnown) {
|
||||
const shouldRunAsyncCount = !derivedTotalKnown && !isDuckDB;
|
||||
if (shouldRunAsyncCount) {
|
||||
if (countKeyRef.current !== countKey) {
|
||||
countKeyRef.current = countKey;
|
||||
const countSeq = ++countSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
// 大表 COUNT(*) 可能非常慢,且在部分运行时环境下会影响后续操作响应;
|
||||
// 这里为统计请求设置更短的超时,避免“后台统计”长期占用资源。
|
||||
// DuckDB 大文件场景下该统计会显著拖慢翻页,已禁用后台 COUNT。
|
||||
const countConfig: any = { ...(config as any), timeout: 5 };
|
||||
|
||||
DBQuery(countConfig, dbName, countSql)
|
||||
@@ -174,10 +689,17 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (!resCount.success) return;
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) return;
|
||||
|
||||
const total = Number(resCount.data[0]?.['total']);
|
||||
if (!Number.isFinite(total) || total < 0) return;
|
||||
const total = parseTotalFromCountRow(resCount.data[0]);
|
||||
if (total === null) return;
|
||||
|
||||
setPagination(prev => ({ ...prev, total, totalKnown: true }));
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
total,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
})
|
||||
.catch(() => {
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
@@ -186,8 +708,52 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (isDuckDB && !derivedTotalKnown && whereSQL.trim() === '' && duckdbApproxKeyRef.current !== countKey) {
|
||||
duckdbApproxKeyRef.current = countKey;
|
||||
const approxSeq = ++duckdbApproxSeqRef.current;
|
||||
const { schemaName, pureTableName } = resolveDuckDBSchemaAndTable(dbName, tableName);
|
||||
const escapedSchema = escapeSQLLiteral(schemaName);
|
||||
const escapedTable = escapeSQLLiteral(pureTableName);
|
||||
const approxConfig: any = { ...(config as any), timeout: 3 };
|
||||
const approxSqlCandidates = [
|
||||
`SELECT estimated_size AS approx_total FROM duckdb_tables() WHERE schema_name='${escapedSchema}' AND table_name='${escapedTable}' LIMIT 1`,
|
||||
`SELECT estimated_size AS approx_total FROM duckdb_tables() WHERE table_name='${escapedTable}' ORDER BY CASE WHEN schema_name='${escapedSchema}' THEN 0 ELSE 1 END LIMIT 1`,
|
||||
];
|
||||
|
||||
(async () => {
|
||||
for (const approxSql of approxSqlCandidates) {
|
||||
try {
|
||||
const approxRes = await DBQuery(approxConfig as any, dbName, approxSql);
|
||||
if (duckdbApproxSeqRef.current !== approxSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
if (!approxRes?.success || !Array.isArray(approxRes.data) || approxRes.data.length === 0) continue;
|
||||
|
||||
const approxTotal = parseDuckDBApproxTotalRow(approxRes.data[0]);
|
||||
if (approxTotal === null) continue;
|
||||
if (!Number.isFinite(approxTotal) || approxTotal < minExpectedTotal) continue;
|
||||
|
||||
setPagination(prev => {
|
||||
if (countKeyRef.current !== countKey) return prev;
|
||||
if (prev.totalKnown) return prev;
|
||||
return {
|
||||
...prev,
|
||||
total: approxTotal,
|
||||
totalKnown: false,
|
||||
totalApprox: true,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
});
|
||||
return;
|
||||
} catch {
|
||||
if (duckdbApproxSeqRef.current !== approxSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
}
|
||||
}
|
||||
})();
|
||||
}
|
||||
} else {
|
||||
message.error(resData.message);
|
||||
message.error(String(resData.message || '查询失败'));
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
@@ -195,42 +761,70 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-error`,
|
||||
timestamp: Date.now(),
|
||||
sql: sql,
|
||||
sql: executedSql,
|
||||
status: 'error',
|
||||
duration: Date.now() - startTime,
|
||||
duration: Date.now() - requestStartTime,
|
||||
message: e.message,
|
||||
dbName
|
||||
});
|
||||
}
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns.length]);
|
||||
// Depend on pkColumns.length to avoid loop? No, pkColumns is updated inside.
|
||||
// Actually, 'pkColumns' state shouldn't trigger re-fetch.
|
||||
// The 'if (pkColumns.length === 0)' check is inside.
|
||||
// So adding pkColumns to dependency is safer but might trigger double fetch if not careful?
|
||||
// Only if pkColumns changes. It changes once from [] to [...].
|
||||
// So it's fine.
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns, pagination.total, pagination.totalKnown]);
|
||||
// 依赖 pkColumns:在无手动排序时可回退到主键稳定排序。
|
||||
// 主键信息只会在首次加载后更新一次,避免循环查询。
|
||||
|
||||
// Handlers memoized
|
||||
const handleReload = useCallback(() => {
|
||||
fetchData(pagination.current, pagination.pageSize);
|
||||
}, [fetchData, pagination.current, pagination.pageSize]);
|
||||
const handleSort = useCallback((field: string, order: string) => setSortInfo({ columnKey: field, order }), []);
|
||||
const handleSort = useCallback((field: string, order: string) => {
|
||||
const normalizedOrder = order === 'ascend' || order === 'descend' ? order : '';
|
||||
const normalizedField = String(field || '').trim();
|
||||
if (!normalizedField || !normalizedOrder) {
|
||||
setSortInfo(null);
|
||||
return;
|
||||
}
|
||||
setSortInfo({ columnKey: normalizedField, order: normalizedOrder });
|
||||
}, []);
|
||||
const handlePageChange = useCallback((page: number, size: number) => fetchData(page, size), [fetchData]);
|
||||
const handleToggleFilter = useCallback(() => setShowFilter(prev => !prev), []);
|
||||
const handleApplyFilter = useCallback((conditions: any[]) => setFilterConditions(conditions), []);
|
||||
const handleApplyFilter = useCallback((conditions: FilterCondition[]) => setFilterConditions(conditions), []);
|
||||
|
||||
const exportSqlWithFilter = useMemo(() => {
|
||||
const tableName = String(tab.tableName || '').trim();
|
||||
const dbType = String(currentConnConfig?.type || '').trim();
|
||||
if (!tableName || !dbType) return '';
|
||||
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
if (!whereSQL) return '';
|
||||
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const normalizedType = dbType.toLowerCase();
|
||||
const hasExplicitSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
if (hasExplicitSort && (normalizedType === 'mysql' || normalizedType === 'mariadb')) {
|
||||
sql = withSortBufferTuningSQL(normalizedType, sql, 32 * 1024 * 1024);
|
||||
}
|
||||
return sql;
|
||||
}, [tab.tableName, currentConnConfig?.type, filterConditions, sortInfo, pkColumns]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
if (!initialLoadRef.current) {
|
||||
initialLoadRef.current = true;
|
||||
fetchData(pagination.current, pagination.pageSize);
|
||||
return;
|
||||
}
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab.id, tab.connectionId, tab.dbName, tab.tableName, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
|
||||
return (
|
||||
<div style={{ flex: '1 1 auto', minHeight: 0, height: '100%', width: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column' }}>
|
||||
<div style={{ flex: '1 1 auto', minHeight: 0, minWidth: 0, height: '100%', width: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column' }}>
|
||||
<DataGrid
|
||||
data={data}
|
||||
columnNames={columnNames}
|
||||
loading={loading}
|
||||
tableName={tab.tableName}
|
||||
exportScope="table"
|
||||
dbName={tab.dbName}
|
||||
connectionId={tab.connectionId}
|
||||
pkColumns={pkColumns}
|
||||
@@ -238,9 +832,17 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
onSort={handleSort}
|
||||
onPageChange={handlePageChange}
|
||||
pagination={pagination}
|
||||
onRequestTotalCount={currentConnType === 'duckdb' ? handleDuckDBManualCount : undefined}
|
||||
onCancelTotalCount={currentConnType === 'duckdb' ? handleDuckDBCancelManualCount : undefined}
|
||||
showFilter={showFilter}
|
||||
onToggleFilter={handleToggleFilter}
|
||||
onApplyFilter={handleApplyFilter}
|
||||
appliedFilterConditions={filterConditions}
|
||||
readOnly={forceReadOnly}
|
||||
sortInfoExternal={sortInfo}
|
||||
exportSqlWithFilter={exportSqlWithFilter || undefined}
|
||||
scrollSnapshot={scrollSnapshotRef.current}
|
||||
onScrollSnapshotChange={handleTableScrollSnapshotChange}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
471
frontend/src/components/DefinitionViewer.tsx
Normal file
471
frontend/src/components/DefinitionViewer.tsx
Normal file
@@ -0,0 +1,471 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import Editor from '@monaco-editor/react';
|
||||
import { Spin, Alert } from 'antd';
|
||||
import { TabData } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery } from '../../wailsjs/go/app/App';
|
||||
|
||||
interface DefinitionViewerProps {
|
||||
tab: TabData;
|
||||
}
|
||||
|
||||
const DefinitionViewer: React.FC<DefinitionViewerProps> = ({ tab }) => {
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [definition, setDefinition] = useState<string>('');
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
|
||||
const escapeSQLLiteral = (raw: string): string => String(raw || '').replace(/'/g, "''");
|
||||
|
||||
const getMetadataDialect = (conn: any): string => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'custom') {
|
||||
const driver = String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
if (driver === 'diros' || driver === 'doris') return 'mysql';
|
||||
return driver;
|
||||
}
|
||||
if (type === 'mariadb' || type === 'diros' || type === 'sphinx') return 'mysql';
|
||||
if (type === 'dameng') return 'dm';
|
||||
return type;
|
||||
};
|
||||
|
||||
const isSphinxConnection = (conn: any): boolean => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'sphinx') return true;
|
||||
if (type !== 'custom') return false;
|
||||
const driver = String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
return driver === 'sphinx' || driver === 'sphinxql';
|
||||
};
|
||||
|
||||
const parseSchemaAndName = (fullName: string): { schema: string; name: string } => {
|
||||
const raw = String(fullName || '').trim();
|
||||
const idx = raw.lastIndexOf('.');
|
||||
if (idx > 0 && idx < raw.length - 1) {
|
||||
return { schema: raw.substring(0, idx), name: raw.substring(idx + 1) };
|
||||
}
|
||||
return { schema: '', name: raw };
|
||||
};
|
||||
|
||||
const getCaseInsensitiveRawValue = (row: Record<string, any>, candidateKeys: string[]): any => {
|
||||
const keyMap = new Map<string, any>();
|
||||
Object.keys(row || {}).forEach((key) => keyMap.set(key.toLowerCase(), row[key]));
|
||||
for (const key of candidateKeys) {
|
||||
const value = keyMap.get(key.toLowerCase());
|
||||
if (value !== undefined && value !== null) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const parseDuckDBParameterNames = (raw: any): string[] => {
|
||||
if (Array.isArray(raw)) {
|
||||
return raw
|
||||
.map((item) => String(item ?? '').trim())
|
||||
.filter((item) => item !== '' && item.toLowerCase() !== '<nil>');
|
||||
}
|
||||
const text = String(raw ?? '').trim();
|
||||
if (!text) return [];
|
||||
const normalized = text.startsWith('[') && text.endsWith(']')
|
||||
? text.slice(1, -1)
|
||||
: text;
|
||||
return normalized
|
||||
.split(',')
|
||||
.map((part) => part.trim())
|
||||
.filter((part) => part !== '' && part.toLowerCase() !== '<nil>');
|
||||
};
|
||||
|
||||
const buildDuckDBMacroDDL = (
|
||||
schemaName: string,
|
||||
functionName: string,
|
||||
parametersRaw: any,
|
||||
macroDefinitionRaw: any
|
||||
): string => {
|
||||
const schema = String(schemaName || '').trim();
|
||||
const name = String(functionName || '').trim();
|
||||
const macroDefinition = String(macroDefinitionRaw || '').trim();
|
||||
if (!name || !macroDefinition) return '';
|
||||
|
||||
const parameters = parseDuckDBParameterNames(parametersRaw).join(', ');
|
||||
const qualifiedName = schema ? `${schema}.${name}` : name;
|
||||
const isTableMacro = !macroDefinition.startsWith('(');
|
||||
if (isTableMacro) {
|
||||
return `CREATE OR REPLACE MACRO ${qualifiedName}(${parameters}) AS TABLE ${macroDefinition};`;
|
||||
}
|
||||
return `CREATE OR REPLACE MACRO ${qualifiedName}(${parameters}) AS ${macroDefinition};`;
|
||||
};
|
||||
|
||||
const buildShowViewQueries = (dialect: string, viewName: string, dbName: string): string[] => {
|
||||
const { schema, name } = parseSchemaAndName(viewName);
|
||||
const safeName = escapeSQLLiteral(name);
|
||||
const safeDbName = escapeSQLLiteral(dbName);
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql':
|
||||
return [
|
||||
`SHOW CREATE VIEW \`${name.replace(/`/g, '``')}\``,
|
||||
safeDbName
|
||||
? `SELECT VIEW_DEFINITION AS view_definition FROM information_schema.views WHERE table_schema = '${safeDbName}' AND table_name = '${safeName}' LIMIT 1`
|
||||
: '',
|
||||
`SHOW CREATE TABLE \`${name.replace(/`/g, '``')}\``,
|
||||
].filter(Boolean);
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase': {
|
||||
const schemaRef = schema || 'public';
|
||||
return [`SELECT pg_get_viewdef('${escapeSQLLiteral(schemaRef)}.${safeName}'::regclass, true) AS view_definition`];
|
||||
}
|
||||
case 'sqlserver':
|
||||
return [`SELECT OBJECT_DEFINITION(OBJECT_ID('${escapeSQLLiteral(viewName)}')) AS view_definition`];
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
if (schema) {
|
||||
return [`SELECT TEXT AS view_definition FROM ALL_VIEWS WHERE OWNER = '${escapeSQLLiteral(schema).toUpperCase()}' AND VIEW_NAME = '${safeName.toUpperCase()}'`];
|
||||
}
|
||||
if (safeDbName) {
|
||||
return [`SELECT TEXT AS view_definition FROM ALL_VIEWS WHERE OWNER = '${safeDbName.toUpperCase()}' AND VIEW_NAME = '${safeName.toUpperCase()}'`];
|
||||
}
|
||||
return [`SELECT TEXT AS view_definition FROM USER_VIEWS WHERE VIEW_NAME = '${safeName.toUpperCase()}'`];
|
||||
case 'sqlite':
|
||||
return [`SELECT sql AS view_definition FROM sqlite_master WHERE type='view' AND name='${safeName}'`];
|
||||
case 'duckdb': {
|
||||
const schemaRef = schema || 'main';
|
||||
return [`SELECT view_definition FROM information_schema.views WHERE table_schema = '${escapeSQLLiteral(schemaRef)}' AND table_name = '${safeName}' LIMIT 1`];
|
||||
}
|
||||
default:
|
||||
return [`-- 暂不支持该数据库类型的视图定义查看`];
|
||||
}
|
||||
};
|
||||
|
||||
const buildShowRoutineQueries = (dialect: string, routineName: string, routineType: string, dbName: string): string[] => {
|
||||
const { schema, name } = parseSchemaAndName(routineName);
|
||||
const safeName = escapeSQLLiteral(name);
|
||||
const safeDbName = escapeSQLLiteral(dbName);
|
||||
const upperType = (routineType || 'FUNCTION').toUpperCase();
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql':
|
||||
return [
|
||||
`SHOW CREATE ${upperType} \`${name.replace(/`/g, '``')}\``,
|
||||
safeDbName
|
||||
? `SELECT ROUTINE_DEFINITION AS routine_definition, ROUTINE_TYPE AS routine_type FROM information_schema.routines WHERE routine_schema = '${safeDbName}' AND routine_name = '${safeName}' LIMIT 1`
|
||||
: '',
|
||||
upperType === 'PROCEDURE'
|
||||
? `SHOW PROCEDURE STATUS LIKE '${safeName}'`
|
||||
: `SHOW FUNCTION STATUS LIKE '${safeName}'`,
|
||||
].filter(Boolean);
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase': {
|
||||
const schemaRef = schema || 'public';
|
||||
return [`SELECT pg_get_functiondef(p.oid) AS routine_definition FROM pg_proc p JOIN pg_namespace n ON p.pronamespace = n.oid WHERE n.nspname = '${escapeSQLLiteral(schemaRef)}' AND p.proname = '${safeName}' LIMIT 1`];
|
||||
}
|
||||
case 'sqlserver':
|
||||
return [`SELECT OBJECT_DEFINITION(OBJECT_ID('${escapeSQLLiteral(routineName)}')) AS routine_definition`];
|
||||
case 'oracle':
|
||||
case 'dm': {
|
||||
const owner = schema ? escapeSQLLiteral(schema).toUpperCase() : (safeDbName ? safeDbName.toUpperCase() : '');
|
||||
if (owner) {
|
||||
return [`SELECT TEXT FROM ALL_SOURCE WHERE OWNER = '${owner}' AND NAME = '${safeName.toUpperCase()}' AND TYPE = '${upperType}' ORDER BY LINE`];
|
||||
}
|
||||
return [`SELECT TEXT FROM USER_SOURCE WHERE NAME = '${safeName.toUpperCase()}' AND TYPE = '${upperType}' ORDER BY LINE`];
|
||||
}
|
||||
case 'duckdb': {
|
||||
const schemaRef = schema || 'main';
|
||||
const safeSchema = escapeSQLLiteral(schemaRef);
|
||||
return [
|
||||
`SELECT schema_name, function_name, parameters, macro_definition FROM duckdb_functions() WHERE internal = false AND lower(function_type) = 'macro' AND schema_name = '${safeSchema}' AND function_name = '${safeName}' LIMIT 1`,
|
||||
`SELECT schema_name, function_name, parameters, macro_definition FROM duckdb_functions() WHERE internal = false AND lower(function_type) = 'macro' AND function_name = '${safeName}' ORDER BY CASE WHEN schema_name = '${safeSchema}' THEN 0 ELSE 1 END, schema_name LIMIT 1`,
|
||||
];
|
||||
}
|
||||
case 'sqlite':
|
||||
return [`-- SQLite 不支持函数/存储过程定义管理`];
|
||||
default:
|
||||
return [`-- 暂不支持该数据库类型的函数/存储过程定义查看`];
|
||||
}
|
||||
};
|
||||
|
||||
const runQueryCandidates = async (
|
||||
config: Record<string, any>,
|
||||
dbName: string,
|
||||
queries: string[]
|
||||
): Promise<{ success: boolean; data: any[]; message?: string }> => {
|
||||
let lastMessage = '';
|
||||
let hasSuccessfulQuery = false;
|
||||
for (const query of queries) {
|
||||
const sql = String(query || '').trim();
|
||||
if (!sql) continue;
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, sql);
|
||||
if (!result.success || !Array.isArray(result.data)) {
|
||||
lastMessage = result.message || lastMessage;
|
||||
continue;
|
||||
}
|
||||
hasSuccessfulQuery = true;
|
||||
if (result.data.length > 0) {
|
||||
return { success: true, data: result.data };
|
||||
}
|
||||
} catch (error: any) {
|
||||
lastMessage = error?.message || String(error);
|
||||
}
|
||||
}
|
||||
if (hasSuccessfulQuery) {
|
||||
return { success: true, data: [] };
|
||||
}
|
||||
return { success: false, data: [], message: lastMessage };
|
||||
};
|
||||
|
||||
const getVersionHint = async (config: Record<string, any>, dbName: string): Promise<string> => {
|
||||
const candidates = [
|
||||
`SELECT VERSION() AS version`,
|
||||
`SHOW VARIABLES LIKE 'version'`,
|
||||
];
|
||||
for (const query of candidates) {
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, query);
|
||||
if (!result.success || !Array.isArray(result.data) || result.data.length === 0) {
|
||||
continue;
|
||||
}
|
||||
const row = result.data[0] as Record<string, any>;
|
||||
const version =
|
||||
row.version
|
||||
|| row.VERSION
|
||||
|| row.Value
|
||||
|| row.value
|
||||
|| Object.values(row)[1]
|
||||
|| Object.values(row)[0];
|
||||
const text = String(version || '').trim();
|
||||
if (text) return text;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
const extractViewDefinition = (dialect: string, data: any[]): string => {
|
||||
if (!data || data.length === 0) return '-- 未找到视图定义';
|
||||
const row = data[0];
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql': {
|
||||
const keys = Object.keys(row);
|
||||
const textDefinition = row.view_definition || row.VIEW_DEFINITION;
|
||||
if (textDefinition) return String(textDefinition);
|
||||
const sqlKey = keys.find(k => k.toLowerCase().includes('create view') || k.toLowerCase() === 'create view');
|
||||
if (sqlKey) return row[sqlKey];
|
||||
const tableSqlKey = keys.find(k => k.toLowerCase().includes('create table'));
|
||||
if (tableSqlKey) return row[tableSqlKey];
|
||||
for (const key of keys) {
|
||||
const val = String(row[key] || '');
|
||||
if (val.toUpperCase().includes('CREATE') && (val.toUpperCase().includes('VIEW') || val.toUpperCase().includes('TABLE'))) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
return row.view_definition || row.VIEW_DEFINITION || row.text || row.TEXT || Object.values(row)[0] || '';
|
||||
default:
|
||||
return row.view_definition || row.VIEW_DEFINITION || row.sql || row.SQL || Object.values(row)[0] || '';
|
||||
}
|
||||
};
|
||||
|
||||
const extractRoutineDefinition = (dialect: string, data: any[]): string => {
|
||||
if (!data || data.length === 0) return '-- 未找到函数/存储过程定义';
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql': {
|
||||
const row = data[0];
|
||||
const keys = Object.keys(row);
|
||||
if (row.routine_definition || row.ROUTINE_DEFINITION) {
|
||||
return String(row.routine_definition || row.ROUTINE_DEFINITION);
|
||||
}
|
||||
const sqlKey = keys.find(k => k.toLowerCase().includes('create function') || k.toLowerCase().includes('create procedure'));
|
||||
if (sqlKey) return row[sqlKey];
|
||||
for (const key of keys) {
|
||||
const val = String(row[key] || '');
|
||||
if (val.toUpperCase().includes('CREATE') && (val.toUpperCase().includes('FUNCTION') || val.toUpperCase().includes('PROCEDURE'))) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
const routineName = String(row.Name || row.name || '').trim();
|
||||
if (routineName) {
|
||||
const routineType = String(row.Type || row.type || row.ROUTINE_TYPE || row.routine_type || 'FUNCTION').trim().toUpperCase();
|
||||
return `-- 当前数据源未返回可执行定义文本,已返回元数据\n-- 名称: ${routineName}\n-- 类型: ${routineType}\n${JSON.stringify(row, null, 2)}`;
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm': {
|
||||
// Oracle/DM ALL_SOURCE returns multiple rows, one per line
|
||||
return data.map(row => row.text || row.TEXT || Object.values(row)[0] || '').join('');
|
||||
}
|
||||
case 'duckdb': {
|
||||
const row = data[0] as Record<string, any>;
|
||||
const ddl = buildDuckDBMacroDDL(
|
||||
String(getCaseInsensitiveRawValue(row, ['schema_name']) || '').trim(),
|
||||
String(getCaseInsensitiveRawValue(row, ['function_name', 'routine_name', 'name']) || '').trim(),
|
||||
getCaseInsensitiveRawValue(row, ['parameters']),
|
||||
getCaseInsensitiveRawValue(row, ['macro_definition'])
|
||||
);
|
||||
if (ddl) return ddl;
|
||||
const fallback = getCaseInsensitiveRawValue(row, ['macro_definition', 'routine_definition', 'definition']);
|
||||
if (fallback !== undefined && fallback !== null && String(fallback).trim() !== '') {
|
||||
return String(fallback);
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
default: {
|
||||
const row = data[0];
|
||||
return row.routine_definition || row.ROUTINE_DEFINITION || Object.values(row)[0] || '';
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Fetch the object definition whenever the tab's target (connection, db,
// view/routine name or type) changes. Errors and loading state are surfaced
// through the component's setError / setLoading state setters.
useEffect(() => {
  const loadDefinition = async () => {
    setLoading(true);
    setError(null);

    const conn = connections.find(c => c.id === tab.connectionId);
    if (!conn) {
      setError('未找到数据库连接');
      setLoading(false);
      return;
    }

    const dbName = tab.dbName || '';
    const dialect = getMetadataDialect(conn);
    // Sphinx speaks the MySQL protocol but supports far fewer metadata
    // queries, so failures there get a friendlier, version-aware message.
    const sphinxLike = isSphinxConnection(conn) && dialect === 'mysql';

    // Chosen per tab type: the candidate SQL statements, the extractor that
    // turns result rows into definition text, and a UI label.
    let queries: string[];
    let extractFn: (dialect: string, data: any[]) => string;
    let objectLabel: string;

    if (tab.type === 'view-def') {
      const viewName = tab.viewName || '';
      if (!viewName) {
        setError('视图名称为空');
        setLoading(false);
        return;
      }
      queries = buildShowViewQueries(dialect, viewName, dbName);
      extractFn = extractViewDefinition;
      objectLabel = '视图';
    } else {
      const routineName = tab.routineName || '';
      const routineType = tab.routineType || 'FUNCTION';
      if (!routineName) {
        setError('函数/存储过程名称为空');
        setLoading(false);
        return;
      }
      queries = buildShowRoutineQueries(dialect, routineName, routineType, dbName);
      extractFn = extractRoutineDefinition;
      objectLabel = '函数/存储过程';
    }

    // Query builders signal "unsupported" by returning a leading SQL comment;
    // show that text directly instead of executing it.
    if (!queries.length || String(queries[0] || '').startsWith('--')) {
      setDefinition(String(queries[0] || '-- 暂不支持该对象定义查看'));
      setLoading(false);
      return;
    }

    try {
      // Normalize the stored connection config before handing it to the backend.
      const config = {
        ...conn.config,
        port: Number(conn.config.port),
        password: conn.config.password || '',
        database: conn.config.database || '',
        useSSH: conn.config.useSSH || false,
        ssh: conn.config.ssh || { host: '', port: 22, user: '', password: '', keyPath: '' }
      };

      // Try each candidate query in order until one yields rows.
      const result = await runQueryCandidates(config, dbName, queries);

      if (result.success && Array.isArray(result.data) && result.data.length > 0) {
        const def = extractFn(dialect, result.data);
        setDefinition(def);
        return;
      }

      if (result.success) {
        // Queries ran but returned no rows.
        if (sphinxLike) {
          const version = await getVersionHint(config, dbName);
          const versionText = version ? `(版本: ${version})` : '';
          setDefinition(`-- 当前 Sphinx 实例${versionText}未返回${objectLabel}定义。\n-- 已执行多套兼容查询,可能是版本能力限制或对象类型不支持。`);
          return;
        }
        setDefinition(`-- 未找到${objectLabel}定义`);
      } else if (sphinxLike) {
        // All candidates failed on a Sphinx-like server — explain why.
        const version = await getVersionHint(config, dbName);
        const versionText = version ? `(版本: ${version})` : '';
        setDefinition(`-- 当前 Sphinx 实例${versionText}不支持${objectLabel}定义查询。\n-- 已自动尝试兼容语句,返回失败信息: ${result.message || 'unknown error'}`);
      } else {
        setError(result.message || '查询定义失败');
      }
    } catch (e: any) {
      setError('查询定义失败: ' + (e?.message || String(e)));
    } finally {
      // Always clear the spinner, including the early-return success path.
      setLoading(false);
    }
  };

  loadDefinition();
}, [tab.connectionId, tab.dbName, tab.viewName, tab.routineName, tab.routineType, tab.type, connections]);
|
||||
|
||||
// Human-readable label and name for the object shown in this tab.
const objectLabel = tab.type === 'view-def' ? '视图' : '函数/存储过程';
const objectName = tab.type === 'view-def' ? tab.viewName : tab.routineName;

// Loading state: centered spinner.
if (loading) {
  return (
    <div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
      <Spin tip={`加载${objectLabel}定义...`} />
    </div>
  );
}

// Error state: full-width alert.
if (error) {
  return (
    <div style={{ padding: 16 }}>
      <Alert type="error" message="加载失败" description={error} showIcon />
    </div>
  );
}

return (
  <div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
    {/* Header: object label/name plus optional database and routine-type info */}
    <div style={{ padding: '8px 16px', borderBottom: darkMode ? '1px solid #303030' : '1px solid #f0f0f0' }}>
      <strong>{objectLabel}: </strong>{objectName}
      {tab.dbName && <span style={{ marginLeft: 16, color: '#888' }}>数据库: {tab.dbName}</span>}
      {tab.routineType && <span style={{ marginLeft: 16, color: '#888' }}>类型: {tab.routineType}</span>}
    </div>
    {/* Read-only Monaco editor displaying the fetched definition as SQL */}
    <div style={{ flex: 1, minHeight: 0 }}>
      <Editor
        height="100%"
        language="sql"
        theme={darkMode ? 'transparent-dark' : 'transparent-light'}
        value={definition}
        options={{
          readOnly: true,
          minimap: { enabled: false },
          fontSize: 14,
          lineNumbers: 'on',
          scrollBeyondLastLine: false,
          wordWrap: 'on',
          automaticLayout: true,
        }}
      />
    </div>
  </div>
);
};

export default DefinitionViewer;
|
||||
1446
frontend/src/components/DriverManagerModal.tsx
Normal file
1446
frontend/src/components/DriverManagerModal.tsx
Normal file
File diff suppressed because it is too large
Load Diff
250
frontend/src/components/ImportPreviewModal.tsx
Normal file
250
frontend/src/components/ImportPreviewModal.tsx
Normal file
@@ -0,0 +1,250 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Modal, Table, Alert, Progress, Button, Space } from 'antd';
|
||||
import { CheckCircleOutlined, CloseCircleOutlined } from '@ant-design/icons';
|
||||
import { PreviewImportFile, ImportDataWithProgress } from '../../wailsjs/go/app/App';
|
||||
import { EventsOn, EventsOff } from '../../wailsjs/runtime/runtime';
|
||||
import { useStore } from '../store';
|
||||
|
||||
// Props for the import preview dialog.
interface ImportPreviewModalProps {
  visible: boolean;       // whether the modal is open
  filePath: string;       // path of the file to preview/import
  connectionId: string;   // id of the target connection (looked up in the store)
  dbName: string;         // target database name
  tableName: string;      // target table name
  onClose: () => void;    // close the modal
  onSuccess: () => void;  // invoked after a fully successful import
}

// Shape of the preview payload returned by PreviewImportFile.
interface PreviewData {
  columns: string[];      // column names detected in the file
  totalRows: number;      // total data rows in the file
  previewRows: any[];     // first rows, keyed by column name
}

// Progress event payload emitted on the 'import:progress' channel.
interface ImportProgress {
  current: number;        // rows processed so far
  total: number;          // total rows to process
  success: number;        // rows imported successfully
  errors: number;         // rows that failed
}
|
||||
|
||||
const ImportPreviewModal: React.FC<ImportPreviewModalProps> = ({
|
||||
visible,
|
||||
filePath,
|
||||
connectionId,
|
||||
dbName,
|
||||
tableName,
|
||||
onClose,
|
||||
onSuccess
|
||||
}) => {
|
||||
const connections = useStore(state => state.connections);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [previewData, setPreviewData] = useState<PreviewData | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [importing, setImporting] = useState(false);
|
||||
const [progress, setProgress] = useState<ImportProgress | null>(null);
|
||||
const [importResult, setImportResult] = useState<any>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (visible && filePath) {
|
||||
loadPreview();
|
||||
}
|
||||
}, [visible, filePath]);
|
||||
|
||||
useEffect(() => {
|
||||
if (importing) {
|
||||
const unsubscribe = EventsOn('import:progress', (data: ImportProgress) => {
|
||||
setProgress(data);
|
||||
});
|
||||
return () => {
|
||||
EventsOff('import:progress');
|
||||
};
|
||||
}
|
||||
}, [importing]);
|
||||
|
||||
const loadPreview = async () => {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const res = await PreviewImportFile(filePath);
|
||||
if (res.success && res.data) {
|
||||
setPreviewData({
|
||||
columns: res.data.columns || [],
|
||||
totalRows: res.data.totalRows || 0,
|
||||
previewRows: res.data.previewRows || []
|
||||
});
|
||||
} else {
|
||||
setError(res.message || '预览失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('预览失败: ' + e.message);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleImport = async () => {
|
||||
if (!previewData) return;
|
||||
|
||||
setImporting(true);
|
||||
setProgress({ current: 0, total: previewData.totalRows, success: 0, errors: 0 });
|
||||
setImportResult(null);
|
||||
|
||||
try {
|
||||
const conn = connections.find(c => c.id === connectionId);
|
||||
if (!conn) {
|
||||
setError('连接配置未找到');
|
||||
setImporting(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || '',
|
||||
database: conn.config.database || '',
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: '', port: 22, user: '', password: '', keyPath: '' }
|
||||
};
|
||||
|
||||
const res = await ImportDataWithProgress(config as any, dbName, tableName, filePath);
|
||||
|
||||
if (res.success && res.data) {
|
||||
setImportResult(res.data);
|
||||
if (res.data.failed === 0) {
|
||||
onSuccess();
|
||||
}
|
||||
} else {
|
||||
setError(res.message || '导入失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('导入失败: ' + e.message);
|
||||
} finally {
|
||||
setImporting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const columns = previewData?.columns.map(col => ({
|
||||
title: col,
|
||||
dataIndex: col,
|
||||
key: col,
|
||||
ellipsis: true,
|
||||
width: 150
|
||||
})) || [];
|
||||
|
||||
const progressPercent = progress ? Math.round((progress.current / progress.total) * 100) : 0;
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title="导入数据预览"
|
||||
open={visible}
|
||||
onCancel={onClose}
|
||||
width={900}
|
||||
footer={
|
||||
importResult ? (
|
||||
<Space>
|
||||
<Button onClick={onClose}>关闭</Button>
|
||||
</Space>
|
||||
) : importing ? null : (
|
||||
<Space>
|
||||
<Button onClick={onClose}>取消</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={handleImport}
|
||||
disabled={!previewData || loading}
|
||||
>
|
||||
开始导入
|
||||
</Button>
|
||||
</Space>
|
||||
)
|
||||
}
|
||||
>
|
||||
{error && <Alert type="error" message={error} style={{ marginBottom: 16 }} showIcon />}
|
||||
|
||||
{loading && <div style={{ textAlign: 'center', padding: 40 }}>加载预览数据...</div>}
|
||||
|
||||
{!loading && previewData && !importing && !importResult && (
|
||||
<>
|
||||
<Alert
|
||||
type="info"
|
||||
message={`共 ${previewData.totalRows} 行数据,${previewData.columns.length} 个字段`}
|
||||
description='以下是前 5 行预览数据,确认无误后点击“开始导入”'
|
||||
style={{ marginBottom: 16 }}
|
||||
showIcon
|
||||
/>
|
||||
<div style={{ marginBottom: 8, fontWeight: 600 }}>字段列表:</div>
|
||||
<div style={{ marginBottom: 16, padding: 8, background: '#f5f5f5', borderRadius: 4 }}>
|
||||
{previewData.columns.join(', ')}
|
||||
</div>
|
||||
<div style={{ marginBottom: 8, fontWeight: 600 }}>数据预览(前 5 行):</div>
|
||||
<Table
|
||||
dataSource={previewData.previewRows}
|
||||
columns={columns}
|
||||
pagination={false}
|
||||
scroll={{ x: 'max-content' }}
|
||||
size="small"
|
||||
bordered
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
{importing && progress && (
|
||||
<div style={{ padding: '40px 20px' }}>
|
||||
<div style={{ marginBottom: 16, fontSize: 16, fontWeight: 600, textAlign: 'center' }}>
|
||||
正在导入数据...
|
||||
</div>
|
||||
<Progress percent={progressPercent} status="active" />
|
||||
<div style={{ marginTop: 16, textAlign: 'center', color: '#666' }}>
|
||||
已处理 {progress.current} / {progress.total} 行
|
||||
<span style={{ marginLeft: 16, color: '#52c41a' }}>
|
||||
<CheckCircleOutlined /> 成功 {progress.success}
|
||||
</span>
|
||||
{progress.errors > 0 && (
|
||||
<span style={{ marginLeft: 16, color: '#ff4d4f' }}>
|
||||
<CloseCircleOutlined /> 失败 {progress.errors}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{importResult && (
|
||||
<div style={{ padding: 20 }}>
|
||||
<Alert
|
||||
type={importResult.failed === 0 ? 'success' : 'warning'}
|
||||
message="导入完成"
|
||||
description={
|
||||
<div>
|
||||
<div>成功导入 {importResult.success} 行</div>
|
||||
{importResult.failed > 0 && <div>失败 {importResult.failed} 行</div>}
|
||||
</div>
|
||||
}
|
||||
showIcon
|
||||
style={{ marginBottom: 16 }}
|
||||
/>
|
||||
{importResult.errorLogs && importResult.errorLogs.length > 0 && (
|
||||
<>
|
||||
<div style={{ marginBottom: 8, fontWeight: 600, color: '#ff4d4f' }}>错误日志:</div>
|
||||
<div style={{
|
||||
maxHeight: 300,
|
||||
overflow: 'auto',
|
||||
background: '#fff1f0',
|
||||
border: '1px solid #ffccc7',
|
||||
borderRadius: 4,
|
||||
padding: 12,
|
||||
fontSize: 12,
|
||||
fontFamily: 'monospace'
|
||||
}}>
|
||||
{importResult.errorLogs.map((log: string, idx: number) => (
|
||||
<div key={idx} style={{ marginBottom: 4 }}>{log}</div>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default ImportPreviewModal;
|
||||
@@ -1,7 +1,8 @@
|
||||
import React, { useRef, useEffect } from 'react';
|
||||
import { Table, Tag, Button, Tooltip } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, CaretRightOutlined, BugOutlined } from '@ant-design/icons';
|
||||
import { Table, Tag, Button, Tooltip, Empty } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, BugOutlined, ClockCircleOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
|
||||
interface LogPanelProps {
|
||||
height: number;
|
||||
@@ -15,32 +16,52 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
const theme = useStore(state => state.theme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const opacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
|
||||
// Background Helper
|
||||
const getBg = (darkHex: string) => {
|
||||
if (!darkMode) return `rgba(255, 255, 255, ${appearance.opacity ?? 0.95})`;
|
||||
if (!darkMode) return `rgba(255, 255, 255, ${opacity})`;
|
||||
const hex = darkHex.replace('#', '');
|
||||
const r = parseInt(hex.substring(0, 2), 16);
|
||||
const g = parseInt(hex.substring(2, 4), 16);
|
||||
const b = parseInt(hex.substring(4, 6), 16);
|
||||
return `rgba(${r}, ${g}, ${b}, ${appearance.opacity ?? 0.95})`;
|
||||
return `rgba(${r}, ${g}, ${b}, ${opacity})`;
|
||||
};
|
||||
const bgMain = getBg('#1f1f1f');
|
||||
const bgToolbar = getBg('#2a2a2a');
|
||||
const bgMain = getBg('#1d1d1d');
|
||||
const shellOpacity = darkMode ? Math.max(0.18, opacity * 0.82) : Math.max(0.28, opacity * 0.92);
|
||||
const shellOpacityStrong = darkMode ? Math.max(0.22, opacity * 0.9) : Math.max(0.34, opacity * 0.96);
|
||||
const panelDividerColor = darkMode
|
||||
? `rgba(255,255,255,${Math.max(0.04, opacity * 0.10)})`
|
||||
: `rgba(0,0,0,${Math.max(0.04, opacity * 0.08)})`;
|
||||
const panelMutedTextColor = darkMode ? 'rgba(255,255,255,0.62)' : 'rgba(0,0,0,0.58)';
|
||||
const panelShellBg = darkMode
|
||||
? `linear-gradient(180deg, rgba(15,20,30,${shellOpacity}) 0%, rgba(9,13,22,${shellOpacityStrong}) 100%)`
|
||||
: `linear-gradient(180deg, rgba(255,255,255,${shellOpacityStrong}) 0%, rgba(246,248,252,${shellOpacity}) 100%)`;
|
||||
const panelAccentColor = darkMode ? '#ffd666' : '#1677ff';
|
||||
const panelShadow = darkMode
|
||||
? `0 12px 28px rgba(0,0,0,${Math.max(0.05, opacity * 0.18)})`
|
||||
: `0 12px 24px rgba(15,23,42,${Math.max(0.02, opacity * 0.08)})`;
|
||||
const logScrollbarThumb = darkMode
|
||||
? `rgba(255, 255, 255, ${Math.max(0.18, opacity * 0.34)})`
|
||||
: `rgba(0, 0, 0, ${Math.max(0.12, opacity * 0.26)})`;
|
||||
const logScrollbarThumbHover = darkMode
|
||||
? `rgba(255, 255, 255, ${Math.max(0.28, opacity * 0.48)})`
|
||||
: `rgba(0, 0, 0, ${Math.max(0.18, opacity * 0.36)})`;
|
||||
|
||||
const columns = [
|
||||
{
|
||||
title: 'Time',
|
||||
dataIndex: 'timestamp',
|
||||
width: 80,
|
||||
render: (ts: number) => <span style={{ color: '#888', fontSize: '12px' }}>{new Date(ts).toLocaleTimeString()}</span>
|
||||
render: (ts: number) => <span style={{ color: panelMutedTextColor, fontSize: '12px' }}>{new Date(ts).toLocaleTimeString()}</span>
|
||||
},
|
||||
{
|
||||
title: 'Status',
|
||||
dataIndex: 'status',
|
||||
width: 70,
|
||||
render: (status: string) => (
|
||||
<Tag color={status === 'success' ? 'success' : 'error'} style={{ marginRight: 0 }}>
|
||||
<Tag color={status === 'success' ? 'success' : 'error'} style={{ marginRight: 0, borderRadius: 999, paddingInline: 8, fontSize: 11, fontWeight: 700 }}>
|
||||
{status === 'success' ? 'OK' : 'ERR'}
|
||||
</Tag>
|
||||
)
|
||||
@@ -55,10 +76,10 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
title: 'SQL / Message',
|
||||
dataIndex: 'sql',
|
||||
render: (text: string, record: any) => (
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.2' }}>
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.45' }}>
|
||||
<div style={{ color: darkMode ? '#a6e22e' : '#005cc5' }}>{text}</div>
|
||||
{record.message && <div style={{ color: '#ff4d4f', marginTop: 2 }}>{record.message}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: '#888', marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: panelMutedTextColor, marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -67,13 +88,18 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
return (
|
||||
<div style={{
|
||||
height,
|
||||
borderTop: 'none',
|
||||
background: bgMain,
|
||||
backdropFilter: `blur(${appearance.blur ?? 0}px)`,
|
||||
margin: 0,
|
||||
border: `1px solid ${panelDividerColor}`,
|
||||
borderRadius: 14,
|
||||
background: panelShellBg,
|
||||
WebkitBackdropFilter: opacity < 0.999 ? 'blur(14px)' : 'none',
|
||||
boxShadow: panelShadow,
|
||||
backdropFilter: darkMode && opacity < 0.999 ? 'blur(18px)' : 'none',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
position: 'relative',
|
||||
zIndex: 100 // Ensure above other content
|
||||
overflow: 'hidden',
|
||||
zIndex: 100
|
||||
}}>
|
||||
{/* Resize Handle */}
|
||||
<div
|
||||
@@ -91,38 +117,93 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
|
||||
{/* Toolbar */}
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
borderBottom: 'none',
|
||||
padding: '10px 14px',
|
||||
borderBottom: `1px solid ${panelDividerColor}`,
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
height: 32
|
||||
gap: 12,
|
||||
minHeight: 48
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8, fontWeight: 'bold', fontSize: '12px' }}>
|
||||
<BugOutlined /> SQL 执行日志
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 10, minWidth: 0 }}>
|
||||
<div style={{ width: 30, height: 30, borderRadius: 10, display: 'grid', placeItems: 'center', background: darkMode ? `rgba(255,214,102,${Math.max(0.10, Math.min(0.18, opacity * 0.18))})` : `rgba(24,144,255,${Math.max(0.08, Math.min(0.16, opacity * 0.16))})`, color: panelAccentColor, flexShrink: 0 }}>
|
||||
<BugOutlined />
|
||||
</div>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontWeight: 700, fontSize: 13, color: darkMode ? '#f5f7ff' : '#162033' }}>SQL 执行日志</div>
|
||||
<div style={{ fontSize: 12, color: panelMutedTextColor }}>记录执行状态、耗时与错误信息,便于快速回溯。</div>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
|
||||
<Tooltip title="清空日志">
|
||||
<Button type="text" size="small" icon={<ClearOutlined />} onClick={clearSqlLogs} />
|
||||
<Button type="text" size="small" icon={<ClearOutlined />} onClick={clearSqlLogs} style={{ color: panelMutedTextColor }} />
|
||||
</Tooltip>
|
||||
<Tooltip title="关闭面板">
|
||||
<Button type="text" size="small" icon={<CloseOutlined />} onClick={onClose} />
|
||||
<Button type="text" size="small" icon={<CloseOutlined />} onClick={onClose} style={{ color: panelMutedTextColor }} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* List */}
|
||||
<div style={{ flex: 1, overflow: 'auto' }}>
|
||||
<Table
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
showHeader={false}
|
||||
// scroll={{ y: height - 32 }} // Let flex handle it
|
||||
/>
|
||||
<div className="log-panel-scroll" style={{ flex: 1, overflow: 'auto', padding: '8px 10px 10px' }}>
|
||||
{sqlLogs.length === 0 ? (
|
||||
<div style={{ height: '100%', minHeight: 160, display: 'grid', placeItems: 'center' }}>
|
||||
<Empty
|
||||
image={Empty.PRESENTED_IMAGE_SIMPLE}
|
||||
description={<span style={{ color: panelMutedTextColor }}>暂无 SQL 执行日志</span>}
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
<Table
|
||||
className="log-panel-table"
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
showHeader={false}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<style>{`
|
||||
.log-panel-scroll {
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: ${logScrollbarThumb} transparent;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar-track,
|
||||
.log-panel-scroll::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar-thumb {
|
||||
background: ${logScrollbarThumb};
|
||||
border-radius: 8px;
|
||||
border: 2px solid transparent;
|
||||
background-clip: padding-box;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar-thumb:hover {
|
||||
background: ${logScrollbarThumbHover};
|
||||
background-clip: padding-box;
|
||||
}
|
||||
.log-panel-table .ant-table,
|
||||
.log-panel-table .ant-table-container,
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
background: transparent !important;
|
||||
}
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
padding: 8px 10px !important;
|
||||
border-bottom: 1px solid ${panelDividerColor} !important;
|
||||
}
|
||||
.log-panel-table .ant-table-tbody > tr:last-child > td {
|
||||
border-bottom: none !important;
|
||||
}
|
||||
.log-panel-table .ant-table-row:hover > td {
|
||||
background: ${darkMode ? 'rgba(255,255,255,0.03)' : 'rgba(16,24,40,0.03)'} !important;
|
||||
}
|
||||
`}</style>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import React, { useState, useEffect, useRef, useMemo } from 'react';
|
||||
import Editor, { OnMount } from '@monaco-editor/react';
|
||||
import { Button, message, Modal, Input, Form, Dropdown, MenuProps, Tooltip, Select, Tabs } from 'antd';
|
||||
import { PlayCircleOutlined, SaveOutlined, FormatPainterOutlined, SettingOutlined, CloseOutlined } from '@ant-design/icons';
|
||||
import { PlayCircleOutlined, SaveOutlined, FormatPainterOutlined, SettingOutlined, CloseOutlined, StopOutlined } from '@ant-design/icons';
|
||||
import { format } from 'sql-formatter';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetTables, DBGetAllColumns, DBGetDatabases, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import { DBQuery, DBQueryWithCancel, DBGetTables, DBGetAllColumns, DBGetDatabases, DBGetColumns, CancelQuery, GenerateQueryID } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
import { convertMongoShellToJsonCommand } from '../utils/mongodb';
|
||||
import { getShortcutDisplay, isEditableElement, isShortcutMatch } from '../utils/shortcuts';
|
||||
|
||||
const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [query, setQuery] = useState(tab.query || 'SELECT * FROM ');
|
||||
@@ -14,6 +18,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
type ResultSet = {
|
||||
key: string;
|
||||
sql: string;
|
||||
exportSql?: string;
|
||||
rows: any[];
|
||||
columns: string[];
|
||||
tableName?: string;
|
||||
@@ -28,7 +33,9 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [activeResultKey, setActiveResultKey] = useState<string>('');
|
||||
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [currentQueryId, setCurrentQueryId] = useState<string>('');
|
||||
const runSeqRef = useRef(0);
|
||||
const currentQueryIdRef = useRef('');
|
||||
const [isSaveModalOpen, setIsSaveModalOpen] = useState(false);
|
||||
const [saveForm] = Form.useForm();
|
||||
|
||||
@@ -41,12 +48,17 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [editorHeight, setEditorHeight] = useState(300);
|
||||
const editorRef = useRef<any>(null);
|
||||
const monacoRef = useRef<any>(null);
|
||||
const lastExternalQueryRef = useRef<string>(tab.query || '');
|
||||
const dragRef = useRef<{ startY: number, startHeight: number } | null>(null);
|
||||
const tablesRef = useRef<{dbName: string, tableName: string}[]>([]); // Store tables for autocomplete (cross-db)
|
||||
const allColumnsRef = useRef<{dbName: string, tableName: string, name: string, type: string}[]>([]); // Store all columns (cross-db)
|
||||
const visibleDbsRef = useRef<string[]>([]); // Store visible databases for cross-db intellisense
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const queryCapableConnections = useMemo(
|
||||
() => connections.filter(c => getDataSourceCapabilities(c.config).supportsQueryEditor),
|
||||
[connections]
|
||||
);
|
||||
const addSqlLog = useStore(state => state.addSqlLog);
|
||||
const currentConnectionIdRef = useRef(currentConnectionId);
|
||||
const currentDbRef = useRef(currentDb);
|
||||
@@ -59,11 +71,23 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const setSqlFormatOptions = useStore(state => state.setSqlFormatOptions);
|
||||
const queryOptions = useStore(state => state.queryOptions);
|
||||
const setQueryOptions = useStore(state => state.setQueryOptions);
|
||||
const shortcutOptions = useStore(state => state.shortcutOptions);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
|
||||
useEffect(() => {
|
||||
currentConnectionIdRef.current = currentConnectionId;
|
||||
}, [currentConnectionId]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!queryCapableConnections.some(c => c.id === currentConnectionId)) {
|
||||
const fallback = queryCapableConnections[0]?.id || '';
|
||||
if (fallback && fallback !== currentConnectionId) {
|
||||
setCurrentConnectionId(fallback);
|
||||
setCurrentDb('');
|
||||
}
|
||||
}
|
||||
}, [queryCapableConnections, currentConnectionId]);
|
||||
|
||||
useEffect(() => {
|
||||
currentDbRef.current = currentDb;
|
||||
}, [currentDb]);
|
||||
@@ -72,10 +96,30 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
connectionsRef.current = connections;
|
||||
}, [connections]);
|
||||
|
||||
const getCurrentQuery = () => {
|
||||
const val = editorRef.current?.getValue?.();
|
||||
if (typeof val === 'string') return val;
|
||||
return query || '';
|
||||
};
|
||||
|
||||
const syncQueryToEditor = (sql: string) => {
|
||||
const next = sql || '';
|
||||
setQuery(next);
|
||||
const editor = editorRef.current;
|
||||
if (editor && editor.getValue?.() !== next) {
|
||||
editor.setValue(next);
|
||||
}
|
||||
};
|
||||
|
||||
// If opening a saved query, load its SQL
|
||||
useEffect(() => {
|
||||
if (tab.query) setQuery(tab.query);
|
||||
}, [tab.query]);
|
||||
const incoming = tab.query || '';
|
||||
if (incoming === lastExternalQueryRef.current) {
|
||||
return;
|
||||
}
|
||||
lastExternalQueryRef.current = incoming;
|
||||
syncQueryToEditor(incoming || 'SELECT * FROM ');
|
||||
}, [tab.id, tab.query]);
|
||||
|
||||
// Fetch Database List
|
||||
useEffect(() => {
|
||||
@@ -170,6 +214,17 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
fetchMetadata();
|
||||
}, [currentConnectionId, connections, dbList]); // dbList 变化时触发重新加载
|
||||
|
||||
// Query ID management helpers
|
||||
const setQueryId = (id: string) => {
|
||||
currentQueryIdRef.current = id;
|
||||
setCurrentQueryId(id);
|
||||
};
|
||||
|
||||
const clearQueryId = () => {
|
||||
currentQueryIdRef.current = '';
|
||||
setCurrentQueryId('');
|
||||
};
|
||||
|
||||
// Handle Resizing
|
||||
const handleMouseDown = (e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
@@ -196,6 +251,9 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
editorRef.current = editor;
|
||||
monacoRef.current = monaco;
|
||||
|
||||
// 应用透明主题(主题已在 main.tsx 全局注册)
|
||||
monaco.editor.setTheme(darkMode ? 'transparent-dark' : 'transparent-light');
|
||||
|
||||
monaco.languages.registerCompletionItemProvider('sql', {
|
||||
triggerCharacters: ['.'],
|
||||
provideCompletionItems: async (model: any, position: any) => {
|
||||
@@ -235,6 +293,19 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
return parts[parts.length - 1] || raw;
|
||||
};
|
||||
|
||||
const splitSchemaAndTable = (qualified: string): { schema: string; table: string } => {
|
||||
const raw = normalizeQualifiedName(qualified);
|
||||
if (!raw) return { schema: '', table: '' };
|
||||
const parts = raw.split('.').filter(Boolean);
|
||||
if (parts.length >= 2) {
|
||||
return {
|
||||
schema: parts[parts.length - 2] || '',
|
||||
table: parts[parts.length - 1] || '',
|
||||
};
|
||||
}
|
||||
return { schema: '', table: parts[0] || '' };
|
||||
};
|
||||
|
||||
const buildConnConfig = () => {
|
||||
const connId = currentConnectionIdRef.current;
|
||||
const conn = connectionsRef.current.find(c => c.id === connId);
|
||||
@@ -307,13 +378,14 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (qualifierMatch) {
|
||||
const qualifier = stripQuotes(qualifierMatch[1]);
|
||||
const prefix = (qualifierMatch[2] || '').toLowerCase();
|
||||
const qualifierLower = qualifier.toLowerCase();
|
||||
|
||||
// 首先检查 qualifier 是否是数据库名(跨库表提示)
|
||||
const visibleDbs = visibleDbsRef.current;
|
||||
if (visibleDbs.some(db => db.toLowerCase() === qualifier.toLowerCase())) {
|
||||
if (visibleDbs.some(db => db.toLowerCase() === qualifierLower)) {
|
||||
// qualifier 是数据库名,提示该库的表
|
||||
const tables = tablesRef.current.filter(t =>
|
||||
(t.dbName || '').toLowerCase() === qualifier.toLowerCase()
|
||||
(t.dbName || '').toLowerCase() === qualifierLower
|
||||
);
|
||||
const filtered = prefix
|
||||
? tables.filter(t => (t.tableName || '').toLowerCase().startsWith(prefix))
|
||||
@@ -330,6 +402,34 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
return { suggestions };
|
||||
}
|
||||
|
||||
// qualifier 是 schema(如 dbo/public)时,仅补全表名,避免输入 dbo. 后再补成 dbo.dbo.table
|
||||
const schemaTables = tablesRef.current
|
||||
.map(t => {
|
||||
const parsed = splitSchemaAndTable(t.tableName || '');
|
||||
return {
|
||||
dbName: t.dbName || '',
|
||||
schema: parsed.schema,
|
||||
table: parsed.table,
|
||||
};
|
||||
})
|
||||
.filter(t => t.schema.toLowerCase() === qualifierLower && !!t.table);
|
||||
|
||||
if (schemaTables.length > 0) {
|
||||
const filtered = prefix
|
||||
? schemaTables.filter(t => t.table.toLowerCase().startsWith(prefix))
|
||||
: schemaTables;
|
||||
|
||||
const suggestions = filtered.map(t => ({
|
||||
label: t.table,
|
||||
kind: monaco.languages.CompletionItemKind.Class,
|
||||
insertText: t.table,
|
||||
detail: `Table (${t.dbName}${t.schema ? '.' + t.schema : ''})`,
|
||||
range,
|
||||
sortText: '0' + t.table
|
||||
}));
|
||||
return { suggestions };
|
||||
}
|
||||
|
||||
// 否则检查是否是表别名或表名,提示列
|
||||
const reserved = new Set([
|
||||
'where', 'on', 'group', 'order', 'limit', 'having',
|
||||
@@ -478,8 +578,8 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const handleFormat = () => {
|
||||
try {
|
||||
const formatted = format(query, { language: 'mysql', keywordCase: sqlFormatOptions.keywordCase });
|
||||
setQuery(formatted);
|
||||
const formatted = format(getCurrentQuery(), { language: 'mysql', keywordCase: sqlFormatOptions.keywordCase });
|
||||
syncQueryToEditor(formatted);
|
||||
} catch (e) {
|
||||
message.error("格式化失败: SQL 语法可能有误");
|
||||
}
|
||||
@@ -498,6 +598,12 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
icon: sqlFormatOptions.keywordCase === 'lower' ? '✓' : undefined,
|
||||
onClick: () => setSqlFormatOptions({ keywordCase: 'lower' })
|
||||
},
|
||||
{ type: 'divider' },
|
||||
{
|
||||
key: 'shortcut-settings',
|
||||
label: '快捷键管理...',
|
||||
onClick: () => window.dispatchEvent(new CustomEvent('gonavi:open-shortcut-settings')),
|
||||
},
|
||||
];
|
||||
|
||||
const splitSQLStatements = (sql: string): string[] => {
|
||||
@@ -919,7 +1025,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const applyAutoLimit = (sql: string, dbType: string, maxRows: number): { sql: string; applied: boolean; maxRows: number } => {
|
||||
const normalizedType = (dbType || 'mysql').toLowerCase();
|
||||
const supportsLimit = normalizedType === 'mysql' || normalizedType === 'postgres' || normalizedType === 'kingbase' || normalizedType === 'sqlite' || normalizedType === '';
|
||||
const supportsLimit = normalizedType === 'mysql' || normalizedType === 'mariadb' || normalizedType === 'diros' || normalizedType === 'sphinx' || normalizedType === 'postgres' || normalizedType === 'kingbase' || normalizedType === 'sqlite' || normalizedType === 'duckdb' || normalizedType === 'tdengine' || normalizedType === 'clickhouse' || normalizedType === '';
|
||||
if (!supportsLimit) return { sql, applied: false, maxRows };
|
||||
if (!Number.isFinite(maxRows) || maxRows <= 0) return { sql, applied: false, maxRows };
|
||||
|
||||
@@ -960,11 +1066,22 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
};
|
||||
|
||||
const handleRun = async () => {
|
||||
if (!query.trim()) return;
|
||||
const currentQuery = getCurrentQuery();
|
||||
if (!currentQuery.trim()) return;
|
||||
if (!currentDb) {
|
||||
message.error("请先选择数据库");
|
||||
return;
|
||||
}
|
||||
// 如果已有查询在运行,先取消它
|
||||
if (currentQueryIdRef.current) {
|
||||
try {
|
||||
await CancelQuery(currentQueryIdRef.current);
|
||||
} catch (error) {
|
||||
// 忽略取消错误,可能查询已完成
|
||||
}
|
||||
// 清除旧查询ID
|
||||
clearQueryId();
|
||||
}
|
||||
const runSeq = ++runSeqRef.current;
|
||||
setLoading(true);
|
||||
const runStartTime = Date.now();
|
||||
@@ -974,6 +1091,12 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (runSeqRef.current === runSeq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
const connCaps = getDataSourceCapabilities(conn.config);
|
||||
if (!connCaps.supportsQueryEditor) {
|
||||
message.error("当前数据源不支持 SQL 查询编辑器,请使用对应专用页面。");
|
||||
if (runSeqRef.current === runSeq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const config = {
|
||||
...conn.config,
|
||||
@@ -985,8 +1108,16 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
};
|
||||
|
||||
try {
|
||||
const rawSQL = getSelectedSQL() || query;
|
||||
const statements = splitSQLStatements(rawSQL);
|
||||
const rawSQL = getSelectedSQL() || currentQuery;
|
||||
const dbType = String((config as any).type || 'mysql');
|
||||
const normalizedDbType = dbType.trim().toLowerCase();
|
||||
const normalizedRawSQL = String(rawSQL || '').replace(/;/g, ';');
|
||||
const splitInput = normalizedDbType === 'mongodb'
|
||||
? normalizedRawSQL
|
||||
.replace(/^\s*\/\/.*$/gm, '')
|
||||
.replace(/^\s*#.*$/gm, '')
|
||||
: normalizedRawSQL;
|
||||
const statements = splitSQLStatements(splitInput);
|
||||
if (statements.length === 0) {
|
||||
message.info('没有可执行的 SQL。');
|
||||
setResultSets([]);
|
||||
@@ -996,7 +1127,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const nextResultSets: ResultSet[] = [];
|
||||
const maxRows = Number(queryOptions?.maxRows) || 0;
|
||||
const dbType = String((config as any).type || 'mysql');
|
||||
const forceReadOnlyResult = connCaps.forceReadOnlyQueryResult;
|
||||
const wantsLimitProbe = Number.isFinite(maxRows) && maxRows > 0;
|
||||
const probeLimit = wantsLimitProbe ? (maxRows + 1) : 0;
|
||||
let anyTruncated = false;
|
||||
@@ -1009,9 +1140,35 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const limitApplied = shouldAutoLimit && wantsLimitProbe;
|
||||
const limited = limitApplied ? applyAutoLimit(rawStatement, dbType, probeLimit) : { sql: rawStatement, applied: false, maxRows: probeLimit };
|
||||
const executedSql = limited.sql;
|
||||
let executedSql = limited.sql;
|
||||
if (String(dbType || '').trim().toLowerCase() === 'mongodb') {
|
||||
const shellConvert = convertMongoShellToJsonCommand(executedSql);
|
||||
if (shellConvert.recognized) {
|
||||
if (shellConvert.error) {
|
||||
const prefix = statements.length > 1 ? `第 ${idx + 1} 条语句执行失败:` : '';
|
||||
message.error(prefix + shellConvert.error);
|
||||
setResultSets([]);
|
||||
setActiveResultKey('');
|
||||
return;
|
||||
}
|
||||
if (shellConvert.command) {
|
||||
executedSql = shellConvert.command;
|
||||
}
|
||||
}
|
||||
}
|
||||
const startTime = Date.now();
|
||||
const res = await DBQuery(config as any, currentDb, executedSql);
|
||||
|
||||
// Generate query ID for cancellation using backend UUID with fallback
|
||||
let queryId: string;
|
||||
try {
|
||||
queryId = await GenerateQueryID();
|
||||
} catch (error) {
|
||||
console.warn('GenerateQueryID failed, using local UUID fallback:', error);
|
||||
queryId = 'query-' + uuidv4();
|
||||
}
|
||||
setQueryId(queryId);
|
||||
|
||||
const res = await DBQueryWithCancel(config as any, currentDb, executedSql, queryId);
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
addSqlLog({
|
||||
@@ -1026,6 +1183,32 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
|
||||
if (!res.success) {
|
||||
// 检查是否为查询取消错误
|
||||
const errorMsg = res.message.toLowerCase();
|
||||
const isCancelledError = errorMsg.includes('context canceled') ||
|
||||
errorMsg.includes('查询已取消') ||
|
||||
errorMsg.includes('canceled') ||
|
||||
errorMsg.includes('cancelled') ||
|
||||
errorMsg.includes('statement canceled') ||
|
||||
errorMsg.includes('sql: statement canceled');
|
||||
|
||||
// 确保不是超时错误
|
||||
const isTimeoutError = errorMsg.includes('context deadline exceeded') ||
|
||||
errorMsg.includes('timeout') ||
|
||||
errorMsg.includes('超时') ||
|
||||
errorMsg.includes('deadline exceeded');
|
||||
|
||||
if (isCancelledError && !isTimeoutError) {
|
||||
// 查询已被用户取消,不显示错误消息,清理状态
|
||||
setResultSets([]);
|
||||
setActiveResultKey('');
|
||||
// 清除查询ID,与handleCancel保持一致
|
||||
if (currentQueryIdRef.current) {
|
||||
clearQueryId();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const prefix = statements.length > 1 ? `第 ${idx + 1} 条语句执行失败:` : '';
|
||||
message.error(prefix + res.message);
|
||||
setResultSets([]);
|
||||
@@ -1053,12 +1236,15 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const tableMatch = rawStatement.match(/^\s*SELECT\s+\*\s+FROM\s+[`"]?(\w+)[`"]?\s*(?:WHERE.*)?(?:ORDER BY.*)?(?:LIMIT.*)?$/i);
|
||||
if (tableMatch) {
|
||||
simpleTableName = tableMatch[1];
|
||||
pendingPk.push({ resultKey: `result-${idx + 1}`, tableName: simpleTableName });
|
||||
if (!forceReadOnlyResult) {
|
||||
pendingPk.push({ resultKey: `result-${idx + 1}`, tableName: simpleTableName });
|
||||
}
|
||||
}
|
||||
|
||||
nextResultSets.push({
|
||||
key: `result-${idx + 1}`,
|
||||
sql: rawStatement,
|
||||
exportSql: limited.applied ? applyAutoLimit(rawStatement, dbType, Math.max(1, Number(maxRows) || 1)).sql : rawStatement,
|
||||
rows,
|
||||
columns: cols,
|
||||
tableName: simpleTableName,
|
||||
@@ -1075,6 +1261,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
nextResultSets.push({
|
||||
key: `result-${idx + 1}`,
|
||||
sql: rawStatement,
|
||||
exportSql: rawStatement,
|
||||
rows: [row],
|
||||
columns: ['affectedRows'],
|
||||
pkColumns: [],
|
||||
@@ -1127,16 +1314,82 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setActiveResultKey('');
|
||||
} finally {
|
||||
if (runSeqRef.current === runSeq) setLoading(false);
|
||||
// Clear query ID after execution completes
|
||||
clearQueryId();
|
||||
}
|
||||
};
|
||||
|
||||
const handleCancel = async () => {
|
||||
if (!currentQueryIdRef.current) {
|
||||
message.warning('没有正在运行的查询可取消');
|
||||
return;
|
||||
}
|
||||
const queryIdToCancel = currentQueryIdRef.current;
|
||||
try {
|
||||
const res = await CancelQuery(queryIdToCancel);
|
||||
if (res.success) {
|
||||
message.success('查询已取消');
|
||||
// Clear query ID after successful cancellation
|
||||
if (currentQueryIdRef.current === queryIdToCancel) {
|
||||
clearQueryId()
|
||||
}
|
||||
} else {
|
||||
message.warning(res.message);
|
||||
}
|
||||
} catch (error: any) {
|
||||
message.error('取消查询失败: ' + error.message);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const binding = shortcutOptions.runQuery;
|
||||
if (!binding?.enabled || !binding.combo) {
|
||||
return;
|
||||
}
|
||||
|
||||
const handleRunShortcut = (event: KeyboardEvent) => {
|
||||
if (activeTabId !== tab.id) {
|
||||
return;
|
||||
}
|
||||
if (!isShortcutMatch(event, binding.combo)) {
|
||||
return;
|
||||
}
|
||||
const editorHasFocus = !!editorRef.current?.hasTextFocus?.();
|
||||
if (!editorHasFocus && !isEditableElement(event.target)) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
void handleRun();
|
||||
};
|
||||
|
||||
window.addEventListener('keydown', handleRunShortcut);
|
||||
return () => {
|
||||
window.removeEventListener('keydown', handleRunShortcut);
|
||||
};
|
||||
}, [activeTabId, tab.id, shortcutOptions.runQuery, handleRun]);
|
||||
|
||||
useEffect(() => {
|
||||
const handleRunActiveQuery = () => {
|
||||
if (activeTabId !== tab.id) {
|
||||
return;
|
||||
}
|
||||
void handleRun();
|
||||
};
|
||||
|
||||
window.addEventListener('gonavi:run-active-query', handleRunActiveQuery as EventListener);
|
||||
return () => {
|
||||
window.removeEventListener('gonavi:run-active-query', handleRunActiveQuery as EventListener);
|
||||
};
|
||||
}, [activeTabId, tab.id, handleRun]);
|
||||
|
||||
const handleSave = async () => {
|
||||
try {
|
||||
const values = await saveForm.validateFields();
|
||||
saveQuery({
|
||||
id: tab.id.startsWith('saved-') ? tab.id : `saved-${Date.now()}`,
|
||||
name: values.name,
|
||||
sql: query,
|
||||
sql: getCurrentQuery(),
|
||||
connectionId: currentConnectionId,
|
||||
dbName: currentDb || tab.dbName || '',
|
||||
createdAt: Date.now()
|
||||
@@ -1207,7 +1460,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
transition: none !important;
|
||||
}
|
||||
`}</style>
|
||||
<div style={{ padding: '8px', borderBottom: '1px solid #eee', display: 'flex', gap: '8px', flexShrink: 0, alignItems: 'center' }}>
|
||||
<div style={{ padding: '8px', display: 'flex', gap: '8px', flexShrink: 0, alignItems: 'center' }}>
|
||||
<Select
|
||||
style={{ width: 150 }}
|
||||
placeholder="选择连接"
|
||||
@@ -1216,7 +1469,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setCurrentConnectionId(val);
|
||||
setCurrentDb('');
|
||||
}}
|
||||
options={connections.map(c => ({ label: c.name, value: c.id }))}
|
||||
options={queryCapableConnections.map(c => ({ label: c.name, value: c.id }))}
|
||||
showSearch
|
||||
/>
|
||||
<Select
|
||||
@@ -1241,9 +1494,24 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
]}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Button type="primary" icon={<PlayCircleOutlined />} onClick={handleRun} loading={loading}>
|
||||
运行
|
||||
</Button>
|
||||
<Button.Group>
|
||||
<Tooltip
|
||||
title={
|
||||
shortcutOptions.runQuery?.enabled && shortcutOptions.runQuery?.combo
|
||||
? `运行(${getShortcutDisplay(shortcutOptions.runQuery.combo)})`
|
||||
: '运行'
|
||||
}
|
||||
>
|
||||
<Button type="primary" icon={<PlayCircleOutlined />} onClick={handleRun} loading={loading}>
|
||||
运行
|
||||
</Button>
|
||||
</Tooltip>
|
||||
{loading && (
|
||||
<Button type="primary" danger icon={<StopOutlined />} onClick={handleCancel}>
|
||||
停止
|
||||
</Button>
|
||||
)}
|
||||
</Button.Group>
|
||||
<Button icon={<SaveOutlined />} onClick={() => {
|
||||
saveForm.setFieldsValue({ name: tab.title.replace('Query (', '').replace(')', '') });
|
||||
setIsSaveModalOpen(true);
|
||||
@@ -1261,12 +1529,12 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
</Button.Group>
|
||||
</div>
|
||||
|
||||
<div style={{ height: editorHeight, minHeight: '100px', borderBottom: '1px solid #eee' }}>
|
||||
<div style={{ height: editorHeight, minHeight: '100px' }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
defaultLanguage="sql"
|
||||
theme={darkMode ? "vs-dark" : "light"}
|
||||
value={query}
|
||||
theme={darkMode ? "transparent-dark" : "transparent-light"}
|
||||
defaultValue={query}
|
||||
onChange={(val) => setQuery(val || '')}
|
||||
onMount={handleEditorDidMount}
|
||||
options={{
|
||||
@@ -1283,7 +1551,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
style={{
|
||||
height: '5px',
|
||||
cursor: 'row-resize',
|
||||
background: darkMode ? '#333' : '#f0f0f0',
|
||||
background: darkMode ? 'rgba(255,255,255,0.08)' : 'rgba(0,0,0,0.04)',
|
||||
flexShrink: 0,
|
||||
zIndex: 10
|
||||
}}
|
||||
@@ -1326,6 +1594,8 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
columnNames={rs.columns}
|
||||
loading={loading}
|
||||
tableName={rs.tableName}
|
||||
exportScope="queryResult"
|
||||
resultSql={rs.exportSql || rs.sql}
|
||||
dbName={currentDb}
|
||||
connectionId={currentConnectionId}
|
||||
pkColumns={rs.pkColumns}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,15 +1,91 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import React, { useMemo, useRef, useState } from 'react';
|
||||
import { Tabs, Dropdown } from 'antd';
|
||||
import type { MenuProps } from 'antd';
|
||||
import type { MenuProps, TabsProps } from 'antd';
|
||||
import { DndContext, PointerSensor, closestCenter, useSensor, useSensors } from '@dnd-kit/core';
|
||||
import type { DragStartEvent, DragEndEvent } from '@dnd-kit/core';
|
||||
import { SortableContext, useSortable, horizontalListSortingStrategy } from '@dnd-kit/sortable';
|
||||
import { CSS } from '@dnd-kit/utilities';
|
||||
import { restrictToHorizontalAxis } from '@dnd-kit/modifiers';
|
||||
import { useStore } from '../store';
|
||||
import DataViewer from './DataViewer';
|
||||
import QueryEditor from './QueryEditor';
|
||||
import TableDesigner from './TableDesigner';
|
||||
import RedisViewer from './RedisViewer';
|
||||
import RedisCommandEditor from './RedisCommandEditor';
|
||||
import TriggerViewer from './TriggerViewer';
|
||||
import DefinitionViewer from './DefinitionViewer';
|
||||
import type { TabData } from '../types';
|
||||
|
||||
const detectConnectionEnvLabel = (connectionName: string): string | null => {
|
||||
const tokens = connectionName.toLowerCase().split(/[^a-z0-9]+/).filter(Boolean);
|
||||
if (tokens.includes('prod') || tokens.includes('production')) return 'PROD';
|
||||
if (tokens.includes('uat')) return 'UAT';
|
||||
if (tokens.includes('dev') || tokens.includes('development')) return 'DEV';
|
||||
if (tokens.includes('sit')) return 'SIT';
|
||||
if (tokens.includes('stg') || tokens.includes('stage') || tokens.includes('staging') || tokens.includes('pre')) return 'STG';
|
||||
if (tokens.includes('test') || tokens.includes('qa')) return 'TEST';
|
||||
return null;
|
||||
};
|
||||
|
||||
const buildTabDisplayTitle = (tab: TabData, connectionName: string | undefined): string => {
|
||||
if (tab.type !== 'table' && tab.type !== 'design') return tab.title;
|
||||
if (!connectionName) return tab.title;
|
||||
const prefix = detectConnectionEnvLabel(connectionName) || connectionName;
|
||||
return `[${prefix}] ${tab.title}`;
|
||||
};
|
||||
|
||||
type SortableTabLabelProps = {
|
||||
displayTitle: string;
|
||||
menuItems: MenuProps['items'];
|
||||
};
|
||||
|
||||
const SortableTabLabel: React.FC<SortableTabLabelProps> = ({
|
||||
displayTitle,
|
||||
menuItems,
|
||||
}) => {
|
||||
return (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span
|
||||
className="tab-dnd-label"
|
||||
onContextMenu={(e) => e.preventDefault()}
|
||||
title="拖拽调整标签顺序"
|
||||
>
|
||||
{displayTitle}
|
||||
</span>
|
||||
</Dropdown>
|
||||
);
|
||||
};
|
||||
|
||||
type DraggableTabNodeProps = {
|
||||
node: React.ReactElement;
|
||||
};
|
||||
|
||||
const DraggableTabNode: React.FC<DraggableTabNodeProps> = ({ node }) => {
|
||||
const tabId = String(node.key || '').trim();
|
||||
const { attributes, listeners, setNodeRef, transform, transition, isDragging } = useSortable({ id: tabId });
|
||||
const style: React.CSSProperties = {
|
||||
...(node.props.style || {}),
|
||||
transform: CSS.Transform.toString(transform),
|
||||
transition: transition || 'transform 180ms cubic-bezier(0.22, 1, 0.36, 1)',
|
||||
opacity: isDragging ? 0.88 : 1,
|
||||
cursor: isDragging ? 'grabbing' : 'grab',
|
||||
touchAction: 'none',
|
||||
zIndex: isDragging ? 2 : node.props.style?.zIndex,
|
||||
};
|
||||
|
||||
return React.cloneElement(node, {
|
||||
ref: setNodeRef,
|
||||
style,
|
||||
...attributes,
|
||||
...listeners,
|
||||
className: `${node.props.className || ''} tab-dnd-node${isDragging ? ' is-dragging' : ''}`,
|
||||
});
|
||||
};
|
||||
|
||||
const TabManager: React.FC = () => {
|
||||
const tabs = useStore(state => state.tabs);
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
const setActiveTab = useStore(state => state.setActiveTab);
|
||||
const closeTab = useStore(state => state.closeTab);
|
||||
@@ -17,6 +93,15 @@ const TabManager: React.FC = () => {
|
||||
const closeTabsToLeft = useStore(state => state.closeTabsToLeft);
|
||||
const closeTabsToRight = useStore(state => state.closeTabsToRight);
|
||||
const closeAllTabs = useStore(state => state.closeAllTabs);
|
||||
const moveTab = useStore(state => state.moveTab);
|
||||
const tabsNavBorderColor = theme === 'dark' ? 'rgba(255, 255, 255, 0.09)' : 'rgba(0, 0, 0, 0.08)';
|
||||
const [draggingTabId, setDraggingTabId] = useState<string | null>(null);
|
||||
const suppressClickUntilRef = useRef<number>(0);
|
||||
const sensors = useSensors(
|
||||
useSensor(PointerSensor, {
|
||||
activationConstraint: { distance: 8 },
|
||||
})
|
||||
);
|
||||
|
||||
const onChange = (newActiveKey: string) => {
|
||||
setActiveTab(newActiveKey);
|
||||
@@ -28,9 +113,43 @@ const TabManager: React.FC = () => {
|
||||
}
|
||||
};
|
||||
|
||||
const handleDragStart = (event: DragStartEvent) => {
|
||||
const sourceId = String(event.active.id || '').trim();
|
||||
setDraggingTabId(sourceId || null);
|
||||
};
|
||||
|
||||
const handleDragEnd = (event: DragEndEvent) => {
|
||||
const sourceId = String(event.active.id || '').trim();
|
||||
const targetId = String(event.over?.id || '').trim();
|
||||
setDraggingTabId(null);
|
||||
if (!sourceId || !targetId || sourceId === targetId) {
|
||||
return;
|
||||
}
|
||||
suppressClickUntilRef.current = Date.now() + 120;
|
||||
moveTab(sourceId, targetId);
|
||||
};
|
||||
|
||||
const handleDragCancel = () => {
|
||||
setDraggingTabId(null);
|
||||
};
|
||||
|
||||
const tabIds = useMemo(() => tabs.map((tab) => tab.id), [tabs]);
|
||||
|
||||
const renderTabBar: TabsProps['renderTabBar'] = (tabBarProps, DefaultTabBar) => (
|
||||
<DefaultTabBar {...tabBarProps}>
|
||||
{(node) => <DraggableTabNode key={node.key} node={node} />}
|
||||
</DefaultTabBar>
|
||||
);
|
||||
|
||||
const items = useMemo(() => tabs.map((tab, index) => {
|
||||
const connectionName = connections.find((conn) => conn.id === tab.connectionId)?.name;
|
||||
const displayTitle = buildTabDisplayTitle(tab, connectionName);
|
||||
const keepMountedWhenInactive = tab.type === 'query' || tab.type === 'redis-command';
|
||||
const shouldRenderContent = activeTabId === tab.id || keepMountedWhenInactive;
|
||||
let content;
|
||||
if (tab.type === 'query') {
|
||||
if (!shouldRenderContent) {
|
||||
content = null;
|
||||
} else if (tab.type === 'query') {
|
||||
content = <QueryEditor tab={tab} />;
|
||||
} else if (tab.type === 'table') {
|
||||
content = <DataViewer tab={tab} />;
|
||||
@@ -40,6 +159,10 @@ const TabManager: React.FC = () => {
|
||||
content = <RedisViewer connectionId={tab.connectionId} redisDB={tab.redisDB ?? 0} />;
|
||||
} else if (tab.type === 'redis-command') {
|
||||
content = <RedisCommandEditor connectionId={tab.connectionId} redisDB={tab.redisDB ?? 0} />;
|
||||
} else if (tab.type === 'trigger') {
|
||||
content = <TriggerViewer tab={tab} />;
|
||||
} else if (tab.type === 'view-def' || tab.type === 'routine-def') {
|
||||
content = <DefinitionViewer tab={tab} />;
|
||||
}
|
||||
|
||||
const menuItems: MenuProps['items'] = [
|
||||
@@ -72,14 +195,15 @@ const TabManager: React.FC = () => {
|
||||
|
||||
return {
|
||||
label: (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span onContextMenu={(e) => e.preventDefault()}>{tab.title}</span>
|
||||
</Dropdown>
|
||||
<SortableTabLabel
|
||||
displayTitle={displayTitle}
|
||||
menuItems={menuItems}
|
||||
/>
|
||||
),
|
||||
key: tab.id,
|
||||
children: content,
|
||||
};
|
||||
}), [tabs, closeOtherTabs, closeTabsToLeft, closeTabsToRight, closeAllTabs]);
|
||||
}), [tabs, connections, activeTabId, closeOtherTabs, closeTabsToLeft, closeTabsToRight, closeAllTabs]);
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -88,6 +212,7 @@ const TabManager: React.FC = () => {
|
||||
height: 100%;
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
@@ -98,6 +223,7 @@ const TabManager: React.FC = () => {
|
||||
.main-tabs .ant-tabs-content-holder {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
@@ -105,12 +231,14 @@ const TabManager: React.FC = () => {
|
||||
.main-tabs .ant-tabs-content {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.main-tabs .ant-tabs-tabpane {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
@@ -118,23 +246,69 @@ const TabManager: React.FC = () => {
|
||||
.main-tabs .ant-tabs-tabpane > div {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
}
|
||||
.main-tabs .ant-tabs-tabpane-hidden {
|
||||
display: none !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-nav::before {
|
||||
border-bottom: none !important;
|
||||
border-bottom: 1px solid ${tabsNavBorderColor} !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-tab {
|
||||
transition: transform 180ms cubic-bezier(0.22, 1, 0.36, 1), background-color 120ms ease;
|
||||
}
|
||||
.main-tabs .tab-dnd-label {
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
max-width: 100%;
|
||||
}
|
||||
.main-tabs .tab-dnd-node.is-dragging,
|
||||
.main-tabs .tab-dnd-node.is-dragging .tab-dnd-label {
|
||||
cursor: grabbing !important;
|
||||
}
|
||||
body[data-theme='dark'] .main-tabs .ant-tabs-tab-btn:focus-visible {
|
||||
outline: none !important;
|
||||
border-radius: 6px;
|
||||
box-shadow: 0 0 0 2px rgba(255, 214, 102, 0.72);
|
||||
background: rgba(255, 214, 102, 0.16);
|
||||
}
|
||||
body[data-theme='light'] .main-tabs .ant-tabs-tab-btn:focus-visible {
|
||||
outline: none !important;
|
||||
border-radius: 6px;
|
||||
box-shadow: 0 0 0 2px rgba(9, 109, 217, 0.32);
|
||||
background: rgba(9, 109, 217, 0.08);
|
||||
}
|
||||
body[data-theme='dark'] .main-tabs .ant-tabs-tab.ant-tabs-tab-active {
|
||||
background: rgba(255, 214, 102, 0.12) !important;
|
||||
border-color: rgba(255, 214, 102, 0.4) !important;
|
||||
}
|
||||
`}</style>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
onChange={onChange}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
hideAdd
|
||||
/>
|
||||
<DndContext
|
||||
sensors={sensors}
|
||||
collisionDetection={closestCenter}
|
||||
modifiers={[restrictToHorizontalAxis]}
|
||||
onDragStart={handleDragStart}
|
||||
onDragEnd={handleDragEnd}
|
||||
onDragCancel={handleDragCancel}
|
||||
>
|
||||
<SortableContext items={tabIds} strategy={horizontalListSortingStrategy}>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
onChange={(newActiveKey) => {
|
||||
if (Date.now() < suppressClickUntilRef.current) return;
|
||||
onChange(newActiveKey);
|
||||
}}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
hideAdd
|
||||
renderTabBar={renderTabBar}
|
||||
/>
|
||||
</SortableContext>
|
||||
</DndContext>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
340
frontend/src/components/TriggerViewer.tsx
Normal file
340
frontend/src/components/TriggerViewer.tsx
Normal file
@@ -0,0 +1,340 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import Editor, { loader } from '@monaco-editor/react';
|
||||
import { Spin, Alert } from 'antd';
|
||||
import { TabData } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery } from '../../wailsjs/go/app/App';
|
||||
|
||||
interface TriggerViewerProps {
|
||||
tab: TabData;
|
||||
}
|
||||
|
||||
const TriggerViewer: React.FC<TriggerViewerProps> = ({ tab }) => {
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [triggerDefinition, setTriggerDefinition] = useState<string>('');
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
|
||||
// 初始化透明 Monaco Editor 主题
|
||||
useEffect(() => {
|
||||
loader.init().then(monaco => {
|
||||
monaco.editor.defineTheme('transparent-dark', {
|
||||
base: 'vs-dark',
|
||||
inherit: true,
|
||||
rules: [],
|
||||
colors: {
|
||||
'editor.background': '#00000000',
|
||||
'editor.lineHighlightBackground': '#ffffff10',
|
||||
'editorGutter.background': '#00000000',
|
||||
}
|
||||
});
|
||||
monaco.editor.defineTheme('transparent-light', {
|
||||
base: 'vs',
|
||||
inherit: true,
|
||||
rules: [],
|
||||
colors: {
|
||||
'editor.background': '#00000000',
|
||||
'editor.lineHighlightBackground': '#00000010',
|
||||
'editorGutter.background': '#00000000',
|
||||
}
|
||||
});
|
||||
});
|
||||
}, []);
|
||||
|
||||
const escapeSQLLiteral = (raw: string): string => String(raw || '').replace(/'/g, "''");
|
||||
const quoteSqlServerIdentifier = (raw: string): string => `[${String(raw || '').replace(/]/g, ']]')}]`;
|
||||
|
||||
const getMetadataDialect = (conn: any): string => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'custom') {
|
||||
const driver = String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
if (driver === 'diros' || driver === 'doris') return 'mysql';
|
||||
return driver;
|
||||
}
|
||||
if (type === 'mariadb' || type === 'diros' || type === 'sphinx') return 'mysql';
|
||||
if (type === 'dameng') return 'dm';
|
||||
return type;
|
||||
};
|
||||
|
||||
const isSphinxConnection = (conn: any): boolean => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'sphinx') return true;
|
||||
if (type !== 'custom') return false;
|
||||
const driver = String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
return driver === 'sphinx' || driver === 'sphinxql';
|
||||
};
|
||||
|
||||
const buildShowTriggerQueries = (dialect: string, triggerName: string, dbName: string): string[] => {
|
||||
const safeTriggerName = escapeSQLLiteral(triggerName);
|
||||
const safeDbName = escapeSQLLiteral(dbName);
|
||||
switch (dialect) {
|
||||
case 'mysql':
|
||||
return [
|
||||
`SHOW CREATE TRIGGER \`${triggerName.replace(/`/g, '``')}\``,
|
||||
safeDbName
|
||||
? `SELECT ACTION_STATEMENT AS trigger_definition FROM information_schema.triggers WHERE trigger_schema = '${safeDbName}' AND trigger_name = '${safeTriggerName}' LIMIT 1`
|
||||
: '',
|
||||
safeDbName
|
||||
? `SHOW TRIGGERS FROM \`${dbName.replace(/`/g, '``')}\` LIKE '${safeTriggerName}'`
|
||||
: `SHOW TRIGGERS LIKE '${safeTriggerName}'`,
|
||||
].filter(Boolean);
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase':
|
||||
return [`SELECT pg_get_triggerdef(t.oid, true) AS trigger_definition
|
||||
FROM pg_trigger t
|
||||
JOIN pg_class c ON t.tgrelid = c.oid
|
||||
WHERE t.tgname = '${safeTriggerName}'
|
||||
AND NOT t.tgisinternal
|
||||
LIMIT 1`];
|
||||
case 'sqlserver': {
|
||||
return [`SELECT OBJECT_DEFINITION(OBJECT_ID('${safeTriggerName.replace(/'/g, "''")}')) AS trigger_definition`];
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
if (!safeDbName) {
|
||||
return [`SELECT TRIGGER_BODY FROM USER_TRIGGERS WHERE TRIGGER_NAME = '${safeTriggerName.toUpperCase()}'`];
|
||||
}
|
||||
return [`SELECT TRIGGER_BODY FROM ALL_TRIGGERS WHERE OWNER = '${safeDbName.toUpperCase()}' AND TRIGGER_NAME = '${safeTriggerName.toUpperCase()}'`];
|
||||
case 'sqlite':
|
||||
return [`SELECT sql FROM sqlite_master WHERE type = 'trigger' AND name = '${safeTriggerName}'`];
|
||||
case 'duckdb':
|
||||
return [`-- DuckDB 不支持触发器`];
|
||||
case 'tdengine':
|
||||
return [`-- TDengine 不支持触发器`];
|
||||
case 'mongodb':
|
||||
return [`-- MongoDB 不支持触发器`];
|
||||
default:
|
||||
return [`-- 暂不支持该数据库类型的触发器定义查看`];
|
||||
}
|
||||
};
|
||||
|
||||
const runQueryCandidates = async (
|
||||
config: Record<string, any>,
|
||||
dbName: string,
|
||||
queries: string[]
|
||||
): Promise<{ success: boolean; data: any[]; message?: string }> => {
|
||||
let lastMessage = '';
|
||||
let hasSuccessfulQuery = false;
|
||||
for (const query of queries) {
|
||||
const sql = String(query || '').trim();
|
||||
if (!sql) continue;
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, sql);
|
||||
if (!result.success || !Array.isArray(result.data)) {
|
||||
lastMessage = result.message || lastMessage;
|
||||
continue;
|
||||
}
|
||||
hasSuccessfulQuery = true;
|
||||
if (result.data.length > 0) {
|
||||
return { success: true, data: result.data };
|
||||
}
|
||||
} catch (error: any) {
|
||||
lastMessage = error?.message || String(error);
|
||||
}
|
||||
}
|
||||
if (hasSuccessfulQuery) {
|
||||
return { success: true, data: [] };
|
||||
}
|
||||
return { success: false, data: [], message: lastMessage };
|
||||
};
|
||||
|
||||
const getVersionHint = async (config: Record<string, any>, dbName: string): Promise<string> => {
|
||||
const candidates = [
|
||||
`SELECT VERSION() AS version`,
|
||||
`SHOW VARIABLES LIKE 'version'`,
|
||||
];
|
||||
for (const query of candidates) {
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, query);
|
||||
if (!result.success || !Array.isArray(result.data) || result.data.length === 0) {
|
||||
continue;
|
||||
}
|
||||
const row = result.data[0] as Record<string, any>;
|
||||
const version =
|
||||
row.version
|
||||
|| row.VERSION
|
||||
|| row.Value
|
||||
|| row.value
|
||||
|| Object.values(row)[1]
|
||||
|| Object.values(row)[0];
|
||||
const text = String(version || '').trim();
|
||||
if (text) return text;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
const extractTriggerDefinition = (dialect: string, data: any[]): string => {
|
||||
if (!data || data.length === 0) {
|
||||
return '-- 未找到触发器定义';
|
||||
}
|
||||
|
||||
const row = data[0];
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql': {
|
||||
// MySQL SHOW CREATE TRIGGER returns: Trigger, sql_mode, SQL Original Statement, ...
|
||||
const keys = Object.keys(row);
|
||||
if (row.trigger_definition || row.TRIGGER_DEFINITION) {
|
||||
return String(row.trigger_definition || row.TRIGGER_DEFINITION);
|
||||
}
|
||||
if (row.ACTION_STATEMENT || row.action_statement) {
|
||||
return String(row.ACTION_STATEMENT || row.action_statement);
|
||||
}
|
||||
const sqlKey = keys.find(k => k.toLowerCase().includes('statement') || k.toLowerCase() === 'sql original statement');
|
||||
if (sqlKey) return row[sqlKey];
|
||||
// Fallback: try to find any key containing CREATE TRIGGER
|
||||
for (const key of keys) {
|
||||
const val = String(row[key] || '');
|
||||
if (val.toUpperCase().includes('CREATE TRIGGER')) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase': {
|
||||
return row.trigger_definition || row.TRIGGER_DEFINITION || Object.values(row)[0] || '';
|
||||
}
|
||||
case 'sqlserver': {
|
||||
return row.trigger_definition || row.TRIGGER_DEFINITION || Object.values(row)[0] || '';
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm': {
|
||||
return row.trigger_body || row.TRIGGER_BODY || Object.values(row)[0] || '';
|
||||
}
|
||||
case 'sqlite': {
|
||||
return row.sql || row.SQL || Object.values(row)[0] || '';
|
||||
}
|
||||
default:
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const loadTriggerDefinition = async () => {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
setError('未找到数据库连接');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const triggerName = tab.triggerName || '';
|
||||
const dbName = tab.dbName || '';
|
||||
|
||||
if (!triggerName) {
|
||||
setError('触发器名称为空');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const dialect = getMetadataDialect(conn);
|
||||
const queries = buildShowTriggerQueries(dialect, triggerName, dbName);
|
||||
const sphinxLike = isSphinxConnection(conn) && dialect === 'mysql';
|
||||
|
||||
if (!queries.length || String(queries[0] || '').startsWith('--')) {
|
||||
setTriggerDefinition(String(queries[0] || '-- 暂不支持该数据库类型的触发器定义查看'));
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || '',
|
||||
database: conn.config.database || '',
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: '', port: 22, user: '', password: '', keyPath: '' }
|
||||
};
|
||||
|
||||
const result = await runQueryCandidates(config, dbName, queries);
|
||||
|
||||
if (result.success && Array.isArray(result.data) && result.data.length > 0) {
|
||||
const definition = extractTriggerDefinition(dialect, result.data);
|
||||
setTriggerDefinition(definition);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.success) {
|
||||
if (sphinxLike) {
|
||||
const version = await getVersionHint(config, dbName);
|
||||
const versionText = version ? `(版本: ${version})` : '';
|
||||
setTriggerDefinition(`-- 当前 Sphinx 实例${versionText}未返回触发器定义。\n-- 已执行多套兼容查询,可能是版本能力限制或对象类型不支持。`);
|
||||
return;
|
||||
}
|
||||
setTriggerDefinition('-- 未找到触发器定义');
|
||||
} else if (sphinxLike) {
|
||||
const version = await getVersionHint(config, dbName);
|
||||
const versionText = version ? `(版本: ${version})` : '';
|
||||
setTriggerDefinition(`-- 当前 Sphinx 实例${versionText}不支持触发器定义查询。\n-- 已自动尝试兼容语句,返回失败信息: ${result.message || 'unknown error'}`);
|
||||
} else {
|
||||
setError(result.message || '查询触发器定义失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('查询触发器定义失败: ' + (e?.message || String(e)));
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
loadTriggerDefinition();
|
||||
}, [tab.connectionId, tab.dbName, tab.triggerName, connections]);
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
|
||||
<Spin tip="加载触发器定义..." />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div style={{ padding: 16 }}>
|
||||
<Alert type="error" message="加载失败" description={error} showIcon />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{ padding: '8px 16px', borderBottom: darkMode ? '1px solid #303030' : '1px solid #f0f0f0' }}>
|
||||
<strong>触发器: </strong>{tab.triggerName}
|
||||
{tab.dbName && <span style={{ marginLeft: 16, color: '#888' }}>数据库: {tab.dbName}</span>}
|
||||
</div>
|
||||
<div style={{ flex: 1, minHeight: 0 }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
language="sql"
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={triggerDefinition}
|
||||
options={{
|
||||
readOnly: true,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
lineNumbers: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
wordWrap: 'on',
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default TriggerViewer;
|
||||
@@ -3,6 +3,52 @@ import ReactDOM from 'react-dom/client'
|
||||
import App from './App'
|
||||
// import './index.css' // Optional global styles
|
||||
|
||||
// 全局配置 Monaco Editor 使用本地打包的文件,避免从 CDN (jsdelivr) 加载。
|
||||
// Windows WebView2 环境下访问外部 CDN 可能失败,导致编辑器一直显示 Loading。
|
||||
import { loader } from '@monaco-editor/react'
|
||||
import * as monaco from 'monaco-editor'
|
||||
loader.config({ monaco })
|
||||
|
||||
if (typeof window !== 'undefined' && !(window as any).go) {
|
||||
(window as any).go = {
|
||||
app: {
|
||||
App: {
|
||||
CheckUpdate: async () => ({ success: false }),
|
||||
DownloadUpdate: async () => ({ success: false }),
|
||||
GetSavedConnections: async () => [],
|
||||
SaveConnection: async () => null,
|
||||
DeleteConnection: async () => null,
|
||||
OpenConnection: async () => null,
|
||||
CloseConnection: async () => null,
|
||||
GetDatabases: async () => [],
|
||||
GetTables: async () => [],
|
||||
GetTableData: async () => ({ columns: [], rows: [], total: 0 }),
|
||||
GetTableColumns: async () => [],
|
||||
ExecuteQuery: async () => ({ columns: [], rows: [], time: 0 }),
|
||||
GetSavedQueries: async () => [],
|
||||
SaveQuery: async () => null,
|
||||
DeleteQuery: async () => null,
|
||||
GetAppInfo: async () => ({}),
|
||||
CheckForUpdates: async () => ({ success: false }),
|
||||
OpenDownloadedUpdateDirectory: async () => ({ success: false }),
|
||||
InstallUpdateAndRestart: async () => ({ success: false }),
|
||||
ImportConfigFile: async () => ({ success: false }),
|
||||
ExportData: async () => ({ success: false }),
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// 全局注册透明主题,避免每个 Editor 组件 beforeMount 中重复定义
|
||||
monaco.editor.defineTheme('transparent-dark', {
|
||||
base: 'vs-dark', inherit: true, rules: [],
|
||||
colors: { 'editor.background': '#00000000', 'editor.lineHighlightBackground': '#ffffff10', 'editorGutter.background': '#00000000' }
|
||||
})
|
||||
monaco.editor.defineTheme('transparent-light', {
|
||||
base: 'vs', inherit: true, rules: [],
|
||||
colors: { 'editor.background': '#00000000', 'editor.lineHighlightBackground': '#00000010', 'editorGutter.background': '#00000000' }
|
||||
})
|
||||
|
||||
ReactDOM.createRoot(document.getElementById('root')!).render(
|
||||
<React.StrictMode>
|
||||
<App />
|
||||
|
||||
@@ -1,6 +1,380 @@
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import { SavedConnection, TabData, SavedQuery } from './types';
|
||||
import { ConnectionConfig, ProxyConfig, SavedConnection, TabData, SavedQuery, ConnectionTag } from './types';
|
||||
import {
|
||||
ShortcutAction,
|
||||
ShortcutBinding,
|
||||
ShortcutOptions,
|
||||
DEFAULT_SHORTCUT_OPTIONS,
|
||||
cloneShortcutOptions,
|
||||
sanitizeShortcutOptions,
|
||||
} from './utils/shortcuts';
|
||||
|
||||
const DEFAULT_APPEARANCE = { enabled: true, opacity: 1.0, blur: 0 };
|
||||
const DEFAULT_UI_SCALE = 1.0;
|
||||
const MIN_UI_SCALE = 0.8;
|
||||
const MAX_UI_SCALE = 1.25;
|
||||
const DEFAULT_FONT_SIZE = 14;
|
||||
const MIN_FONT_SIZE = 12;
|
||||
const MAX_FONT_SIZE = 20;
|
||||
const DEFAULT_STARTUP_FULLSCREEN = false;
|
||||
const LEGACY_DEFAULT_OPACITY = 0.95;
|
||||
const OPACITY_EPSILON = 1e-6;
|
||||
const MAX_URI_LENGTH = 4096;
|
||||
const MAX_HOST_ENTRY_LENGTH = 512;
|
||||
const MAX_HOST_ENTRIES = 64;
|
||||
const DEFAULT_TIMEOUT_SECONDS = 30;
|
||||
const MAX_TIMEOUT_SECONDS = 3600;
|
||||
const PERSIST_VERSION = 6;
|
||||
const DEFAULT_CONNECTION_TYPE = 'mysql';
|
||||
const DEFAULT_GLOBAL_PROXY: GlobalProxyConfig = {
|
||||
enabled: false,
|
||||
type: 'socks5',
|
||||
host: '',
|
||||
port: 1080,
|
||||
user: '',
|
||||
password: '',
|
||||
};
|
||||
const SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'doris',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'clickhouse',
|
||||
'postgres',
|
||||
'redis',
|
||||
'tdengine',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'kingbase',
|
||||
'sqlserver',
|
||||
'mongodb',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'custom',
|
||||
]);
|
||||
const SSL_SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'dameng',
|
||||
'clickhouse',
|
||||
'postgres',
|
||||
'sqlserver',
|
||||
'oracle',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'mongodb',
|
||||
'redis',
|
||||
'tdengine',
|
||||
]);
|
||||
|
||||
const getDefaultPortByType = (type: string): number => {
|
||||
switch (type) {
|
||||
case 'mysql':
|
||||
case 'mariadb':
|
||||
return 3306;
|
||||
case 'doris':
|
||||
case 'diros':
|
||||
return 9030;
|
||||
case 'duckdb':
|
||||
return 0;
|
||||
case 'sphinx':
|
||||
return 9306;
|
||||
case 'clickhouse':
|
||||
return 9000;
|
||||
case 'postgres':
|
||||
case 'vastbase':
|
||||
return 5432;
|
||||
case 'redis':
|
||||
return 6379;
|
||||
case 'tdengine':
|
||||
return 6041;
|
||||
case 'oracle':
|
||||
return 1521;
|
||||
case 'dameng':
|
||||
return 5236;
|
||||
case 'kingbase':
|
||||
return 54321;
|
||||
case 'sqlserver':
|
||||
return 1433;
|
||||
case 'mongodb':
|
||||
return 27017;
|
||||
case 'highgo':
|
||||
return 5866;
|
||||
default:
|
||||
return 3306;
|
||||
}
|
||||
};
|
||||
|
||||
const toTrimmedString = (value: unknown, fallback = ''): string => {
|
||||
if (typeof value === 'string') {
|
||||
return value.trim();
|
||||
}
|
||||
if (typeof value === 'number' || typeof value === 'boolean') {
|
||||
return String(value).trim();
|
||||
}
|
||||
return fallback;
|
||||
};
|
||||
|
||||
const normalizePort = (value: unknown, fallbackPort: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackPort;
|
||||
const port = Math.trunc(parsed);
|
||||
if (port <= 0 || port > 65535) return fallbackPort;
|
||||
return port;
|
||||
};
|
||||
|
||||
const normalizeIntegerInRange = (value: unknown, fallbackValue: number, min: number, max: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackValue;
|
||||
const normalized = Math.trunc(parsed);
|
||||
if (normalized < min || normalized > max) return fallbackValue;
|
||||
return normalized;
|
||||
};
|
||||
|
||||
const normalizeFloatInRange = (value: unknown, fallbackValue: number, min: number, max: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackValue;
|
||||
if (parsed < min || parsed > max) return fallbackValue;
|
||||
return parsed;
|
||||
};
|
||||
|
||||
const isValidHostEntry = (entry: string): boolean => {
|
||||
if (!entry) return false;
|
||||
if (entry.length > MAX_HOST_ENTRY_LENGTH) return false;
|
||||
if (/[()\\/\s]/.test(entry)) return false;
|
||||
return true;
|
||||
};
|
||||
|
||||
const sanitizeStringArray = (value: unknown, maxLength = 256): string[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const seen = new Set<string>();
|
||||
const result: string[] = [];
|
||||
value.forEach((entry) => {
|
||||
const normalized = toTrimmedString(entry);
|
||||
if (!normalized || normalized.length > maxLength) return;
|
||||
if (seen.has(normalized)) return;
|
||||
seen.add(normalized);
|
||||
result.push(normalized);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeNumberArray = (value: unknown, min: number, max: number): number[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const seen = new Set<number>();
|
||||
const result: number[] = [];
|
||||
value.forEach((entry) => {
|
||||
const parsed = Number(entry);
|
||||
if (!Number.isFinite(parsed)) return;
|
||||
const num = Math.trunc(parsed);
|
||||
if (num < min || num > max) return;
|
||||
if (seen.has(num)) return;
|
||||
seen.add(num);
|
||||
result.push(num);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeAddressList = (value: unknown): string[] => {
|
||||
const all = sanitizeStringArray(value, MAX_HOST_ENTRY_LENGTH)
|
||||
.filter((entry) => isValidHostEntry(entry));
|
||||
return all.slice(0, MAX_HOST_ENTRIES);
|
||||
};
|
||||
|
||||
const normalizeConnectionType = (value: unknown): string => {
|
||||
const type = toTrimmedString(value).toLowerCase();
|
||||
if (type === 'doris') {
|
||||
return 'diros';
|
||||
}
|
||||
return SUPPORTED_CONNECTION_TYPES.has(type) ? type : DEFAULT_CONNECTION_TYPE;
|
||||
};
|
||||
|
||||
const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const type = normalizeConnectionType(raw.type);
|
||||
const defaultPort = getDefaultPortByType(type);
|
||||
const savePassword = typeof raw.savePassword === 'boolean' ? raw.savePassword : true;
|
||||
const mongoSrv = !!raw.mongoSrv;
|
||||
const sslCapable = SSL_SUPPORTED_CONNECTION_TYPES.has(type);
|
||||
const sslModeRaw = toTrimmedString(raw.sslMode, 'preferred').toLowerCase();
|
||||
const sslMode: 'preferred' | 'required' | 'skip-verify' | 'disable' =
|
||||
sslModeRaw === 'required'
|
||||
? 'required'
|
||||
: sslModeRaw === 'skip-verify'
|
||||
? 'skip-verify'
|
||||
: sslModeRaw === 'disable'
|
||||
? 'disable'
|
||||
: 'preferred';
|
||||
|
||||
const sshRaw = (raw.ssh && typeof raw.ssh === 'object') ? raw.ssh as Record<string, unknown> : {};
|
||||
const ssh = {
|
||||
host: toTrimmedString(sshRaw.host),
|
||||
port: normalizePort(sshRaw.port, 22),
|
||||
user: toTrimmedString(sshRaw.user),
|
||||
password: toTrimmedString(sshRaw.password),
|
||||
keyPath: toTrimmedString(sshRaw.keyPath),
|
||||
};
|
||||
const proxyRaw = (raw.proxy && typeof raw.proxy === 'object') ? raw.proxy as Record<string, unknown> : {};
|
||||
const proxyTypeRaw = toTrimmedString(proxyRaw.type, 'socks5').toLowerCase();
|
||||
const proxyType: 'socks5' | 'http' = proxyTypeRaw === 'http' ? 'http' : 'socks5';
|
||||
const proxy = {
|
||||
type: proxyType,
|
||||
host: toTrimmedString(proxyRaw.host),
|
||||
port: normalizePort(proxyRaw.port, proxyTypeRaw === 'http' ? 8080 : 1080),
|
||||
user: toTrimmedString(proxyRaw.user),
|
||||
password: toTrimmedString(proxyRaw.password),
|
||||
};
|
||||
const httpTunnelRaw = (raw.httpTunnel && typeof raw.httpTunnel === 'object')
|
||||
? raw.httpTunnel as Record<string, unknown>
|
||||
: ((raw.HTTPTunnel && typeof raw.HTTPTunnel === 'object') ? raw.HTTPTunnel as Record<string, unknown> : {});
|
||||
const httpTunnel = {
|
||||
host: toTrimmedString(httpTunnelRaw.host ?? raw.httpTunnelHost),
|
||||
port: normalizePort(httpTunnelRaw.port ?? raw.httpTunnelPort, 8080),
|
||||
user: toTrimmedString(httpTunnelRaw.user ?? raw.httpTunnelUser),
|
||||
password: toTrimmedString(httpTunnelRaw.password ?? raw.httpTunnelPassword),
|
||||
};
|
||||
const supportsNetworkTunnel = type !== 'sqlite' && type !== 'duckdb';
|
||||
const useHttpTunnel = supportsNetworkTunnel && (raw.useHttpTunnel === true || raw.UseHTTPTunnel === true);
|
||||
const useProxy = supportsNetworkTunnel && !!raw.useProxy && !useHttpTunnel;
|
||||
|
||||
const safeConfig: ConnectionConfig & Record<string, unknown> = {
|
||||
...raw,
|
||||
type,
|
||||
host: toTrimmedString(raw.host, 'localhost') || 'localhost',
|
||||
port: normalizePort(raw.port, defaultPort),
|
||||
user: toTrimmedString(raw.user),
|
||||
password: savePassword ? toTrimmedString(raw.password) : '',
|
||||
savePassword,
|
||||
database: toTrimmedString(raw.database),
|
||||
useSSL: sslCapable ? !!raw.useSSL : false,
|
||||
sslMode: sslCapable ? sslMode : 'disable',
|
||||
sslCertPath: sslCapable ? toTrimmedString(raw.sslCertPath) : '',
|
||||
sslKeyPath: sslCapable ? toTrimmedString(raw.sslKeyPath) : '',
|
||||
useSSH: !!raw.useSSH,
|
||||
ssh,
|
||||
useProxy,
|
||||
proxy,
|
||||
useHttpTunnel,
|
||||
httpTunnel,
|
||||
uri: toTrimmedString(raw.uri).slice(0, MAX_URI_LENGTH),
|
||||
hosts: sanitizeAddressList(raw.hosts),
|
||||
topology: raw.topology === 'replica' ? 'replica' : (raw.topology === 'cluster' ? 'cluster' : 'single'),
|
||||
mysqlReplicaUser: toTrimmedString(raw.mysqlReplicaUser),
|
||||
mysqlReplicaPassword: savePassword ? toTrimmedString(raw.mysqlReplicaPassword) : '',
|
||||
replicaSet: toTrimmedString(raw.replicaSet),
|
||||
authSource: toTrimmedString(raw.authSource),
|
||||
readPreference: toTrimmedString(raw.readPreference),
|
||||
mongoSrv,
|
||||
mongoAuthMechanism: toTrimmedString(raw.mongoAuthMechanism),
|
||||
mongoReplicaUser: toTrimmedString(raw.mongoReplicaUser),
|
||||
mongoReplicaPassword: savePassword ? toTrimmedString(raw.mongoReplicaPassword) : '',
|
||||
timeout: normalizeIntegerInRange(raw.timeout, DEFAULT_TIMEOUT_SECONDS, 1, MAX_TIMEOUT_SECONDS),
|
||||
};
|
||||
|
||||
if (type === 'redis') {
|
||||
safeConfig.redisDB = normalizeIntegerInRange(raw.redisDB, 0, 0, 15);
|
||||
}
|
||||
|
||||
if (type === 'custom') {
|
||||
safeConfig.driver = toTrimmedString(raw.driver);
|
||||
safeConfig.dsn = toTrimmedString(raw.dsn).slice(0, MAX_URI_LENGTH);
|
||||
}
|
||||
|
||||
return safeConfig;
|
||||
};
|
||||
|
||||
const resolveConnectionConfigPayload = (raw: Record<string, unknown>): unknown => {
|
||||
if (raw.config && typeof raw.config === 'object') {
|
||||
return raw.config;
|
||||
}
|
||||
// 兼容历史/导入场景:连接对象可能是扁平结构(无 config 包装)。
|
||||
const hasLegacyFlatConfig =
|
||||
raw.type !== undefined ||
|
||||
raw.host !== undefined ||
|
||||
raw.port !== undefined ||
|
||||
raw.user !== undefined ||
|
||||
raw.database !== undefined;
|
||||
if (hasLegacyFlatConfig) {
|
||||
return raw;
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const sanitizeSavedConnection = (value: unknown, index: number): SavedConnection | null => {
|
||||
if (!value || typeof value !== 'object') return null;
|
||||
const raw = value as Record<string, unknown>;
|
||||
const config = sanitizeConnectionConfig(resolveConnectionConfigPayload(raw));
|
||||
const id = toTrimmedString(raw.id, `conn-${index + 1}`) || `conn-${index + 1}`;
|
||||
const displayType = config.type === 'diros' ? 'doris' : config.type;
|
||||
const fallbackName = config.host ? `${displayType}-${config.host}` : `连接-${index + 1}`;
|
||||
const name = toTrimmedString(raw.name, fallbackName) || fallbackName;
|
||||
const includeDatabases = sanitizeStringArray(raw.includeDatabases, 256);
|
||||
const includeRedisDatabases = sanitizeNumberArray(raw.includeRedisDatabases, 0, 15);
|
||||
|
||||
return {
|
||||
id,
|
||||
name,
|
||||
config,
|
||||
includeDatabases: includeDatabases.length > 0 ? includeDatabases : undefined,
|
||||
includeRedisDatabases: includeRedisDatabases.length > 0 ? includeRedisDatabases : undefined,
|
||||
};
|
||||
};
|
||||
|
||||
const sanitizeConnections = (value: unknown): SavedConnection[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: SavedConnection[] = [];
|
||||
const idSet = new Set<string>();
|
||||
|
||||
value.forEach((entry, index) => {
|
||||
const conn = sanitizeSavedConnection(entry, index);
|
||||
if (!conn) return;
|
||||
let nextId = conn.id;
|
||||
if (idSet.has(nextId)) {
|
||||
nextId = `${nextId}-${index + 1}`;
|
||||
}
|
||||
idSet.add(nextId);
|
||||
result.push({ ...conn, id: nextId });
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeConnectionTags = (value: unknown): ConnectionTag[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: ConnectionTag[] = [];
|
||||
const idSet = new Set<string>();
|
||||
|
||||
value.forEach((entry, index) => {
|
||||
if (!entry || typeof entry !== 'object') return;
|
||||
const raw = entry as Record<string, unknown>;
|
||||
const id = toTrimmedString(raw.id, `tag-${index + 1}`) || `tag-${index + 1}`;
|
||||
if (idSet.has(id)) return;
|
||||
idSet.add(id);
|
||||
|
||||
const name = toTrimmedString(raw.name, `标签-${index + 1}`) || `标签-${index + 1}`;
|
||||
const connectionIds = sanitizeStringArray(raw.connectionIds, 256);
|
||||
|
||||
result.push({ id, name, connectionIds });
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const isLegacyDefaultAppearance = (appearance: Partial<{ opacity: number; blur: number }> | undefined): boolean => {
|
||||
if (!appearance) {
|
||||
return true;
|
||||
}
|
||||
const opacity = typeof appearance.opacity === 'number' ? appearance.opacity : LEGACY_DEFAULT_OPACITY;
|
||||
const blur = typeof appearance.blur === 'number' ? appearance.blur : 0;
|
||||
return Math.abs(opacity - LEGACY_DEFAULT_OPACITY) < OPACITY_EPSILON && blur === 0;
|
||||
};
|
||||
|
||||
export interface SqlLog {
|
||||
id: string;
|
||||
@@ -13,27 +387,58 @@ export interface SqlLog {
|
||||
affectedRows?: number;
|
||||
}
|
||||
|
||||
// Per-query display options persisted in the store (see sanitizeQueryOptions
// for the validation/defaulting rules).
export interface QueryOptions {
  maxRows: number;
  showColumnComment: boolean;
  showColumnType: boolean;
}

// App-wide proxy settings; extends the per-connection ProxyConfig with a
// global on/off switch.
export interface GlobalProxyConfig extends ProxyConfig {
  enabled: boolean;
}
|
||||
|
||||
interface AppState {
|
||||
connections: SavedConnection[];
|
||||
connectionTags: ConnectionTag[];
|
||||
tabs: TabData[];
|
||||
activeTabId: string | null;
|
||||
activeContext: { connectionId: string; dbName: string } | null;
|
||||
savedQueries: SavedQuery[];
|
||||
theme: 'light' | 'dark';
|
||||
appearance: { opacity: number; blur: number };
|
||||
appearance: { enabled: boolean; opacity: number; blur: number };
|
||||
uiScale: number;
|
||||
fontSize: number;
|
||||
startupFullscreen: boolean;
|
||||
globalProxy: GlobalProxyConfig;
|
||||
sqlFormatOptions: { keywordCase: 'upper' | 'lower' };
|
||||
queryOptions: { maxRows: number };
|
||||
queryOptions: QueryOptions;
|
||||
shortcutOptions: ShortcutOptions;
|
||||
sqlLogs: SqlLog[];
|
||||
|
||||
tableAccessCount: Record<string, number>;
|
||||
tableSortPreference: Record<string, 'name' | 'frequency'>;
|
||||
tableColumnOrders: Record<string, string[]>;
|
||||
enableColumnOrderMemory: boolean;
|
||||
tableHiddenColumns: Record<string, string[]>;
|
||||
enableHiddenColumnMemory: boolean;
|
||||
|
||||
addConnection: (conn: SavedConnection) => void;
|
||||
updateConnection: (conn: SavedConnection) => void;
|
||||
removeConnection: (id: string) => void;
|
||||
|
||||
|
||||
addConnectionTag: (tag: ConnectionTag) => void;
|
||||
updateConnectionTag: (tag: ConnectionTag) => void;
|
||||
removeConnectionTag: (id: string) => void;
|
||||
moveConnectionToTag: (connectionId: string, targetTagId: string | null) => void;
|
||||
reorderTags: (tagIds: string[]) => void;
|
||||
|
||||
addTab: (tab: TabData) => void;
|
||||
closeTab: (id: string) => void;
|
||||
closeOtherTabs: (id: string) => void;
|
||||
closeTabsToLeft: (id: string) => void;
|
||||
closeTabsToRight: (id: string) => void;
|
||||
closeTabsByConnection: (connectionId: string) => void;
|
||||
closeTabsByDatabase: (connectionId: string, dbName: string) => void;
|
||||
moveTab: (sourceId: string, targetId: string) => void;
|
||||
closeAllTabs: () => void;
|
||||
setActiveTab: (id: string) => void;
|
||||
setActiveContext: (context: { connectionId: string; dbName: string } | null) => void;
|
||||
@@ -42,33 +447,239 @@ interface AppState {
|
||||
deleteQuery: (id: string) => void;
|
||||
|
||||
setTheme: (theme: 'light' | 'dark') => void;
|
||||
setAppearance: (appearance: Partial<{ opacity: number; blur: number }>) => void;
|
||||
setAppearance: (appearance: Partial<{ enabled: boolean; opacity: number; blur: number }>) => void;
|
||||
setUiScale: (scale: number) => void;
|
||||
setFontSize: (size: number) => void;
|
||||
setStartupFullscreen: (enabled: boolean) => void;
|
||||
setGlobalProxy: (proxy: Partial<GlobalProxyConfig>) => void;
|
||||
setSqlFormatOptions: (options: { keywordCase: 'upper' | 'lower' }) => void;
|
||||
setQueryOptions: (options: Partial<{ maxRows: number }>) => void;
|
||||
|
||||
setQueryOptions: (options: Partial<QueryOptions>) => void;
|
||||
updateShortcut: (action: ShortcutAction, binding: Partial<ShortcutBinding>) => void;
|
||||
resetShortcutOptions: () => void;
|
||||
|
||||
addSqlLog: (log: SqlLog) => void;
|
||||
clearSqlLogs: () => void;
|
||||
|
||||
recordTableAccess: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
setTableSortPreference: (connectionId: string, dbName: string, sortBy: 'name' | 'frequency') => void;
|
||||
setTableColumnOrder: (connectionId: string, dbName: string, tableName: string, order: string[]) => void;
|
||||
setEnableColumnOrderMemory: (enabled: boolean) => void;
|
||||
clearTableColumnOrder: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
|
||||
setTableHiddenColumns: (connectionId: string, dbName: string, tableName: string, hiddenColumns: string[]) => void;
|
||||
setEnableHiddenColumnMemory: (enabled: boolean) => void;
|
||||
clearTableHiddenColumns: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
}
|
||||
|
||||
const sanitizeSavedQueries = (value: unknown): SavedQuery[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: SavedQuery[] = [];
|
||||
value.forEach((entry, index) => {
|
||||
if (!entry || typeof entry !== 'object') return;
|
||||
const raw = entry as Record<string, unknown>;
|
||||
const id = toTrimmedString(raw.id, `query-${index + 1}`) || `query-${index + 1}`;
|
||||
const sql = toTrimmedString(raw.sql);
|
||||
const connectionId = toTrimmedString(raw.connectionId);
|
||||
const dbName = toTrimmedString(raw.dbName);
|
||||
if (!sql || !connectionId || !dbName) return;
|
||||
result.push({
|
||||
id,
|
||||
name: toTrimmedString(raw.name, `查询-${index + 1}`) || `查询-${index + 1}`,
|
||||
sql,
|
||||
connectionId,
|
||||
dbName,
|
||||
createdAt: Number.isFinite(Number(raw.createdAt)) ? Number(raw.createdAt) : Date.now(),
|
||||
});
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
// Only 'dark' is accepted; every other stored value collapses to 'light'.
const sanitizeTheme = (value: unknown): 'light' | 'dark' => {
  return value === 'dark' ? 'dark' : 'light';
};
|
||||
|
||||
const sanitizeSqlFormatOptions = (value: unknown): { keywordCase: 'upper' | 'lower' } => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
return { keywordCase: raw.keywordCase === 'lower' ? 'lower' : 'upper' };
|
||||
};
|
||||
|
||||
const sanitizeQueryOptions = (value: unknown): QueryOptions => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const maxRows = Number(raw.maxRows);
|
||||
const showColumnComment = typeof raw.showColumnComment === 'boolean' ? raw.showColumnComment : true;
|
||||
const showColumnType = typeof raw.showColumnType === 'boolean' ? raw.showColumnType : true;
|
||||
if (!Number.isFinite(maxRows) || maxRows <= 0) {
|
||||
return { maxRows: 5000, showColumnComment, showColumnType };
|
||||
}
|
||||
return { maxRows: Math.min(50000, Math.trunc(maxRows)), showColumnComment, showColumnType };
|
||||
};
|
||||
|
||||
const sanitizeTableAccessCount = (value: unknown): Record<string, number> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, number> = {};
|
||||
Object.entries(raw).forEach(([key, count]) => {
|
||||
const parsed = Number(count);
|
||||
if (!Number.isFinite(parsed) || parsed < 0) return;
|
||||
result[key] = Math.trunc(parsed);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTableSortPreference = (value: unknown): Record<string, 'name' | 'frequency'> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, 'name' | 'frequency'> = {};
|
||||
Object.entries(raw).forEach(([key, preference]) => {
|
||||
result[key] = preference === 'frequency' ? 'frequency' : 'name';
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTableColumnOrders = (value: unknown): Record<string, string[]> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, string[]> = {};
|
||||
Object.entries(raw).forEach(([key, orderArray]) => {
|
||||
if (Array.isArray(orderArray)) {
|
||||
result[key] = orderArray.map(col => String(col));
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTableHiddenColumns = (value: unknown): Record<string, string[]> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, string[]> = {};
|
||||
Object.entries(raw).forEach(([key, hiddenArray]) => {
|
||||
if (Array.isArray(hiddenArray)) {
|
||||
result[key] = hiddenArray.map(col => String(col));
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeAppearance = (
|
||||
appearance: Partial<{ enabled: boolean; opacity: number; blur: number }> | undefined,
|
||||
version: number
|
||||
): { enabled: boolean; opacity: number; blur: number } => {
|
||||
if (!appearance || typeof appearance !== 'object') {
|
||||
return { ...DEFAULT_APPEARANCE };
|
||||
}
|
||||
const nextAppearance = {
|
||||
enabled: typeof appearance.enabled === 'boolean' ? appearance.enabled : DEFAULT_APPEARANCE.enabled,
|
||||
opacity: typeof appearance.opacity === 'number' ? appearance.opacity : DEFAULT_APPEARANCE.opacity,
|
||||
blur: typeof appearance.blur === 'number' ? appearance.blur : DEFAULT_APPEARANCE.blur,
|
||||
};
|
||||
if (version < 2 && isLegacyDefaultAppearance(appearance)) {
|
||||
return { ...DEFAULT_APPEARANCE };
|
||||
}
|
||||
return nextAppearance;
|
||||
};
|
||||
|
||||
const sanitizeStartupFullscreen = (value: unknown): boolean => {
|
||||
return value === true;
|
||||
};
|
||||
|
||||
const sanitizeUiScale = (value: unknown): number => {
|
||||
return normalizeFloatInRange(value, DEFAULT_UI_SCALE, MIN_UI_SCALE, MAX_UI_SCALE);
|
||||
};
|
||||
|
||||
const sanitizeFontSize = (value: unknown): number => {
|
||||
return normalizeIntegerInRange(value, DEFAULT_FONT_SIZE, MIN_FONT_SIZE, MAX_FONT_SIZE);
|
||||
};
|
||||
|
||||
const sanitizeGlobalProxy = (value: unknown): GlobalProxyConfig => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const typeRaw = toTrimmedString(raw.type, DEFAULT_GLOBAL_PROXY.type).toLowerCase();
|
||||
const type: 'socks5' | 'http' = typeRaw === 'http' ? 'http' : 'socks5';
|
||||
const fallbackPort = type === 'http' ? 8080 : 1080;
|
||||
return {
|
||||
enabled: raw.enabled === true,
|
||||
type,
|
||||
host: toTrimmedString(raw.host),
|
||||
port: normalizePort(raw.port, fallbackPort),
|
||||
user: toTrimmedString(raw.user),
|
||||
password: toTrimmedString(raw.password),
|
||||
};
|
||||
};
|
||||
|
||||
const unwrapPersistedAppState = (persistedState: unknown): Record<string, unknown> => {
|
||||
if (!persistedState || typeof persistedState !== 'object') {
|
||||
return {};
|
||||
}
|
||||
const raw = persistedState as Record<string, unknown>;
|
||||
if (raw.state && typeof raw.state === 'object') {
|
||||
return raw.state as Record<string, unknown>;
|
||||
}
|
||||
return raw;
|
||||
};
|
||||
|
||||
export const useStore = create<AppState>()(
|
||||
persist(
|
||||
(set) => ({
|
||||
connections: [],
|
||||
connectionTags: [],
|
||||
tabs: [],
|
||||
activeTabId: null,
|
||||
activeContext: null,
|
||||
savedQueries: [],
|
||||
theme: 'light',
|
||||
appearance: { opacity: 0.95, blur: 0 },
|
||||
appearance: { ...DEFAULT_APPEARANCE },
|
||||
uiScale: DEFAULT_UI_SCALE,
|
||||
fontSize: DEFAULT_FONT_SIZE,
|
||||
startupFullscreen: DEFAULT_STARTUP_FULLSCREEN,
|
||||
globalProxy: { ...DEFAULT_GLOBAL_PROXY },
|
||||
sqlFormatOptions: { keywordCase: 'upper' },
|
||||
queryOptions: { maxRows: 5000 },
|
||||
queryOptions: { maxRows: 5000, showColumnComment: true, showColumnType: true },
|
||||
shortcutOptions: cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS),
|
||||
sqlLogs: [],
|
||||
tableAccessCount: {},
|
||||
tableSortPreference: {},
|
||||
tableColumnOrders: {},
|
||||
enableColumnOrderMemory: true,
|
||||
tableHiddenColumns: {},
|
||||
enableHiddenColumnMemory: true,
|
||||
|
||||
addConnection: (conn) => set((state) => ({ connections: [...state.connections, conn] })),
|
||||
updateConnection: (conn) => set((state) => ({
|
||||
connections: state.connections.map(c => c.id === conn.id ? conn : c)
|
||||
updateConnection: (conn) => set((state) => ({
|
||||
connections: state.connections.map(c => c.id === conn.id ? conn : c)
|
||||
})),
|
||||
removeConnection: (id) => set((state) => ({ connections: state.connections.filter(c => c.id !== id) })),
|
||||
removeConnection: (id) => set((state) => ({
|
||||
connections: state.connections.filter(c => c.id !== id),
|
||||
connectionTags: state.connectionTags.map(tag => ({
|
||||
...tag,
|
||||
connectionIds: tag.connectionIds.filter(cid => cid !== id)
|
||||
}))
|
||||
})),
|
||||
|
||||
addConnectionTag: (tag) => set((state) => ({ connectionTags: [...state.connectionTags, tag] })),
|
||||
updateConnectionTag: (tag) => set((state) => ({
|
||||
connectionTags: state.connectionTags.map(t => t.id === tag.id ? tag : t)
|
||||
})),
|
||||
removeConnectionTag: (id) => set((state) => ({
|
||||
connectionTags: state.connectionTags.filter(t => t.id !== id)
|
||||
})),
|
||||
moveConnectionToTag: (connectionId, targetTagId) => set((state) => {
|
||||
const newTags = state.connectionTags.map(tag => {
|
||||
//先从所有tag中移除该connection
|
||||
const filteredIds = tag.connectionIds.filter(id => id !== connectionId);
|
||||
if (tag.id === targetTagId) {
|
||||
return { ...tag, connectionIds: [...filteredIds, connectionId] };
|
||||
}
|
||||
return { ...tag, connectionIds: filteredIds };
|
||||
});
|
||||
return { connectionTags: newTags };
|
||||
}),
|
||||
reorderTags: (tagIds) => set((state) => {
|
||||
const tagMap = new Map(state.connectionTags.map(t => [t.id, t]));
|
||||
const newTags: ConnectionTag[] = [];
|
||||
tagIds.forEach(id => {
|
||||
const tag = tagMap.get(id);
|
||||
if (tag) {
|
||||
newTags.push(tag);
|
||||
tagMap.delete(id);
|
||||
}
|
||||
});
|
||||
// 追加未指定的tag(如果有的话)
|
||||
newTags.push(...Array.from(tagMap.values()));
|
||||
return { connectionTags: newTags };
|
||||
}),
|
||||
|
||||
addTab: (tab) => set((state) => {
|
||||
const index = state.tabs.findIndex(t => t.id === tab.id);
|
||||
@@ -112,6 +723,62 @@ export const useStore = create<AppState>()(
|
||||
return { tabs: newTabs, activeTabId: activeStillExists ? state.activeTabId : id };
|
||||
}),
|
||||
|
||||
closeTabsByConnection: (connectionId) => set((state) => {
|
||||
const targetConnectionId = String(connectionId || '').trim();
|
||||
if (!targetConnectionId) return state;
|
||||
const newTabs = state.tabs.filter(t => String(t.connectionId || '').trim() !== targetConnectionId);
|
||||
const activeStillExists = state.activeTabId ? newTabs.some(t => t.id === state.activeTabId) : false;
|
||||
const nextActiveTabId = activeStillExists
|
||||
? state.activeTabId
|
||||
: (newTabs.length > 0 ? newTabs[newTabs.length - 1].id : null);
|
||||
const nextActiveContext = state.activeContext?.connectionId === targetConnectionId ? null : state.activeContext;
|
||||
return {
|
||||
tabs: newTabs,
|
||||
activeTabId: nextActiveTabId,
|
||||
activeContext: nextActiveContext,
|
||||
};
|
||||
}),
|
||||
|
||||
closeTabsByDatabase: (connectionId, dbName) => set((state) => {
|
||||
const targetConnectionId = String(connectionId || '').trim();
|
||||
const targetDbName = String(dbName || '').trim();
|
||||
if (!targetConnectionId || !targetDbName) return state;
|
||||
const newTabs = state.tabs.filter((tab) => {
|
||||
const sameConnection = String(tab.connectionId || '').trim() === targetConnectionId;
|
||||
const sameDb = String(tab.dbName || '').trim() === targetDbName;
|
||||
return !(sameConnection && sameDb);
|
||||
});
|
||||
const activeStillExists = state.activeTabId ? newTabs.some(t => t.id === state.activeTabId) : false;
|
||||
const nextActiveTabId = activeStillExists
|
||||
? state.activeTabId
|
||||
: (newTabs.length > 0 ? newTabs[newTabs.length - 1].id : null);
|
||||
const sameActiveContext = state.activeContext
|
||||
&& state.activeContext.connectionId === targetConnectionId
|
||||
&& state.activeContext.dbName === targetDbName;
|
||||
return {
|
||||
tabs: newTabs,
|
||||
activeTabId: nextActiveTabId,
|
||||
activeContext: sameActiveContext ? null : state.activeContext,
|
||||
};
|
||||
}),
|
||||
|
||||
moveTab: (sourceId, targetId) => set((state) => {
|
||||
const fromId = String(sourceId || '').trim();
|
||||
const toId = String(targetId || '').trim();
|
||||
if (!fromId || !toId || fromId === toId) {
|
||||
return state;
|
||||
}
|
||||
const fromIndex = state.tabs.findIndex((tab) => tab.id === fromId);
|
||||
const toIndex = state.tabs.findIndex((tab) => tab.id === toId);
|
||||
if (fromIndex < 0 || toIndex < 0 || fromIndex === toIndex) {
|
||||
return state;
|
||||
}
|
||||
const nextTabs = [...state.tabs];
|
||||
const [movingTab] = nextTabs.splice(fromIndex, 1);
|
||||
nextTabs.splice(toIndex, 0, movingTab);
|
||||
return { tabs: nextTabs };
|
||||
}),
|
||||
|
||||
closeAllTabs: () => set(() => ({ tabs: [], activeTabId: null })),
|
||||
|
||||
setActiveTab: (id) => set({ activeTabId: id }),
|
||||
@@ -130,15 +797,164 @@ export const useStore = create<AppState>()(
|
||||
|
||||
setTheme: (theme) => set({ theme }),
|
||||
setAppearance: (appearance) => set((state) => ({ appearance: { ...state.appearance, ...appearance } })),
|
||||
setUiScale: (scale) => set({ uiScale: sanitizeUiScale(scale) }),
|
||||
setFontSize: (size) => set({ fontSize: sanitizeFontSize(size) }),
|
||||
setStartupFullscreen: (enabled) => set({ startupFullscreen: !!enabled }),
|
||||
setGlobalProxy: (proxy) => set((state) => ({ globalProxy: sanitizeGlobalProxy({ ...state.globalProxy, ...proxy }) })),
|
||||
setSqlFormatOptions: (options) => set({ sqlFormatOptions: options }),
|
||||
setQueryOptions: (options) => set((state) => ({ queryOptions: { ...state.queryOptions, ...options } })),
|
||||
|
||||
updateShortcut: (action, binding) => set((state) => ({
|
||||
shortcutOptions: {
|
||||
...state.shortcutOptions,
|
||||
[action]: {
|
||||
...state.shortcutOptions[action],
|
||||
...binding,
|
||||
},
|
||||
},
|
||||
})),
|
||||
resetShortcutOptions: () => set({ shortcutOptions: cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS) }),
|
||||
|
||||
addSqlLog: (log) => set((state) => ({ sqlLogs: [log, ...state.sqlLogs].slice(0, 1000) })), // Keep last 1000 logs
|
||||
clearSqlLogs: () => set({ sqlLogs: [] }),
|
||||
|
||||
recordTableAccess: (connectionId, dbName, tableName) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
const currentCount = state.tableAccessCount[key] || 0;
|
||||
return {
|
||||
tableAccessCount: {
|
||||
...state.tableAccessCount,
|
||||
[key]: currentCount + 1
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
setTableSortPreference: (connectionId, dbName, sortBy) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}`;
|
||||
return {
|
||||
tableSortPreference: {
|
||||
...state.tableSortPreference,
|
||||
[key]: sortBy
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
setTableColumnOrder: (connectionId, dbName, tableName, order) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
return {
|
||||
tableColumnOrders: {
|
||||
...state.tableColumnOrders,
|
||||
[key]: order
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
clearTableColumnOrder: (connectionId, dbName, tableName) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
const newOrders = { ...state.tableColumnOrders };
|
||||
delete newOrders[key];
|
||||
return { tableColumnOrders: newOrders };
|
||||
}),
|
||||
|
||||
setEnableColumnOrderMemory: (enabled) => set({ enableColumnOrderMemory: !!enabled }),
|
||||
|
||||
setTableHiddenColumns: (connectionId, dbName, tableName, hiddenColumns) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
return {
|
||||
tableHiddenColumns: {
|
||||
...state.tableHiddenColumns,
|
||||
[key]: hiddenColumns
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
clearTableHiddenColumns: (connectionId, dbName, tableName) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
const newHidden = { ...state.tableHiddenColumns };
|
||||
delete newHidden[key];
|
||||
return { tableHiddenColumns: newHidden };
|
||||
}),
|
||||
|
||||
setEnableHiddenColumnMemory: (enabled) => set({ enableHiddenColumnMemory: !!enabled }),
|
||||
}),
|
||||
{
|
||||
name: 'lite-db-storage', // name of the item in the storage (must be unique)
|
||||
partialize: (state) => ({ connections: state.connections, savedQueries: state.savedQueries, theme: state.theme, appearance: state.appearance, sqlFormatOptions: state.sqlFormatOptions, queryOptions: state.queryOptions }), // Don't persist logs
|
||||
version: PERSIST_VERSION,
|
||||
migrate: (persistedState: unknown, version: number) => {
|
||||
const state = unwrapPersistedAppState(persistedState) as Partial<AppState>;
|
||||
const nextState: Partial<AppState> = { ...state };
|
||||
nextState.connections = sanitizeConnections(state.connections);
|
||||
if (version < 5) {
|
||||
nextState.connectionTags = sanitizeConnectionTags(state.connectionTags);
|
||||
} else {
|
||||
nextState.connectionTags = sanitizeConnectionTags(state.connectionTags);
|
||||
}
|
||||
nextState.savedQueries = sanitizeSavedQueries(state.savedQueries);
|
||||
nextState.theme = sanitizeTheme(state.theme);
|
||||
nextState.appearance = sanitizeAppearance(state.appearance, version);
|
||||
nextState.uiScale = sanitizeUiScale(state.uiScale);
|
||||
nextState.fontSize = sanitizeFontSize(state.fontSize);
|
||||
nextState.startupFullscreen = sanitizeStartupFullscreen(state.startupFullscreen);
|
||||
nextState.globalProxy = sanitizeGlobalProxy(state.globalProxy);
|
||||
nextState.sqlFormatOptions = sanitizeSqlFormatOptions(state.sqlFormatOptions);
|
||||
nextState.queryOptions = sanitizeQueryOptions(state.queryOptions);
|
||||
nextState.shortcutOptions = sanitizeShortcutOptions(state.shortcutOptions);
|
||||
nextState.tableAccessCount = sanitizeTableAccessCount(state.tableAccessCount);
|
||||
nextState.tableSortPreference = sanitizeTableSortPreference(state.tableSortPreference);
|
||||
// 新增的列排序记忆状态不需要做版本特殊兼容,直接做基本的类型保护
|
||||
const safeOrders = sanitizeTableColumnOrders(state.tableColumnOrders);
|
||||
nextState.tableColumnOrders = safeOrders;
|
||||
nextState.enableColumnOrderMemory = state.enableColumnOrderMemory !== false;
|
||||
const safeHidden = sanitizeTableHiddenColumns(state.tableHiddenColumns);
|
||||
nextState.tableHiddenColumns = safeHidden;
|
||||
nextState.enableHiddenColumnMemory = state.enableHiddenColumnMemory !== false;
|
||||
return nextState as AppState;
|
||||
},
|
||||
merge: (persistedState, currentState) => {
|
||||
const state = unwrapPersistedAppState(persistedState) as Partial<AppState>;
|
||||
return {
|
||||
...currentState,
|
||||
...state,
|
||||
connections: sanitizeConnections(state.connections),
|
||||
connectionTags: sanitizeConnectionTags(state.connectionTags),
|
||||
savedQueries: sanitizeSavedQueries(state.savedQueries),
|
||||
theme: sanitizeTheme(state.theme),
|
||||
appearance: sanitizeAppearance(state.appearance, PERSIST_VERSION),
|
||||
uiScale: sanitizeUiScale(state.uiScale),
|
||||
fontSize: sanitizeFontSize(state.fontSize),
|
||||
startupFullscreen: sanitizeStartupFullscreen(state.startupFullscreen),
|
||||
globalProxy: sanitizeGlobalProxy(state.globalProxy),
|
||||
tableSortPreference: sanitizeTableSortPreference(state.tableSortPreference),
|
||||
tableColumnOrders: sanitizeTableColumnOrders(state.tableColumnOrders),
|
||||
enableColumnOrderMemory: state.enableColumnOrderMemory !== false,
|
||||
tableHiddenColumns: sanitizeTableHiddenColumns(state.tableHiddenColumns),
|
||||
enableHiddenColumnMemory: state.enableHiddenColumnMemory !== false,
|
||||
|
||||
sqlFormatOptions: sanitizeSqlFormatOptions(state.sqlFormatOptions),
|
||||
queryOptions: sanitizeQueryOptions(state.queryOptions),
|
||||
shortcutOptions: sanitizeShortcutOptions(state.shortcutOptions),
|
||||
tableAccessCount: sanitizeTableAccessCount(state.tableAccessCount),
|
||||
};
|
||||
},
|
||||
partialize: (state) => ({
|
||||
connections: state.connections,
|
||||
connectionTags: state.connectionTags,
|
||||
savedQueries: state.savedQueries,
|
||||
theme: state.theme,
|
||||
appearance: state.appearance,
|
||||
uiScale: state.uiScale,
|
||||
fontSize: state.fontSize,
|
||||
startupFullscreen: state.startupFullscreen,
|
||||
globalProxy: state.globalProxy,
|
||||
sqlFormatOptions: state.sqlFormatOptions,
|
||||
queryOptions: state.queryOptions,
|
||||
shortcutOptions: state.shortcutOptions,
|
||||
tableAccessCount: state.tableAccessCount,
|
||||
tableSortPreference: state.tableSortPreference,
|
||||
tableColumnOrders: state.tableColumnOrders,
|
||||
enableColumnOrderMemory: state.enableColumnOrderMemory,
|
||||
tableHiddenColumns: state.tableHiddenColumns,
|
||||
enableHiddenColumnMemory: state.enableHiddenColumnMemory
|
||||
}), // Don't persist logs
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
@@ -6,16 +6,64 @@ export interface SSHConfig {
|
||||
keyPath?: string;
|
||||
}
|
||||
|
||||
export interface ProxyConfig {
|
||||
type: 'socks5' | 'http';
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface HTTPTunnelConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface ConnectionConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
port: number;
|
||||
user: string;
|
||||
password?: string;
|
||||
savePassword?: boolean;
|
||||
database?: string;
|
||||
useSSL?: boolean;
|
||||
sslMode?: 'preferred' | 'required' | 'skip-verify' | 'disable';
|
||||
sslCertPath?: string;
|
||||
sslKeyPath?: string;
|
||||
useSSH?: boolean;
|
||||
ssh?: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
useHttpTunnel?: boolean;
|
||||
httpTunnel?: HTTPTunnelConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
redisDB?: number; // Redis database index (0-15)
|
||||
uri?: string; // Connection URI for copy/paste
|
||||
hosts?: string[]; // Multi-host addresses: host:port
|
||||
topology?: 'single' | 'replica' | 'cluster';
|
||||
mysqlReplicaUser?: string;
|
||||
mysqlReplicaPassword?: string;
|
||||
replicaSet?: string;
|
||||
authSource?: string;
|
||||
readPreference?: string;
|
||||
mongoSrv?: boolean;
|
||||
mongoAuthMechanism?: string;
|
||||
mongoReplicaUser?: string;
|
||||
mongoReplicaPassword?: string;
|
||||
}
|
||||
|
||||
export interface MongoMemberInfo {
|
||||
host: string;
|
||||
role: string;
|
||||
state: string;
|
||||
stateCode?: number;
|
||||
healthy: boolean;
|
||||
isSelf?: boolean;
|
||||
}
|
||||
|
||||
export interface SavedConnection {
|
||||
@@ -26,6 +74,12 @@ export interface SavedConnection {
|
||||
includeRedisDatabases?: number[]; // Redis databases to show (0-15)
|
||||
}
|
||||
|
||||
export interface ConnectionTag {
|
||||
id: string;
|
||||
name: string;
|
||||
connectionIds: string[];
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
name: string;
|
||||
type: string;
|
||||
@@ -62,7 +116,7 @@ export interface TriggerDefinition {
|
||||
export interface TabData {
|
||||
id: string;
|
||||
title: string;
|
||||
type: 'query' | 'table' | 'design' | 'redis-keys' | 'redis-command';
|
||||
type: 'query' | 'table' | 'design' | 'redis-keys' | 'redis-command' | 'trigger' | 'view-def' | 'routine-def';
|
||||
connectionId: string;
|
||||
dbName?: string;
|
||||
tableName?: string;
|
||||
@@ -70,6 +124,10 @@ export interface TabData {
|
||||
initialTab?: string;
|
||||
readOnly?: boolean;
|
||||
redisDB?: number; // Redis database index for redis tabs
|
||||
triggerName?: string; // Trigger name for trigger tabs
|
||||
viewName?: string; // View name for view definition tabs
|
||||
routineName?: string; // Routine name for function/procedure definition tabs
|
||||
routineType?: string; // 'FUNCTION' or 'PROCEDURE'
|
||||
}
|
||||
|
||||
export interface DatabaseNode {
|
||||
@@ -98,11 +156,11 @@ export interface RedisKeyInfo {
|
||||
|
||||
export interface RedisScanResult {
|
||||
keys: RedisKeyInfo[];
|
||||
cursor: number;
|
||||
cursor: string;
|
||||
}
|
||||
|
||||
export interface RedisValue {
|
||||
type: 'string' | 'hash' | 'list' | 'set' | 'zset';
|
||||
type: 'string' | 'hash' | 'list' | 'set' | 'zset' | 'stream';
|
||||
ttl: number;
|
||||
value: any;
|
||||
length: number;
|
||||
@@ -117,3 +175,8 @@ export interface ZSetMember {
|
||||
member: string;
|
||||
score: number;
|
||||
}
|
||||
|
||||
export interface StreamEntry {
|
||||
id: string;
|
||||
fields: Record<string, string>;
|
||||
}
|
||||
|
||||
82
frontend/src/utils/appearance.ts
Normal file
82
frontend/src/utils/appearance.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
const DEFAULT_OPACITY = 1.0;
|
||||
const MIN_OPACITY = 0.1;
|
||||
const MAX_OPACITY = 1.0;
|
||||
|
||||
// 平台透明度映射因子:值越大,滑块变化越平滑(1.0 = 线性映射)
|
||||
const MAC_OPACITY_FACTOR = 0.60;
|
||||
const MAC_BLUR_FACTOR = 1.00;
|
||||
const WINDOWS_OPACITY_FACTOR = 0.70;
|
||||
const WINDOWS_BLUR_FACTOR = 1.00;
|
||||
|
||||
const clamp = (value: number, min: number, max: number) => Math.min(max, Math.max(min, value));
|
||||
|
||||
export interface AppearanceSettingsLike {
|
||||
enabled?: boolean;
|
||||
opacity?: number;
|
||||
blur?: number;
|
||||
}
|
||||
|
||||
export const resolveAppearanceValues = (appearance: AppearanceSettingsLike | undefined): { opacity: number; blur: number } => {
|
||||
if (!appearance || appearance.enabled !== false) {
|
||||
return {
|
||||
opacity: appearance?.opacity ?? DEFAULT_OPACITY,
|
||||
blur: appearance?.blur ?? 0,
|
||||
};
|
||||
}
|
||||
return { opacity: DEFAULT_OPACITY, blur: 0 };
|
||||
};
|
||||
|
||||
export const isMacLikePlatform = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
const platform = navigator.platform || '';
|
||||
const ua = navigator.userAgent || '';
|
||||
return /(Mac|iPhone|iPad|iPod)/i.test(`${platform} ${ua}`);
|
||||
};
|
||||
|
||||
export const isWindowsPlatform = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
const platform = navigator.platform || '';
|
||||
const ua = navigator.userAgent || '';
|
||||
return /(Win|Windows)/i.test(`${platform} ${ua}`);
|
||||
};
|
||||
|
||||
const getPlatformFactors = () => {
|
||||
if (isMacLikePlatform()) {
|
||||
return { opacity: MAC_OPACITY_FACTOR, blur: MAC_BLUR_FACTOR };
|
||||
}
|
||||
if (isWindowsPlatform()) {
|
||||
return { opacity: WINDOWS_OPACITY_FACTOR, blur: WINDOWS_BLUR_FACTOR };
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
export const normalizeOpacityForPlatform = (opacity: number | undefined): number => {
|
||||
const raw = clamp(opacity ?? DEFAULT_OPACITY, MIN_OPACITY, MAX_OPACITY);
|
||||
// 用户显式拉到 100%% 时,必须保持完全不透明,不能再被平台映射压低。
|
||||
if (raw >= MAX_OPACITY - 1e-6) {
|
||||
return MAX_OPACITY;
|
||||
}
|
||||
const factors = getPlatformFactors();
|
||||
if (!factors) {
|
||||
return raw;
|
||||
}
|
||||
|
||||
return clamp(MIN_OPACITY + (raw - MIN_OPACITY) * factors.opacity, MIN_OPACITY, MAX_OPACITY);
|
||||
};
|
||||
|
||||
export const normalizeBlurForPlatform = (blur: number | undefined): number => {
|
||||
const raw = Math.max(0, blur ?? 0);
|
||||
const factors = getPlatformFactors();
|
||||
if (!factors) {
|
||||
return raw;
|
||||
}
|
||||
return Math.round(raw * factors.blur);
|
||||
};
|
||||
|
||||
export const blurToFilter = (blur: number): string | undefined => {
|
||||
return blur > 0 ? `blur(${blur}px)` : undefined;
|
||||
};
|
||||
86
frontend/src/utils/dataSourceCapabilities.ts
Normal file
86
frontend/src/utils/dataSourceCapabilities.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import type { ConnectionConfig } from '../types';
|
||||
|
||||
type ConnectionLike = Pick<ConnectionConfig, 'type' | 'driver'> | null | undefined;
|
||||
|
||||
const normalizeDataSourceToken = (raw: string): string => {
|
||||
const normalized = String(raw || '').trim().toLowerCase();
|
||||
switch (normalized) {
|
||||
case 'doris':
|
||||
return 'diros';
|
||||
case 'postgresql':
|
||||
return 'postgres';
|
||||
case 'dm':
|
||||
return 'dameng';
|
||||
default:
|
||||
return normalized;
|
||||
}
|
||||
};
|
||||
|
||||
export const resolveDataSourceType = (config: ConnectionLike): string => {
|
||||
if (!config) return '';
|
||||
const type = normalizeDataSourceToken(String(config.type || ''));
|
||||
if (type === 'custom') {
|
||||
const driver = normalizeDataSourceToken(String(config.driver || ''));
|
||||
return driver || 'custom';
|
||||
}
|
||||
return type;
|
||||
};
|
||||
|
||||
const SQL_QUERY_EXPORT_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlserver',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'tdengine',
|
||||
'clickhouse',
|
||||
]);
|
||||
|
||||
const COPY_INSERT_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlserver',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'tdengine',
|
||||
'clickhouse',
|
||||
]);
|
||||
|
||||
const QUERY_EDITOR_DISABLED_TYPES = new Set(['redis']);
|
||||
const FORCE_READ_ONLY_QUERY_TYPES = new Set(['tdengine', 'clickhouse']);
|
||||
|
||||
export type DataSourceCapabilities = {
|
||||
type: string;
|
||||
supportsQueryEditor: boolean;
|
||||
supportsSqlQueryExport: boolean;
|
||||
supportsCopyInsert: boolean;
|
||||
forceReadOnlyQueryResult: boolean;
|
||||
};
|
||||
|
||||
export const getDataSourceCapabilities = (config: ConnectionLike): DataSourceCapabilities => {
|
||||
const type = resolveDataSourceType(config);
|
||||
return {
|
||||
type,
|
||||
supportsQueryEditor: !QUERY_EDITOR_DISABLED_TYPES.has(type),
|
||||
supportsSqlQueryExport: SQL_QUERY_EXPORT_TYPES.has(type),
|
||||
supportsCopyInsert: COPY_INSERT_TYPES.has(type),
|
||||
forceReadOnlyQueryResult: FORCE_READ_ONLY_QUERY_TYPES.has(type),
|
||||
};
|
||||
};
|
||||
|
||||
1014
frontend/src/utils/mongodb.ts
Normal file
1014
frontend/src/utils/mongodb.ts
Normal file
File diff suppressed because it is too large
Load Diff
258
frontend/src/utils/shortcuts.ts
Normal file
258
frontend/src/utils/shortcuts.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import type { KeyboardEvent as ReactKeyboardEvent } from 'react';
|
||||
|
||||
export type ShortcutAction =
|
||||
| 'runQuery'
|
||||
| 'focusSidebarSearch'
|
||||
| 'newQueryTab'
|
||||
| 'toggleLogPanel'
|
||||
| 'toggleTheme'
|
||||
| 'openShortcutManager';
|
||||
|
||||
export interface ShortcutBinding {
|
||||
combo: string;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
export type ShortcutOptions = Record<ShortcutAction, ShortcutBinding>;
|
||||
|
||||
export interface ShortcutActionMeta {
|
||||
label: string;
|
||||
description: string;
|
||||
allowInEditable?: boolean;
|
||||
}
|
||||
|
||||
const MODIFIER_ORDER = ['Ctrl', 'Meta', 'Alt', 'Shift'] as const;
|
||||
const MODIFIER_SET = new Set(MODIFIER_ORDER);
|
||||
|
||||
const KEY_ALIASES: Record<string, string> = {
|
||||
control: 'Ctrl',
|
||||
ctrl: 'Ctrl',
|
||||
command: 'Meta',
|
||||
cmd: 'Meta',
|
||||
meta: 'Meta',
|
||||
option: 'Alt',
|
||||
alt: 'Alt',
|
||||
shift: 'Shift',
|
||||
escape: 'Esc',
|
||||
esc: 'Esc',
|
||||
return: 'Enter',
|
||||
enter: 'Enter',
|
||||
tab: 'Tab',
|
||||
space: 'Space',
|
||||
' ': 'Space',
|
||||
backspace: 'Backspace',
|
||||
delete: 'Delete',
|
||||
del: 'Delete',
|
||||
arrowup: 'Up',
|
||||
up: 'Up',
|
||||
arrowdown: 'Down',
|
||||
down: 'Down',
|
||||
arrowleft: 'Left',
|
||||
left: 'Left',
|
||||
arrowright: 'Right',
|
||||
right: 'Right',
|
||||
pagedown: 'PageDown',
|
||||
pageup: 'PageUp',
|
||||
home: 'Home',
|
||||
end: 'End',
|
||||
insert: 'Insert',
|
||||
',': ',',
|
||||
'.': '.',
|
||||
'/': '/',
|
||||
';': ';',
|
||||
"'": "'",
|
||||
'[': '[',
|
||||
']': ']',
|
||||
'\\': '\\',
|
||||
'-': '-',
|
||||
'=': '=',
|
||||
'`': '`',
|
||||
};
|
||||
|
||||
export const SHORTCUT_ACTION_ORDER: ShortcutAction[] = [
|
||||
'runQuery',
|
||||
'focusSidebarSearch',
|
||||
'newQueryTab',
|
||||
'toggleLogPanel',
|
||||
'toggleTheme',
|
||||
'openShortcutManager',
|
||||
];
|
||||
|
||||
export const SHORTCUT_ACTION_META: Record<ShortcutAction, ShortcutActionMeta> = {
|
||||
runQuery: {
|
||||
label: '执行 SQL',
|
||||
description: '在当前查询页执行 SQL',
|
||||
},
|
||||
focusSidebarSearch: {
|
||||
label: '聚焦侧边栏搜索',
|
||||
description: '定位到左侧连接树搜索框',
|
||||
allowInEditable: true,
|
||||
},
|
||||
newQueryTab: {
|
||||
label: '新建查询页',
|
||||
description: '创建一个新的 SQL 查询标签页',
|
||||
},
|
||||
toggleLogPanel: {
|
||||
label: '切换日志面板',
|
||||
description: '打开或关闭 SQL 执行日志面板',
|
||||
},
|
||||
toggleTheme: {
|
||||
label: '切换主题',
|
||||
description: '在亮色和暗色主题之间切换',
|
||||
},
|
||||
openShortcutManager: {
|
||||
label: '打开快捷键管理',
|
||||
description: '打开快捷键设置面板',
|
||||
allowInEditable: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const DEFAULT_SHORTCUT_OPTIONS: ShortcutOptions = {
|
||||
runQuery: { combo: 'Ctrl+Shift+R', enabled: true },
|
||||
focusSidebarSearch: { combo: 'Ctrl+F', enabled: true },
|
||||
newQueryTab: { combo: 'Ctrl+Shift+N', enabled: true },
|
||||
toggleLogPanel: { combo: 'Ctrl+Shift+L', enabled: true },
|
||||
toggleTheme: { combo: 'Ctrl+Shift+D', enabled: true },
|
||||
openShortcutManager: { combo: 'Ctrl+,', enabled: true },
|
||||
};
|
||||
|
||||
const normalizeKeyToken = (value: string): string => {
|
||||
const token = String(value || '').trim();
|
||||
if (!token) return '';
|
||||
const alias = KEY_ALIASES[token.toLowerCase()];
|
||||
if (alias) return alias;
|
||||
if (/^f([1-9]|1[0-2])$/i.test(token)) {
|
||||
return token.toUpperCase();
|
||||
}
|
||||
if (token.length === 1) {
|
||||
return token === '+' ? '+' : token.toUpperCase();
|
||||
}
|
||||
return token.length > 1 ? token[0].toUpperCase() + token.slice(1).toLowerCase() : token;
|
||||
};
|
||||
|
||||
export const normalizeShortcutCombo = (combo: string): string => {
|
||||
const raw = String(combo || '').trim();
|
||||
if (!raw) return '';
|
||||
|
||||
const pieces = raw
|
||||
.split('+')
|
||||
.map(part => part.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
const modifiers: string[] = [];
|
||||
let key = '';
|
||||
|
||||
pieces.forEach((part) => {
|
||||
const normalized = normalizeKeyToken(part);
|
||||
if (!normalized) return;
|
||||
if (MODIFIER_SET.has(normalized as typeof MODIFIER_ORDER[number])) {
|
||||
if (!modifiers.includes(normalized)) {
|
||||
modifiers.push(normalized);
|
||||
}
|
||||
return;
|
||||
}
|
||||
key = normalized;
|
||||
});
|
||||
|
||||
modifiers.sort((a, b) => MODIFIER_ORDER.indexOf(a as typeof MODIFIER_ORDER[number]) - MODIFIER_ORDER.indexOf(b as typeof MODIFIER_ORDER[number]));
|
||||
if (!key) {
|
||||
return modifiers.join('+');
|
||||
}
|
||||
return [...modifiers, key].join('+');
|
||||
};
|
||||
|
||||
const normalizeKeyboardKey = (key: string): string => {
|
||||
const token = String(key || '').trim();
|
||||
if (!token) return '';
|
||||
const alias = KEY_ALIASES[token.toLowerCase()];
|
||||
if (alias) return alias;
|
||||
if (token.length === 1) {
|
||||
if (token === ' ') return 'Space';
|
||||
return token.toUpperCase();
|
||||
}
|
||||
if (/^f([1-9]|1[0-2])$/i.test(token)) {
|
||||
return token.toUpperCase();
|
||||
}
|
||||
return token.length > 1 ? token[0].toUpperCase() + token.slice(1) : token;
|
||||
};
|
||||
|
||||
export const eventToShortcut = (event: KeyboardEvent | ReactKeyboardEvent): string => {
|
||||
const key = normalizeKeyboardKey(event.key);
|
||||
if (!key || MODIFIER_SET.has(key as typeof MODIFIER_ORDER[number])) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const modifiers: string[] = [];
|
||||
if (event.ctrlKey) modifiers.push('Ctrl');
|
||||
if (event.metaKey) modifiers.push('Meta');
|
||||
if (event.altKey) modifiers.push('Alt');
|
||||
if (event.shiftKey) modifiers.push('Shift');
|
||||
|
||||
return normalizeShortcutCombo([...modifiers, key].join('+'));
|
||||
};
|
||||
|
||||
export const isShortcutMatch = (event: KeyboardEvent | ReactKeyboardEvent, combo: string): boolean => {
|
||||
const expected = normalizeShortcutCombo(combo);
|
||||
if (!expected) return false;
|
||||
const actual = eventToShortcut(event);
|
||||
return actual === expected;
|
||||
};
|
||||
|
||||
export const hasModifierKey = (combo: string): boolean => {
|
||||
const normalized = normalizeShortcutCombo(combo);
|
||||
if (!normalized) return false;
|
||||
return normalized.split('+').some(part => MODIFIER_SET.has(part as typeof MODIFIER_ORDER[number]));
|
||||
};
|
||||
|
||||
export const cloneShortcutOptions = (value: ShortcutOptions): ShortcutOptions => {
|
||||
return SHORTCUT_ACTION_ORDER.reduce((acc, action) => {
|
||||
acc[action] = {
|
||||
combo: normalizeShortcutCombo(value[action]?.combo || DEFAULT_SHORTCUT_OPTIONS[action].combo),
|
||||
enabled: value[action]?.enabled !== false,
|
||||
};
|
||||
return acc;
|
||||
}, {} as ShortcutOptions);
|
||||
};
|
||||
|
||||
export const sanitizeShortcutOptions = (value: unknown): ShortcutOptions => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const defaults = cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS);
|
||||
|
||||
SHORTCUT_ACTION_ORDER.forEach((action) => {
|
||||
const actionRaw = raw[action];
|
||||
if (!actionRaw || typeof actionRaw !== 'object') {
|
||||
return;
|
||||
}
|
||||
const binding = actionRaw as Record<string, unknown>;
|
||||
const combo = normalizeShortcutCombo(String(binding.combo || defaults[action].combo));
|
||||
defaults[action] = {
|
||||
combo: combo || defaults[action].combo,
|
||||
enabled: binding.enabled === false ? false : true,
|
||||
};
|
||||
});
|
||||
|
||||
return defaults;
|
||||
};
|
||||
|
||||
export const isEditableElement = (target: EventTarget | null): boolean => {
|
||||
if (!(target instanceof HTMLElement)) {
|
||||
return false;
|
||||
}
|
||||
const tag = target.tagName.toLowerCase();
|
||||
if (target.isContentEditable) {
|
||||
return true;
|
||||
}
|
||||
if (tag === 'input' || tag === 'textarea' || tag === 'select') {
|
||||
return true;
|
||||
}
|
||||
if (target.closest('.monaco-editor, .monaco-inputbox, .ant-select, .ant-picker, .ant-input')) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export const getShortcutDisplay = (combo: string): string => {
|
||||
const normalized = normalizeShortcutCombo(combo);
|
||||
return normalized || '-';
|
||||
};
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
export type FilterCondition = {
|
||||
id?: number;
|
||||
enabled?: boolean;
|
||||
logic?: 'AND' | 'OR';
|
||||
column?: string;
|
||||
op?: string;
|
||||
value?: string;
|
||||
@@ -23,6 +25,8 @@ const needsQuote = (ident: string): boolean => {
|
||||
if (!ident) return false;
|
||||
// 如果包含特殊字符(非字母、数字、下划线)则需要引号
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(ident)) return true;
|
||||
// PostgreSQL 会将未加引号的标识符折叠为小写,含大写字母时必须加引号
|
||||
if (/[A-Z]/.test(ident)) return true;
|
||||
// 常见 SQL 保留字列表(简化版)
|
||||
const reserved = ['select', 'from', 'where', 'table', 'index', 'user', 'order', 'group', 'by', 'limit', 'offset', 'and', 'or', 'not', 'null', 'true', 'false', 'key', 'primary', 'foreign', 'references', 'default', 'constraint', 'create', 'drop', 'alter', 'insert', 'update', 'delete', 'set', 'values', 'into', 'join', 'left', 'right', 'inner', 'outer', 'on', 'as', 'is', 'in', 'like', 'between', 'case', 'when', 'then', 'else', 'end', 'having', 'distinct', 'all', 'any', 'exists', 'union', 'except', 'intersect'];
|
||||
return reserved.includes(ident.toLowerCase());
|
||||
@@ -33,7 +37,7 @@ export const quoteIdentPart = (dbType: string, ident: string) => {
|
||||
if (!raw) return raw;
|
||||
const dbTypeLower = (dbType || '').toLowerCase();
|
||||
|
||||
if (dbTypeLower === 'mysql') {
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros' || dbTypeLower === 'sphinx' || dbTypeLower === 'tdengine' || dbTypeLower === 'clickhouse') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
|
||||
@@ -46,6 +50,11 @@ export const quoteIdentPart = (dbType: string, ident: string) => {
|
||||
return raw;
|
||||
}
|
||||
|
||||
// SQL Server 使用 [bracket] 标识符
|
||||
if (dbTypeLower === 'sqlserver' || dbTypeLower === 'mssql') {
|
||||
return `[${raw.replace(/]/g, ']]')}]`;
|
||||
}
|
||||
|
||||
// 其他数据库默认加双引号
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
};
|
||||
@@ -60,6 +69,112 @@ export const quoteQualifiedIdent = (dbType: string, ident: string) => {
|
||||
|
||||
// Escapes a value for embedding in a single-quoted SQL string literal by
// doubling every single quote (standard SQL escaping).
export const escapeLiteral = (val: string) => {
  const text = val || '';
  return text.split("'").join("''");
};
|
||||
|
||||
// Sort descriptor consumed by buildOrderBySQL. `order` is 'ascend'/'descend'
// (any other value means "no explicit sort"). Presumably this mirrors an
// antd Table sorter object — confirm against the caller.
type SortInfo = {
  columnKey?: string;
  order?: string;
} | null | undefined;
|
||||
|
||||
// 为排序查询按库类型注入 sort_buffer 提升参数(仅影响当前语句)。
|
||||
// MySQL: 使用 Optimizer Hint `SET_VAR`。
|
||||
// MariaDB: 使用 `SET STATEMENT ... FOR` 包装当前查询。
|
||||
export const withSortBufferTuningSQL = (
|
||||
dbType: string,
|
||||
sql: string,
|
||||
sortBufferBytes: number,
|
||||
) => {
|
||||
const rawSql = String(sql || '');
|
||||
const trimmed = rawSql.trim();
|
||||
if (!trimmed) return rawSql;
|
||||
if (!/^select\b/i.test(trimmed)) return rawSql;
|
||||
|
||||
const normalizedType = String(dbType || '').trim().toLowerCase();
|
||||
const bytes = Math.max(256 * 1024, Math.floor(Number(sortBufferBytes) || 0));
|
||||
if (normalizedType === 'mysql') {
|
||||
return rawSql.replace(
|
||||
/^\s*select\b/i,
|
||||
(matched) => `${matched} /*+ SET_VAR(sort_buffer_size=${bytes}) */`,
|
||||
);
|
||||
}
|
||||
if (normalizedType === 'mariadb') {
|
||||
return `SET STATEMENT sort_buffer_size=${bytes} FOR ${rawSql}`;
|
||||
}
|
||||
return rawSql;
|
||||
};
|
||||
|
||||
export const buildOrderBySQL = (
|
||||
dbType: string,
|
||||
sortInfo: SortInfo,
|
||||
fallbackColumns: string[] = [],
|
||||
) => {
|
||||
const dbTypeLower = String(dbType || '').trim().toLowerCase();
|
||||
const sortColumn = normalizeIdentPart(String(sortInfo?.columnKey || ''));
|
||||
const sortOrder = String(sortInfo?.order || '');
|
||||
const direction = sortOrder === 'ascend' ? 'ASC' : sortOrder === 'descend' ? 'DESC' : '';
|
||||
if (sortColumn && direction) {
|
||||
return ` ORDER BY ${quoteIdentPart(dbType, sortColumn)} ${direction}`;
|
||||
}
|
||||
|
||||
// MySQL/MariaDB 大表在无显式排序需求时强制 ORDER BY(即使按主键)可能触发 filesort,
|
||||
// 导致 `Error 1038 (HY001): Out of sort memory`。
|
||||
// 因此仅在用户主动点击排序时下发 ORDER BY,默认分页查询不加兜底排序。
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const seen = new Set<string>();
|
||||
const stableColumns = (fallbackColumns || [])
|
||||
.map((col) => normalizeIdentPart(String(col || '')))
|
||||
.filter((col) => {
|
||||
if (!col) return false;
|
||||
const key = col.toLowerCase();
|
||||
if (seen.has(key)) return false;
|
||||
seen.add(key);
|
||||
return true;
|
||||
});
|
||||
if (stableColumns.length > 0) {
|
||||
const parts = stableColumns.map((col) => `${quoteIdentPart(dbType, col)} ASC`);
|
||||
return ` ORDER BY ${parts.join(', ')}`;
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
|
||||
export const buildPaginatedSelectSQL = (
|
||||
dbType: string,
|
||||
baseSql: string,
|
||||
orderBySQL: string,
|
||||
limit: number,
|
||||
offset: number,
|
||||
) => {
|
||||
const normalizedType = String(dbType || '').trim().toLowerCase();
|
||||
const safeLimit = Math.max(0, Math.floor(Number(limit) || 0));
|
||||
const safeOffset = Math.max(0, Math.floor(Number(offset) || 0));
|
||||
const base = String(baseSql || '').trim();
|
||||
const orderBy = String(orderBySQL || '');
|
||||
|
||||
if (!base || safeLimit <= 0) {
|
||||
return `${base}${orderBy}`;
|
||||
}
|
||||
|
||||
switch (normalizedType) {
|
||||
case 'oracle': {
|
||||
const orderedSql = `${base}${orderBy}`;
|
||||
const upperBound = safeOffset + safeLimit;
|
||||
if (safeOffset <= 0) {
|
||||
return `SELECT * FROM (${orderedSql}) WHERE ROWNUM <= ${upperBound}`;
|
||||
}
|
||||
return `SELECT * FROM (SELECT "__gonavi_page__".*, ROWNUM "__gonavi_rn__" FROM (${orderedSql}) "__gonavi_page__" WHERE ROWNUM <= ${upperBound}) WHERE "__gonavi_rn__" > ${safeOffset}`;
|
||||
}
|
||||
case 'sqlserver':
|
||||
case 'mssql': {
|
||||
const effectiveOrderBy = orderBy.trim() ? orderBy : ' ORDER BY (SELECT NULL)';
|
||||
return `${base}${effectiveOrderBy} OFFSET ${safeOffset} ROWS FETCH NEXT ${safeLimit} ROWS ONLY`;
|
||||
}
|
||||
default:
|
||||
return `${base}${orderBy} LIMIT ${safeLimit} OFFSET ${safeOffset}`;
|
||||
}
|
||||
};
|
||||
|
||||
export const parseListValues = (val: string) => {
|
||||
const raw = (val || '').trim();
|
||||
if (!raw) return [];
|
||||
@@ -69,18 +184,31 @@ export const parseListValues = (val: string) => {
|
||||
.filter(Boolean);
|
||||
};
|
||||
|
||||
const normalizeConditionLogic = (logic: unknown): 'AND' | 'OR' => {
|
||||
return String(logic || '').trim().toUpperCase() === 'OR' ? 'OR' : 'AND';
|
||||
};
|
||||
|
||||
export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) => {
|
||||
const whereParts: string[] = [];
|
||||
const whereParts: Array<{ expr: string; logic: 'AND' | 'OR' }> = [];
|
||||
|
||||
(conditions || []).forEach((cond) => {
|
||||
if (cond?.enabled === false) return;
|
||||
|
||||
const op = (cond?.op || '').trim();
|
||||
const column = (cond?.column || '').trim();
|
||||
const value = (cond?.value ?? '').toString();
|
||||
const value2 = (cond?.value2 ?? '').toString();
|
||||
const logic = normalizeConditionLogic(cond?.logic);
|
||||
|
||||
const appendWherePart = (expr: string) => {
|
||||
const normalizedExpr = String(expr || '').trim();
|
||||
if (!normalizedExpr) return;
|
||||
whereParts.push({ expr: normalizedExpr, logic });
|
||||
};
|
||||
|
||||
if (op === 'CUSTOM') {
|
||||
const expr = value.trim();
|
||||
if (expr) whereParts.push(`(${expr})`);
|
||||
if (expr) appendWherePart(`(${expr})`);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -90,80 +218,80 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
|
||||
switch (op) {
|
||||
case 'IS_NULL':
|
||||
whereParts.push(`${col} IS NULL`);
|
||||
appendWherePart(`${col} IS NULL`);
|
||||
return;
|
||||
case 'IS_NOT_NULL':
|
||||
whereParts.push(`${col} IS NOT NULL`);
|
||||
appendWherePart(`${col} IS NOT NULL`);
|
||||
return;
|
||||
case 'IS_EMPTY':
|
||||
// 兼容:空值通常理解为 NULL 或空字符串
|
||||
whereParts.push(`(${col} IS NULL OR ${col} = '')`);
|
||||
appendWherePart(`(${col} IS NULL OR ${col} = '')`);
|
||||
return;
|
||||
case 'IS_NOT_EMPTY':
|
||||
whereParts.push(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
appendWherePart(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
return;
|
||||
case 'BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
appendWherePart(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
appendWherePart(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} IN (${list})`);
|
||||
appendWherePart(`${col} IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} NOT IN (${list})`);
|
||||
appendWherePart(`${col} NOT IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case '=':
|
||||
@@ -174,7 +302,7 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
case '>=': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
@@ -182,17 +310,23 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
if (op.toUpperCase() === 'LIKE') {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : '';
|
||||
};
|
||||
if (whereParts.length === 0) return '';
|
||||
|
||||
let whereExpr = `(${whereParts[0].expr})`;
|
||||
for (let i = 1; i < whereParts.length; i++) {
|
||||
const part = whereParts[i];
|
||||
whereExpr = `(${whereExpr} ${part.logic} (${part.expr}))`;
|
||||
}
|
||||
return `WHERE ${whereExpr}`;
|
||||
};
|
||||
|
||||
2
frontend/src/vite-env.d.ts
vendored
Normal file
2
frontend/src/vite-env.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
/// <reference types="vite/client" />
|
||||
|
||||
@@ -12,4 +12,4 @@ export default defineConfig({
|
||||
outDir: 'dist', // Standard Wails output directory
|
||||
emptyOutDir: true,
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
79
frontend/wailsjs/go/app/App.d.ts
vendored
79
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -1,13 +1,24 @@
|
||||
// Cynhyrchwyd y ffeil hon yn awtomatig. PEIDIWCH Â MODIWL
|
||||
// This file is automatically generated. DO NOT EDIT
|
||||
import {connection} from '../models';
|
||||
import {time} from '../models';
|
||||
import {sync} from '../models';
|
||||
import {redis} from '../models';
|
||||
|
||||
export function ApplyChanges(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:connection.ChangeSet):Promise<connection.QueryResult>;
|
||||
|
||||
export function CancelQuery(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckDriverNetworkStatus():Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckForUpdates():Promise<connection.QueryResult>;
|
||||
|
||||
export function CleanupStaleQueries(arg1:time.Duration):Promise<void>;
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ConfigureGlobalProxy(arg1:boolean,arg2:connection.ProxyConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function CreateDatabase(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
@@ -28,6 +39,10 @@ export function DBGetTriggers(arg1:connection.ConnectionConfig,arg2:string,arg3:
|
||||
|
||||
export function DBQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryIsolated(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryWithCancel(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
@@ -36,8 +51,18 @@ export function DataSyncAnalyze(arg1:sync.SyncConfig):Promise<connection.QueryRe
|
||||
|
||||
export function DataSyncPreview(arg1:sync.SyncConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function DownloadDriverPackage(arg1:string,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DownloadUpdate():Promise<connection.QueryResult>;
|
||||
|
||||
export function DropDatabase(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DropFunction(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DropTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DropView(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportData(arg1:Array<Record<string, any>>,arg2:Array<string>,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportDatabaseSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:boolean):Promise<connection.QueryResult>;
|
||||
@@ -46,16 +71,34 @@ export function ExportQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:st
|
||||
|
||||
export function ExportTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTablesDataSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTablesSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>,arg4:boolean):Promise<connection.QueryResult>;
|
||||
|
||||
export function GenerateQueryID():Promise<string>;
|
||||
|
||||
export function GetAppInfo():Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverStatusList(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverVersionList(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverVersionPackageSize(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetGlobalProxyConfig():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportConfigFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportData(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportDataWithProgress(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function InstallLocalDriverPackage(arg1:string,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function InstallUpdateAndRestart():Promise<connection.QueryResult>;
|
||||
|
||||
export function MongoDiscoverMembers(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function MySQLConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function MySQLGetDatabases(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
@@ -66,8 +109,12 @@ export function MySQLQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:str
|
||||
|
||||
export function MySQLShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function OpenDownloadedUpdateDirectory():Promise<connection.QueryResult>;
|
||||
|
||||
export function OpenSQLFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function PreviewImportFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisDeleteHashField(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
@@ -90,7 +137,7 @@ export function RedisListSet(arg1:connection.ConnectionConfig,arg2:string,arg3:n
|
||||
|
||||
export function RedisRenameKey(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:number,arg4:number):Promise<connection.QueryResult>;
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:any,arg4:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSelectDB(arg1:connection.ConnectionConfig,arg2:number):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -104,10 +151,40 @@ export function RedisSetString(arg1:connection.ConnectionConfig,arg2:string,arg3
|
||||
|
||||
export function RedisSetTTL(arg1:connection.ConnectionConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisStreamAdd(arg1:connection.ConnectionConfig,arg2:string,arg3:Record<string, string>,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisStreamDelete(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisTestConnection(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisZSetAdd(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<redis.ZSetMember>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisZSetRemove(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RemoveDriverPackage(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RenameDatabase(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RenameTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RenameView(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ResolveDriverDownloadDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ResolveDriverPackageDownloadURL(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ResolveDriverRepositoryURL(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDatabaseFile(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverDownloadDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverPackageDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverPackageFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectSSHKeyFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SetWindowTranslucency(arg1:number,arg2:number):Promise<void>;
|
||||
|
||||
export function TestConnection(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -6,10 +6,30 @@ export function ApplyChanges(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ApplyChanges'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function CancelQuery(arg1) {
|
||||
return window['go']['app']['App']['CancelQuery'](arg1);
|
||||
}
|
||||
|
||||
export function CheckDriverNetworkStatus() {
|
||||
return window['go']['app']['App']['CheckDriverNetworkStatus']();
|
||||
}
|
||||
|
||||
export function CheckForUpdates() {
|
||||
return window['go']['app']['App']['CheckForUpdates']();
|
||||
}
|
||||
|
||||
export function CleanupStaleQueries(arg1) {
|
||||
return window['go']['app']['App']['CleanupStaleQueries'](arg1);
|
||||
}
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1) {
|
||||
return window['go']['app']['App']['ConfigureDriverRuntimeDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function ConfigureGlobalProxy(arg1, arg2) {
|
||||
return window['go']['app']['App']['ConfigureGlobalProxy'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function CreateDatabase(arg1, arg2) {
|
||||
return window['go']['app']['App']['CreateDatabase'](arg1, arg2);
|
||||
}
|
||||
@@ -50,6 +70,14 @@ export function DBQuery(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQuery'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBQueryIsolated(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQueryIsolated'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBQueryWithCancel(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DBQueryWithCancel'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DBShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
@@ -66,10 +94,30 @@ export function DataSyncPreview(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DataSyncPreview'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DownloadDriverPackage(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DownloadDriverPackage'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DownloadUpdate() {
|
||||
return window['go']['app']['App']['DownloadUpdate']();
|
||||
}
|
||||
|
||||
export function DropDatabase(arg1, arg2) {
|
||||
return window['go']['app']['App']['DropDatabase'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function DropFunction(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DropFunction'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DropTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DropTable'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DropView(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DropView'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportData(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportData'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
@@ -86,14 +134,38 @@ export function ExportTable(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportTable'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function ExportTablesDataSQL(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['ExportTablesDataSQL'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportTablesSQL(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportTablesSQL'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function GenerateQueryID() {
|
||||
return window['go']['app']['App']['GenerateQueryID']();
|
||||
}
|
||||
|
||||
export function GetAppInfo() {
|
||||
return window['go']['app']['App']['GetAppInfo']();
|
||||
}
|
||||
|
||||
export function GetDriverStatusList(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverStatusList'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetDriverVersionList(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverVersionList'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetDriverVersionPackageSize(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverVersionPackageSize'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetGlobalProxyConfig() {
|
||||
return window['go']['app']['App']['GetGlobalProxyConfig']();
|
||||
}
|
||||
|
||||
export function ImportConfigFile() {
|
||||
return window['go']['app']['App']['ImportConfigFile']();
|
||||
}
|
||||
@@ -102,10 +174,22 @@ export function ImportData(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['ImportData'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ImportDataWithProgress(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ImportDataWithProgress'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function InstallLocalDriverPackage(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['InstallLocalDriverPackage'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function InstallUpdateAndRestart() {
|
||||
return window['go']['app']['App']['InstallUpdateAndRestart']();
|
||||
}
|
||||
|
||||
export function MongoDiscoverMembers(arg1) {
|
||||
return window['go']['app']['App']['MongoDiscoverMembers'](arg1);
|
||||
}
|
||||
|
||||
export function MySQLConnect(arg1) {
|
||||
return window['go']['app']['App']['MySQLConnect'](arg1);
|
||||
}
|
||||
@@ -126,10 +210,18 @@ export function MySQLShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['MySQLShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function OpenDownloadedUpdateDirectory() {
|
||||
return window['go']['app']['App']['OpenDownloadedUpdateDirectory']();
|
||||
}
|
||||
|
||||
export function OpenSQLFile() {
|
||||
return window['go']['app']['App']['OpenSQLFile']();
|
||||
}
|
||||
|
||||
export function PreviewImportFile(arg1) {
|
||||
return window['go']['app']['App']['PreviewImportFile'](arg1);
|
||||
}
|
||||
|
||||
export function RedisConnect(arg1) {
|
||||
return window['go']['app']['App']['RedisConnect'](arg1);
|
||||
}
|
||||
@@ -202,6 +294,14 @@ export function RedisSetTTL(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisSetTTL'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisStreamAdd(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RedisStreamAdd'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RedisStreamDelete(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisStreamDelete'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisTestConnection(arg1) {
|
||||
return window['go']['app']['App']['RedisTestConnection'](arg1);
|
||||
}
|
||||
@@ -214,6 +314,58 @@ export function RedisZSetRemove(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisZSetRemove'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RemoveDriverPackage(arg1, arg2) {
|
||||
return window['go']['app']['App']['RemoveDriverPackage'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RenameDatabase(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RenameDatabase'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RenameTable(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RenameTable'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RenameView(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RenameView'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function ResolveDriverDownloadDirectory(arg1) {
|
||||
return window['go']['app']['App']['ResolveDriverDownloadDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function ResolveDriverPackageDownloadURL(arg1, arg2) {
|
||||
return window['go']['app']['App']['ResolveDriverPackageDownloadURL'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function ResolveDriverRepositoryURL(arg1) {
|
||||
return window['go']['app']['App']['ResolveDriverRepositoryURL'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDatabaseFile(arg1, arg2) {
|
||||
return window['go']['app']['App']['SelectDatabaseFile'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function SelectDriverDownloadDirectory(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverDownloadDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDriverPackageDirectory(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverPackageDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDriverPackageFile(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverPackageFile'](arg1);
|
||||
}
|
||||
|
||||
export function SelectSSHKeyFile(arg1) {
|
||||
return window['go']['app']['App']['SelectSSHKeyFile'](arg1);
|
||||
}
|
||||
|
||||
export function SetWindowTranslucency(arg1, arg2) {
|
||||
return window['go']['app']['App']['SetWindowTranslucency'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function TestConnection(arg1) {
|
||||
return window['go']['app']['App']['TestConnection'](arg1);
|
||||
}
|
||||
|
||||
@@ -48,6 +48,44 @@ export namespace connection {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
export class HTTPTunnelConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new HTTPTunnelConfig(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.host = source["host"];
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
}
|
||||
}
|
||||
export class ProxyConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new ProxyConfig(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.type = source["type"];
|
||||
this.host = source["host"];
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
}
|
||||
}
|
||||
export class SSHConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
@@ -74,13 +112,34 @@ export namespace connection {
|
||||
port: number;
|
||||
user: string;
|
||||
password: string;
|
||||
savePassword?: boolean;
|
||||
database: string;
|
||||
useSSL?: boolean;
|
||||
sslMode?: string;
|
||||
sslCertPath?: string;
|
||||
sslKeyPath?: string;
|
||||
useSSH: boolean;
|
||||
ssh: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
useHttpTunnel?: boolean;
|
||||
httpTunnel?: HTTPTunnelConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
redisDB?: number;
|
||||
uri?: string;
|
||||
hosts?: string[];
|
||||
topology?: string;
|
||||
mysqlReplicaUser?: string;
|
||||
mysqlReplicaPassword?: string;
|
||||
replicaSet?: string;
|
||||
authSource?: string;
|
||||
readPreference?: string;
|
||||
mongoSrv?: boolean;
|
||||
mongoAuthMechanism?: string;
|
||||
mongoReplicaUser?: string;
|
||||
mongoReplicaPassword?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new ConnectionConfig(source);
|
||||
@@ -93,13 +152,34 @@ export namespace connection {
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
this.savePassword = source["savePassword"];
|
||||
this.database = source["database"];
|
||||
this.useSSL = source["useSSL"];
|
||||
this.sslMode = source["sslMode"];
|
||||
this.sslCertPath = source["sslCertPath"];
|
||||
this.sslKeyPath = source["sslKeyPath"];
|
||||
this.useSSH = source["useSSH"];
|
||||
this.ssh = this.convertValues(source["ssh"], SSHConfig);
|
||||
this.useProxy = source["useProxy"];
|
||||
this.proxy = this.convertValues(source["proxy"], ProxyConfig);
|
||||
this.useHttpTunnel = source["useHttpTunnel"];
|
||||
this.httpTunnel = this.convertValues(source["httpTunnel"], HTTPTunnelConfig);
|
||||
this.driver = source["driver"];
|
||||
this.dsn = source["dsn"];
|
||||
this.timeout = source["timeout"];
|
||||
this.redisDB = source["redisDB"];
|
||||
this.uri = source["uri"];
|
||||
this.hosts = source["hosts"];
|
||||
this.topology = source["topology"];
|
||||
this.mysqlReplicaUser = source["mysqlReplicaUser"];
|
||||
this.mysqlReplicaPassword = source["mysqlReplicaPassword"];
|
||||
this.replicaSet = source["replicaSet"];
|
||||
this.authSource = source["authSource"];
|
||||
this.readPreference = source["readPreference"];
|
||||
this.mongoSrv = source["mongoSrv"];
|
||||
this.mongoAuthMechanism = source["mongoAuthMechanism"];
|
||||
this.mongoReplicaUser = source["mongoReplicaUser"];
|
||||
this.mongoReplicaPassword = source["mongoReplicaPassword"];
|
||||
}
|
||||
|
||||
convertValues(a: any, classs: any, asMap: boolean = false): any {
|
||||
@@ -120,11 +200,14 @@ export namespace connection {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class QueryResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
data: any;
|
||||
fields?: string[];
|
||||
queryId?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new QueryResult(source);
|
||||
@@ -136,6 +219,7 @@ export namespace connection {
|
||||
this.message = source["message"];
|
||||
this.data = source["data"];
|
||||
this.fields = source["fields"];
|
||||
this.queryId = source["queryId"];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -193,6 +277,9 @@ export namespace sync {
|
||||
mode: string;
|
||||
jobId?: string;
|
||||
autoAddColumns?: boolean;
|
||||
targetTableStrategy?: string;
|
||||
createIndexes?: boolean;
|
||||
mongoCollectionName?: string;
|
||||
tableOptions?: Record<string, TableOptions>;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
@@ -208,6 +295,9 @@ export namespace sync {
|
||||
this.mode = source["mode"];
|
||||
this.jobId = source["jobId"];
|
||||
this.autoAddColumns = source["autoAddColumns"];
|
||||
this.targetTableStrategy = source["targetTableStrategy"];
|
||||
this.createIndexes = source["createIndexes"];
|
||||
this.mongoCollectionName = source["mongoCollectionName"];
|
||||
this.tableOptions = this.convertValues(source["tableOptions"], TableOptions, true);
|
||||
}
|
||||
|
||||
|
||||
65
go.mod
65
go.mod
@@ -5,27 +5,58 @@ go 1.24.3
|
||||
require (
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3
|
||||
gitee.com/chunanyong/dm v1.8.22
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5
|
||||
github.com/go-sql-driver/mysql v1.9.3
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/highgo/pq-sm3 v0.0.0
|
||||
github.com/lib/pq v1.11.1
|
||||
github.com/microsoft/go-mssqldb v1.9.6
|
||||
github.com/redis/go-redis/v9 v9.17.3
|
||||
github.com/sijms/go-ora/v2 v2.9.0
|
||||
github.com/taosdata/driver-go/v3 v3.7.8
|
||||
github.com/wailsapp/wails/v2 v2.11.0
|
||||
github.com/xuri/excelize/v2 v2.10.0
|
||||
go.mongodb.org/mongo-driver v1.17.9
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
golang.org/x/mod v0.32.0
|
||||
golang.org/x/net v0.49.0
|
||||
golang.org/x/text v0.33.0
|
||||
modernc.org/sqlite v1.44.3
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/ClickHouse/ch-go v0.71.0 // indirect
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.5.1 // indirect
|
||||
github.com/bep/debounce v1.2.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/duckdb/duckdb-go-bindings v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/darwin-amd64 v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/darwin-arm64 v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-amd64 v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-arm64 v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/windows-amd64 v0.3.3 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/godbus/dbus/v5 v5.1.0 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/flatbuffers v25.12.19+incompatible // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.18.3 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/labstack/echo/v4 v4.13.3 // indirect
|
||||
github.com/labstack/gommon v0.4.2 // indirect
|
||||
github.com/leaanthony/go-ansi-parser v1.6.1 // indirect
|
||||
@@ -34,22 +65,46 @@ require (
|
||||
github.com/leaanthony/u v1.1.1 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/paulmach/orb v0.12.0 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.25 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/richardlehane/mscfb v1.0.4 // indirect
|
||||
github.com/richardlehane/msoleps v1.0.4 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/samber/lo v1.49.1 // indirect
|
||||
github.com/segmentio/asm v1.2.1 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/tiendc/go-deepcopy v1.7.1 // indirect
|
||||
github.com/tkrajina/go-reflector v0.5.8 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasttemplate v1.2.2 // indirect
|
||||
github.com/wailsapp/go-webview2 v1.0.22 // indirect
|
||||
github.com/wailsapp/mimetype v1.4.1 // indirect
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
|
||||
golang.org/x/net v0.48.0 // indirect
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||
github.com/xdg-go/scram v1.2.0 // indirect
|
||||
github.com/xdg-go/stringprep v1.0.4 // indirect
|
||||
github.com/xuri/efp v0.0.1 // indirect
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 // indirect
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
||||
github.com/zeebo/xxh3 v1.1.0 // indirect
|
||||
go.opentelemetry.io/otel v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.39.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
golang.org/x/text v0.33.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 // indirect
|
||||
golang.org/x/tools v0.41.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
modernc.org/libc v1.67.6 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
)
|
||||
|
||||
replace github.com/highgo/pq-sm3 => ./third_party/highgo-pq
|
||||
|
||||
248
go.sum
248
go.sum
@@ -4,6 +4,28 @@ gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3 h1:QjslQNaH5Nuap5i4ni
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3/go.mod h1:7lH5A1jzCXD9Nl16DzaBUOfDAT8NPrDmZwKu1p5wf94=
|
||||
gitee.com/chunanyong/dm v1.8.22 h1:H7fsrnUIvEA0jlDWew7vwELry1ff+tLMIu2Fk2cIBSg=
|
||||
gitee.com/chunanyong/dm v1.8.22/go.mod h1:EPRJnuPFgbyOFgJ0TRYCTGzhq+ZT4wdyaj/GW/LLcNg=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZba3YZqeTNJPtvqZoBu1sBN/L4sry+u2U3Y75w=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/ClickHouse/ch-go v0.71.0 h1:bUdZ/EZj/LcVHsMqaRUP2holqygrPWQKeMjc6nZoyRM=
|
||||
github.com/ClickHouse/ch-go v0.71.0/go.mod h1:NwbNc+7jaqfY58dmdDUbG4Jl22vThgx1cYjBw0vtgXw=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0 h1:fUR05TrF1GyvLDa/mAQjkx7KbgwdLRffs2n9O3WobtE=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0/go.mod h1:o6jf7JM/zveWC/PP277BLxjHy5KjnGX/jfljhM4s34g=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/apache/arrow-go/v18 v18.5.1 h1:yaQ6zxMGgf9YCYw4/oaeOU3AULySDlAYDOcnr4LdHdI=
|
||||
github.com/apache/arrow-go/v18 v18.5.1/go.mod h1:OCCJsmdq8AsRm8FkBSSmYTwL/s4zHW9CqxeBxEytkNE=
|
||||
github.com/apache/thrift v0.22.0 h1:r7mTJdj51TMDe6RtcmNdQxgn9XcyfGDOzegMDRg47uc=
|
||||
github.com/apache/thrift v0.22.0/go.mod h1:1e7J/O1Ae6ZQMTYdy9xa3w9k+XHWPfRvdPyJeynQ+/g=
|
||||
github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY=
|
||||
github.com/bep/debounce v1.2.1/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
@@ -12,31 +34,92 @@ github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/duckdb/duckdb-go-bindings v0.3.3 h1:lXogtCY8hiGLQvTfK55HcgvaA3K2MrwKeZGqhIin35U=
|
||||
github.com/duckdb/duckdb-go-bindings v0.3.3/go.mod h1:zS7OpBP8zwVlP38OljRZOnqWYlNd4KLcVfMoA1JFzpk=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/darwin-amd64 v0.3.3 h1:ue8BtIOSt+2Bt2fEfTAvBcQLxzBFhgfCcyzPtqQWTRA=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/darwin-amd64 v0.3.3/go.mod h1:EnAvZh1kNJHp5yF+M1ZHNEvapnmt6anq1xXHVrAGqMo=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/darwin-arm64 v0.3.3 h1:2TrSeTgtwi3WIvub9ba0mny+AClSNo1w0Ghszc2B8lQ=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/darwin-arm64 v0.3.3/go.mod h1:IGLSeEcFhNeZF16aVjQCULD7TsFZKG5G7SyKJAXKp5c=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-amd64 v0.3.3 h1:GN0cexhfE7uLb7qgDmsYG324wKF15nW+O7v5+NGalS4=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-amd64 v0.3.3/go.mod h1:KAIynZ0GHCS7X5fRyuFnQMg/SZBPK/bS9OCOVojClxw=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-arm64 v0.3.3 h1:bIJV+ct6yvMXjy+N3bfILFd0fkTK50AUhUTerkY40/8=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-arm64 v0.3.3/go.mod h1:81SGOYoEUs8qaAfSk1wRfM5oobrIJ5KI7AzYhK6/bvQ=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/windows-amd64 v0.3.3 h1:SK2sunA/MPb2T3113iFzHv6DWeu+qrsw0DizTFrvM+Q=
|
||||
github.com/duckdb/duckdb-go-bindings/lib/windows-amd64 v0.3.3/go.mod h1:K25pJL26ARblGDeuAkrdblFvUen92+CwksLtPEHRqqQ=
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5 h1:TlK8ipnzoKW2aNrjGqRkFWLCDpJDxR/VwH8ezEcvVhw=
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5/go.mod h1:6uIbC3gz36NCEygECzboygOo/Z9TeVwox/puG+ohWV0=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
|
||||
github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw=
|
||||
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
|
||||
github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo=
|
||||
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
|
||||
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 h1:vM5IJoUAy3d7zRSVtIwQgBj7BiWtMPfmPEgAXnvj1Ro=
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
|
||||
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
|
||||
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/flatbuffers v25.12.19+incompatible h1:haMV2JRRJCe1998HeW/p0X9UaMTK6SDo0ffLn2+DbLs=
|
||||
github.com/google/flatbuffers v25.12.19+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4=
|
||||
github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e h1:Q3+PugElBCf4PFpxhErSzU3/PY5sFL5Z6rfv4AbGAck=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw=
|
||||
github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/labstack/echo/v4 v4.13.3 h1:pwhpCPrTl5qry5HRdM5FwdXnhXSLSY+WE+YQSeCaafY=
|
||||
github.com/labstack/echo/v4 v4.13.3/go.mod h1:o90YNEeQWjDozo584l7AwhJMHN0bOC4tAfg+Xox9q5g=
|
||||
github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
|
||||
@@ -61,27 +144,73 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/microsoft/go-mssqldb v1.9.6 h1:1MNQg5UiSsokiPz3++K2KPx4moKrwIqly1wv+RyCKTw=
|
||||
github.com/microsoft/go-mssqldb v1.9.6/go.mod h1:yYMPDufyoF2vVuVCUGtZARr06DKFIhMrluTcgWlXpr4=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI=
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/paulmach/orb v0.12.0 h1:z+zOwjmG3MyEEqzv92UN49Lg1JFYx0L9GpGKNVDKk1s=
|
||||
github.com/paulmach/orb v0.12.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
|
||||
github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY=
|
||||
github.com/pierrec/lz4/v4 v4.1.25 h1:kocOqRffaIbU5djlIBr7Wh+cx82C0vtFb0fOurZHqD0=
|
||||
github.com/pierrec/lz4/v4 v4.1.25/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/redis/go-redis/v9 v9.17.3 h1:fN29NdNrE17KttK5Ndf20buqfDZwGNgoUr9qjl1DQx4=
|
||||
github.com/redis/go-redis/v9 v9.17.3/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM=
|
||||
github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk=
|
||||
github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
|
||||
github.com/richardlehane/msoleps v1.0.4 h1:WuESlvhX3gH2IHcd8UqyCuFY5yiq/GR/yqaSM/9/g00=
|
||||
github.com/richardlehane/msoleps v1.0.4/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
|
||||
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
|
||||
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
|
||||
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
|
||||
github.com/sijms/go-ora/v2 v2.9.0/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYoCqhR2dU=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8 h1:N2H6HLLZH2ve2ipcoFgG9BJS+yW0XksqNYwEdSmHaJk=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8/go.mod h1:gSxBEPOueMg0rTmMO1Ug6aeD7AwGdDGvUtLrsDTTpYc=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tiendc/go-deepcopy v1.7.1 h1:LnubftI6nYaaMOcaz0LphzwraqN8jiWTwm416sitff4=
|
||||
github.com/tiendc/go-deepcopy v1.7.1/go.mod h1:4bKjNC2r7boYOkD2IOuZpYjmlDdzjbpTRyCx+goBCJQ=
|
||||
github.com/tkrajina/go-reflector v0.5.8 h1:yPADHrwmUbMq4RGEyaOUpz2H90sRsETNVpjzo3DLVQQ=
|
||||
github.com/tkrajina/go-reflector v0.5.8/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
@@ -94,35 +223,126 @@ github.com/wailsapp/mimetype v1.4.1 h1:pQN9ycO7uo4vsUUuPeHEYoUkLVkaRntMnHJxVwYhw
|
||||
github.com/wailsapp/mimetype v1.4.1/go.mod h1:9aV5k31bBOv5z6u+QP8TltzvNGJPmNJD4XlAL3U+j3o=
|
||||
github.com/wailsapp/wails/v2 v2.11.0 h1:seLacV8pqupq32IjS4Y7V8ucab0WZwtK6VvUVxSBtqQ=
|
||||
github.com/wailsapp/wails/v2 v2.11.0/go.mod h1:jrf0ZaM6+GBc1wRmXsM8cIvzlg0karYin3erahI4+0k=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
|
||||
github.com/xdg-go/scram v1.2.0 h1:bYKF2AEwG5rqd1BumT4gAnvwU/M9nBp2pTSxeZw7Wvs=
|
||||
github.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=
|
||||
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
|
||||
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
||||
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
||||
github.com/xuri/efp v0.0.1 h1:fws5Rv3myXyYni8uwj2qKjVaRP30PdjeYe2Y6FDsCL8=
|
||||
github.com/xuri/efp v0.0.1/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI=
|
||||
github.com/xuri/excelize/v2 v2.10.0 h1:8aKsP7JD39iKLc6dH5Tw3dgV3sPRh8uRVXu/fMstfW4=
|
||||
github.com/xuri/excelize/v2 v2.10.0/go.mod h1:SC5TzhQkaOsTWpANfm+7bJCldzcnU/jrhqkTi/iBHBU=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 h1:+C0TIdyyYmzadGaL/HBLbf3WdLgC29pgyhTjAT/0nuE=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
|
||||
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/xxh3 v1.1.0 h1:s7DLGDK45Dyfg7++yxI0khrfwq9661w9EN78eP/UZVs=
|
||||
github.com/zeebo/xxh3 v1.1.0/go.mod h1:IisAie1LELR4xhVinxWS5+zf1lA4p0MW4T+w+W07F5s=
|
||||
go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
|
||||
go.mongodb.org/mongo-driver v1.17.9 h1:IexDdCuuNJ3BHrELgBlyaH9p60JXAvdzWR128q+U5tU=
|
||||
go.mongodb.org/mongo-driver v1.17.9/go.mod h1:LlOhpH5NUEfhxcAwG0UEkMqwYcc4JU18gtCdGudk/tQ=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU=
|
||||
golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ=
|
||||
golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8JzDTPXCzhN56OLJ+IhHY8U3A=
|
||||
golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
||||
|
||||
@@ -8,6 +8,8 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
@@ -15,6 +17,8 @@ import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const dbCachePingInterval = 30 * time.Second
|
||||
@@ -24,19 +28,27 @@ type cachedDatabase struct {
|
||||
lastPing time.Time
|
||||
}
|
||||
|
||||
type queryContext struct {
|
||||
cancel context.CancelFunc
|
||||
started time.Time
|
||||
}
|
||||
|
||||
// App struct
|
||||
type App struct {
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
queryMu sync.RWMutex
|
||||
runningQueries map[string]queryContext // queryID -> cancelFunc and start time
|
||||
}
|
||||
|
||||
// NewApp creates a new App application struct
|
||||
func NewApp() *App {
|
||||
return &App{
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
runningQueries: make(map[string]queryContext),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -45,9 +57,17 @@ func NewApp() *App {
|
||||
func (a *App) Startup(ctx context.Context) {
|
||||
a.ctx = ctx
|
||||
logger.Init()
|
||||
applyMacWindowTranslucencyFix()
|
||||
logger.Infof("应用启动完成")
|
||||
}
|
||||
|
||||
// SetWindowTranslucency 动态调整 macOS 窗口透明度。
|
||||
// 前端在加载用户外观设置后、以及用户修改外观时调用此方法。
|
||||
// opacity=1.0 且 blur=0 时窗口标记为 opaque,GPU 不再持续计算窗口背后的模糊合成。
|
||||
func (a *App) SetWindowTranslucency(opacity float64, blur float64) {
|
||||
setMacWindowTranslucency(opacity, blur)
|
||||
}
|
||||
|
||||
// Shutdown is called when the app terminates
|
||||
func (a *App) Shutdown(ctx context.Context) {
|
||||
logger.Infof("应用开始关闭,准备释放资源")
|
||||
@@ -58,31 +78,149 @@ func (a *App) Shutdown(ctx context.Context) {
|
||||
logger.Error(err, "关闭数据库连接失败")
|
||||
}
|
||||
}
|
||||
proxytunnel.CloseAllForwarders()
|
||||
// Close all Redis connections
|
||||
CloseAllRedisClients()
|
||||
logger.Infof("资源释放完成,应用已关闭")
|
||||
logger.Close()
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
func normalizeCacheKeyConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := config
|
||||
normalized.Type = strings.ToLower(strings.TrimSpace(normalized.Type))
|
||||
// timeout 仅用于 Query/Ping 控制,不应作为物理连接复用键的一部分。
|
||||
normalized.Timeout = 0
|
||||
normalized.SavePassword = false
|
||||
|
||||
if !normalized.UseSSH {
|
||||
normalized.SSH = connection.SSHConfig{}
|
||||
}
|
||||
// 保持与驱动默认一致,避免同一连接被重复缓存
|
||||
if config.Type == "postgres" && config.Database == "" {
|
||||
config.Database = "postgres"
|
||||
if !normalized.UseProxy {
|
||||
normalized.Proxy = connection.ProxyConfig{}
|
||||
}
|
||||
if !normalized.UseHTTPTunnel {
|
||||
normalized.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
}
|
||||
|
||||
b, _ := json.Marshal(config)
|
||||
if isFileDatabaseType(normalized.Type) {
|
||||
dsn := strings.TrimSpace(normalized.Host)
|
||||
if dsn == "" {
|
||||
dsn = strings.TrimSpace(normalized.Database)
|
||||
}
|
||||
if dsn == "" {
|
||||
dsn = ":memory:"
|
||||
}
|
||||
|
||||
// DuckDB/SQLite 仅基于文件来源识别连接,其他网络字段不参与键计算。
|
||||
normalized.Host = dsn
|
||||
normalized.Database = ""
|
||||
normalized.Port = 0
|
||||
normalized.User = ""
|
||||
normalized.Password = ""
|
||||
normalized.URI = ""
|
||||
normalized.Hosts = nil
|
||||
normalized.Topology = ""
|
||||
normalized.MySQLReplicaUser = ""
|
||||
normalized.MySQLReplicaPassword = ""
|
||||
normalized.ReplicaSet = ""
|
||||
normalized.AuthSource = ""
|
||||
normalized.ReadPreference = ""
|
||||
normalized.MongoSRV = false
|
||||
normalized.MongoAuthMechanism = ""
|
||||
normalized.MongoReplicaUser = ""
|
||||
normalized.MongoReplicaPassword = ""
|
||||
normalized.UseHTTPTunnel = false
|
||||
normalized.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
func resolveFileDatabaseDSN(config connection.ConnectionConfig) string {
|
||||
dsn := strings.TrimSpace(config.Host)
|
||||
if dsn == "" {
|
||||
dsn = strings.TrimSpace(config.Database)
|
||||
}
|
||||
if dsn == "" {
|
||||
dsn = ":memory:"
|
||||
}
|
||||
return dsn
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
normalized := normalizeCacheKeyConfig(config)
|
||||
b, _ := json.Marshal(normalized)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func shortCacheKey(cacheKey string) string {
|
||||
shortKey := cacheKey
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
return shortKey
|
||||
}
|
||||
|
||||
func shouldRefreshCachedConnection(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
normalized := strings.ToLower(normalizeErrorMessage(err))
|
||||
if normalized == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
patterns := []string{
|
||||
"invalid connection",
|
||||
"bad connection",
|
||||
"database is closed",
|
||||
"connection is already closed",
|
||||
"use of closed network connection",
|
||||
"broken pipe",
|
||||
"connection reset by peer",
|
||||
"server has gone away",
|
||||
"eof",
|
||||
}
|
||||
for _, pattern := range patterns {
|
||||
if strings.Contains(normalized, pattern) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *App) invalidateCachedDatabase(config connection.ConnectionConfig, reason error) bool {
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
key := getCacheKey(effectiveConfig)
|
||||
shortKey := shortCacheKey(key)
|
||||
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
|
||||
entry, exists := a.dbCache[key]
|
||||
if !exists || entry.inst == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if closeErr := entry.inst.Close(); closeErr != nil {
|
||||
logger.Error(closeErr, "关闭失效缓存连接失败:缓存Key=%s", shortKey)
|
||||
}
|
||||
delete(a.dbCache, key)
|
||||
if reason != nil {
|
||||
logger.Errorf("检测到连接失效,已清理缓存连接:%s 缓存Key=%s 原因=%s", formatConnSummary(effectiveConfig), shortKey, normalizeErrorMessage(reason))
|
||||
} else {
|
||||
logger.Infof("已清理缓存连接:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func wrapConnectError(config connection.ConnectionConfig, err error) error {
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
err = sanitizeMongoConnectErrorLabel(config, err)
|
||||
|
||||
var netErr net.Error
|
||||
if errors.Is(err, context.DeadlineExceeded) || (errors.As(err, &netErr) && netErr.Timeout()) {
|
||||
@@ -96,16 +234,89 @@ func wrapConnectError(config connection.ConnectionConfig, err error) error {
|
||||
return withLogHint{err: err, logPath: logger.Path()}
|
||||
}
|
||||
|
||||
type errorMessageOverride struct {
|
||||
message string
|
||||
cause error
|
||||
}
|
||||
|
||||
func (e errorMessageOverride) Error() string {
|
||||
return e.message
|
||||
}
|
||||
|
||||
func (e errorMessageOverride) Unwrap() error {
|
||||
return e.cause
|
||||
}
|
||||
|
||||
func sanitizeMongoConnectErrorLabel(config connection.ConnectionConfig, err error) error {
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
if strings.ToLower(strings.TrimSpace(config.Type)) != "mongodb" {
|
||||
return err
|
||||
}
|
||||
if mongoConnectUsesTLS(config) {
|
||||
return err
|
||||
}
|
||||
original := err.Error()
|
||||
rewritten := strings.ReplaceAll(original, "SSL 主库凭据", "主库凭据")
|
||||
rewritten = strings.ReplaceAll(rewritten, "SSL 从库凭据", "从库凭据")
|
||||
if rewritten == original {
|
||||
return err
|
||||
}
|
||||
return errorMessageOverride{
|
||||
message: rewritten,
|
||||
cause: err,
|
||||
}
|
||||
}
|
||||
|
||||
func mongoConnectUsesTLS(config connection.ConnectionConfig) bool {
|
||||
if config.UseSSL {
|
||||
return true
|
||||
}
|
||||
uriText := strings.TrimSpace(config.URI)
|
||||
if uriText == "" {
|
||||
return false
|
||||
}
|
||||
parsed, err := url.Parse(uriText)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
for _, key := range []string{"tls", "ssl"} {
|
||||
if enabled, known := parseMongoBool(parsed.Query().Get(key)); known {
|
||||
return enabled
|
||||
}
|
||||
}
|
||||
return strings.EqualFold(strings.TrimSpace(parsed.Scheme), "mongodb+srv")
|
||||
}
|
||||
|
||||
func parseMongoBool(raw string) (enabled bool, known bool) {
|
||||
value := strings.ToLower(strings.TrimSpace(raw))
|
||||
switch value {
|
||||
case "1", "true", "t", "yes", "y", "on", "required":
|
||||
return true, true
|
||||
case "0", "false", "f", "no", "n", "off", "disable", "disabled":
|
||||
return false, true
|
||||
default:
|
||||
return false, false
|
||||
}
|
||||
}
|
||||
|
||||
type withLogHint struct {
|
||||
err error
|
||||
logPath string
|
||||
}
|
||||
|
||||
func (e withLogHint) Error() string {
|
||||
if strings.TrimSpace(e.logPath) == "" {
|
||||
return e.err.Error()
|
||||
message := normalizeErrorMessage(e.err)
|
||||
path := strings.TrimSpace(e.logPath)
|
||||
if path == "" {
|
||||
return message
|
||||
}
|
||||
return fmt.Sprintf("%s(详细日志:%s)", e.err.Error(), e.logPath)
|
||||
info, statErr := os.Stat(path)
|
||||
if statErr != nil || info.IsDir() || info.Size() <= 0 {
|
||||
return message
|
||||
}
|
||||
return fmt.Sprintf("%s(详细日志:%s)", message, path)
|
||||
}
|
||||
|
||||
func (e withLogHint) Unwrap() error {
|
||||
@@ -124,12 +335,60 @@ func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString(fmt.Sprintf("类型=%s 地址=%s:%d 数据库=%s 用户=%s 超时=%ds",
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds))
|
||||
normalizedType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if normalizedType == "sqlite" || normalizedType == "duckdb" {
|
||||
path := strings.TrimSpace(config.Host)
|
||||
if path == "" {
|
||||
path = "(未配置)"
|
||||
}
|
||||
b.WriteString(fmt.Sprintf("类型=%s 路径=%s 超时=%ds", config.Type, path, timeoutSeconds))
|
||||
} else {
|
||||
b.WriteString(fmt.Sprintf("类型=%s 地址=%s:%d 数据库=%s 用户=%s 超时=%ds",
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds))
|
||||
}
|
||||
|
||||
if len(config.Hosts) > 0 {
|
||||
b.WriteString(fmt.Sprintf(" 节点数=%d", len(config.Hosts)))
|
||||
}
|
||||
if strings.TrimSpace(config.Topology) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 拓扑=%s", strings.TrimSpace(config.Topology)))
|
||||
}
|
||||
if strings.TrimSpace(config.URI) != "" {
|
||||
b.WriteString(fmt.Sprintf(" URI=已配置(长度=%d)", len(config.URI)))
|
||||
}
|
||||
if strings.TrimSpace(config.MySQLReplicaUser) != "" {
|
||||
b.WriteString(" MySQL从库凭据=已配置")
|
||||
}
|
||||
if strings.EqualFold(strings.TrimSpace(config.Type), "mongodb") {
|
||||
if strings.TrimSpace(config.MongoReplicaUser) != "" {
|
||||
b.WriteString(" Mongo从库凭据=已配置")
|
||||
}
|
||||
if strings.TrimSpace(config.ReplicaSet) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 副本集=%s", strings.TrimSpace(config.ReplicaSet)))
|
||||
}
|
||||
if strings.TrimSpace(config.ReadPreference) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 读偏好=%s", strings.TrimSpace(config.ReadPreference)))
|
||||
}
|
||||
if strings.TrimSpace(config.AuthSource) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 认证库=%s", strings.TrimSpace(config.AuthSource)))
|
||||
}
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(fmt.Sprintf(" SSH=%s:%d 用户=%s", config.SSH.Host, config.SSH.Port, config.SSH.User))
|
||||
}
|
||||
if config.UseProxy {
|
||||
b.WriteString(fmt.Sprintf(" 代理=%s://%s:%d", strings.ToLower(strings.TrimSpace(config.Proxy.Type)), config.Proxy.Host, config.Proxy.Port))
|
||||
if strings.TrimSpace(config.Proxy.User) != "" {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
if config.UseHTTPTunnel {
|
||||
b.WriteString(fmt.Sprintf(" HTTP隧道=%s:%d", strings.TrimSpace(config.HTTPTunnel.Host), config.HTTPTunnel.Port))
|
||||
if strings.TrimSpace(config.HTTPTunnel.User) != "" {
|
||||
b.WriteString(" HTTP隧道认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
if config.Type == "custom" {
|
||||
driver := strings.TrimSpace(config.Driver)
|
||||
@@ -155,17 +414,69 @@ func (a *App) getDatabase(config connection.ConnectionConfig) (db.Database, erro
|
||||
return a.getDatabaseWithPing(config, false)
|
||||
}
|
||||
|
||||
func (a *App) openDatabaseIsolated(config connection.ConnectionConfig) (db.Database, error) {
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(effectiveConfig.Type); !supported {
|
||||
if strings.TrimSpace(reason) == "" {
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(effectiveConfig.Type))
|
||||
}
|
||||
return nil, withLogHint{err: fmt.Errorf("%s", reason), logPath: logger.Path()}
|
||||
}
|
||||
|
||||
dbInst, err := db.NewDatabase(effectiveConfig.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
_ = dbInst.Close()
|
||||
return nil, wrapConnectError(effectiveConfig, proxyErr)
|
||||
}
|
||||
if err := dbInst.Connect(connectConfig); err != nil {
|
||||
_ = dbInst.Close()
|
||||
return nil, wrapConnectError(effectiveConfig, err)
|
||||
}
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing bool) (db.Database, error) {
|
||||
key := getCacheKey(config)
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
isFileDB := isFileDatabaseType(effectiveConfig.Type)
|
||||
|
||||
key := getCacheKey(effectiveConfig)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
if isFileDB {
|
||||
rawDSN := resolveFileDatabaseDSN(effectiveConfig)
|
||||
normalizedDSN := resolveFileDatabaseDSN(normalizeCacheKeyConfig(effectiveConfig))
|
||||
logger.Infof("文件库连接缓存探测:类型=%s 原始DSN=%s 归一化DSN=%s timeout=%ds forcePing=%t 缓存Key=%s",
|
||||
strings.TrimSpace(effectiveConfig.Type), rawDSN, normalizedDSN, effectiveConfig.Timeout, forcePing, shortKey)
|
||||
}
|
||||
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(effectiveConfig.Type); !supported {
|
||||
if strings.TrimSpace(reason) == "" {
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(effectiveConfig.Type))
|
||||
}
|
||||
// Best-effort cleanup: if cached instance exists for this exact config, close it.
|
||||
a.mu.Lock()
|
||||
if cur, exists := a.dbCache[key]; exists && cur.inst != nil {
|
||||
_ = cur.inst.Close()
|
||||
delete(a.dbCache, key)
|
||||
}
|
||||
a.mu.Unlock()
|
||||
return nil, withLogHint{err: fmt.Errorf("%s", reason), logPath: logger.Path()}
|
||||
}
|
||||
|
||||
a.mu.RLock()
|
||||
entry, ok := a.dbCache[key]
|
||||
a.mu.RUnlock()
|
||||
if ok {
|
||||
if isFileDB {
|
||||
logger.Infof("命中文件库连接缓存:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
needPing := forcePing
|
||||
if !needPing {
|
||||
lastPing := entry.lastPing
|
||||
@@ -175,6 +486,9 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
}
|
||||
|
||||
if !needPing {
|
||||
if isFileDB {
|
||||
logger.Infof("复用文件库连接缓存(免 Ping):类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return entry.inst, nil
|
||||
}
|
||||
|
||||
@@ -186,9 +500,12 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
a.dbCache[key] = cur
|
||||
}
|
||||
a.mu.Unlock()
|
||||
if isFileDB {
|
||||
logger.Infof("复用文件库连接缓存(Ping 成功):类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return entry.inst, nil
|
||||
} else {
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
}
|
||||
|
||||
// Ping failed: remove cached instance (best effort)
|
||||
@@ -200,19 +517,32 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
delete(a.dbCache, key)
|
||||
}
|
||||
a.mu.Unlock()
|
||||
if isFileDB {
|
||||
logger.Infof("文件库缓存连接已剔除,准备新建连接:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
}
|
||||
if isFileDB {
|
||||
logger.Infof("未命中文件库连接缓存,开始创建连接:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(config.Type)
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", effectiveConfig.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(effectiveConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", effectiveConfig.Type, shortKey)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dbInst.Connect(config); err != nil {
|
||||
wrapped := wrapConnectError(config, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, proxyErr)
|
||||
logger.Error(wrapped, "连接代理准备失败:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
if err := dbInst.Connect(connectConfig); err != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
@@ -223,11 +553,54 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
a.mu.Unlock()
|
||||
// Prefer existing cached connection to avoid cache racing duplicates.
|
||||
_ = dbInst.Close()
|
||||
if isFileDB {
|
||||
logger.Infof("并发创建命中已存在文件库连接,关闭新建连接并复用缓存:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return existing.inst, nil
|
||||
}
|
||||
a.dbCache[key] = cachedDatabase{inst: dbInst, lastPing: now}
|
||||
a.mu.Unlock()
|
||||
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
// generateQueryID generates a unique ID for a query using UUID v4
|
||||
func generateQueryID() string {
|
||||
return "query-" + uuid.New().String()
|
||||
}
|
||||
|
||||
// CancelQuery cancels a running query by its ID
|
||||
func (a *App) CancelQuery(queryID string) connection.QueryResult {
|
||||
a.queryMu.Lock()
|
||||
defer a.queryMu.Unlock()
|
||||
|
||||
if ctx, exists := a.runningQueries[queryID]; exists {
|
||||
ctx.cancel()
|
||||
delete(a.runningQueries, queryID)
|
||||
logger.Infof("查询已取消:queryID=%s", queryID)
|
||||
return connection.QueryResult{Success: true, Message: "查询已取消"}
|
||||
}
|
||||
logger.Warnf("取消查询失败:queryID=%s 不存在或已完成", queryID)
|
||||
return connection.QueryResult{Success: false, Message: "查询不存在或已完成"}
|
||||
}
|
||||
|
||||
// CleanupStaleQueries removes queries older than maxAge
|
||||
func (a *App) CleanupStaleQueries(maxAge time.Duration) {
|
||||
a.queryMu.Lock()
|
||||
defer a.queryMu.Unlock()
|
||||
|
||||
now := time.Now()
|
||||
for id, ctx := range a.runningQueries {
|
||||
if now.Sub(ctx.started) > maxAge {
|
||||
// Query likely finished or stuck, remove from tracking
|
||||
delete(a.runningQueries, id)
|
||||
// Query expired, silently remove
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GenerateQueryID generates a unique query ID for cancellation tracking
|
||||
func (a *App) GenerateQueryID() string {
|
||||
return generateQueryID()
|
||||
}
|
||||
|
||||
63
internal/app/app_cache_key_test.go
Normal file
63
internal/app/app_cache_key_test.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestGetCacheKey_IgnoreTimeout(t *testing.T) {
|
||||
base := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: `C:\data\songs.duckdb`,
|
||||
Timeout: 30,
|
||||
UseProxy: false,
|
||||
UseSSH: false,
|
||||
}
|
||||
modified := base
|
||||
modified.Timeout = 120
|
||||
|
||||
left := getCacheKey(base)
|
||||
right := getCacheKey(modified)
|
||||
if left != right {
|
||||
t.Fatalf("expected same cache key when only timeout differs, got %s vs %s", left, right)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCacheKey_DuckDBHostAndDatabaseEquivalent(t *testing.T) {
|
||||
withHost := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: `D:\music\songs.duckdb`,
|
||||
}
|
||||
withDatabase := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Database: `D:\music\songs.duckdb`,
|
||||
}
|
||||
|
||||
left := getCacheKey(withHost)
|
||||
right := getCacheKey(withDatabase)
|
||||
if left != right {
|
||||
t.Fatalf("expected same cache key for duckdb host/database path, got %s vs %s", left, right)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCacheKey_KeepDatabaseIsolation(t *testing.T) {
|
||||
a := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "127.0.0.1",
|
||||
Port: 3306,
|
||||
User: "root",
|
||||
Password: "root",
|
||||
Database: "db_a",
|
||||
Timeout: 30,
|
||||
}
|
||||
b := a
|
||||
b.Database = "db_b"
|
||||
b.Timeout = 5
|
||||
|
||||
left := getCacheKey(a)
|
||||
right := getCacheKey(b)
|
||||
if left == right {
|
||||
t.Fatalf("expected different cache key for different database targets")
|
||||
}
|
||||
}
|
||||
84
internal/app/app_connect_error_test.go
Normal file
84
internal/app/app_connect_error_test.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestWrapConnectError_MongoNoSSL_RemovesMisleadingSSLLabel(t *testing.T) {
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
UseSSL: false,
|
||||
}
|
||||
sourceErr := errors.New("MongoDB 连接失败:SSL 主库凭据验证失败: mock error")
|
||||
|
||||
wrapped := wrapConnectError(config, sourceErr)
|
||||
text := wrapped.Error()
|
||||
if strings.Contains(text, "SSL 主库凭据") {
|
||||
t.Fatalf("expected ssl label to be removed when TLS disabled, got: %s", text)
|
||||
}
|
||||
if !strings.Contains(text, "主库凭据验证失败") {
|
||||
t.Fatalf("expected auth label to remain, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrapConnectError_MongoURIForcesTLS_KeepsSSLLabel(t *testing.T) {
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
UseSSL: false,
|
||||
URI: "mongodb://user:pass@127.0.0.1:27017/admin?tls=true",
|
||||
}
|
||||
sourceErr := errors.New("MongoDB 连接失败:SSL 主库凭据验证失败: mock error")
|
||||
|
||||
wrapped := wrapConnectError(config, sourceErr)
|
||||
text := wrapped.Error()
|
||||
if !strings.Contains(text, "SSL 主库凭据") {
|
||||
t.Fatalf("expected ssl label to remain when URI enables TLS, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrapConnectError_MongoSRVDefaultTLS_KeepsSSLLabel(t *testing.T) {
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
UseSSL: false,
|
||||
URI: "mongodb+srv://user:pass@cluster0.example.com/admin",
|
||||
}
|
||||
sourceErr := errors.New("MongoDB 连接失败:SSL 主库凭据验证失败: mock error")
|
||||
|
||||
wrapped := wrapConnectError(config, sourceErr)
|
||||
text := wrapped.Error()
|
||||
if !strings.Contains(text, "SSL 主库凭据") {
|
||||
t.Fatalf("expected ssl label to remain for mongodb+srv default TLS, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWithLogHintError_OmitEmptyLogPath(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
logPath := filepath.Join(dir, "gonavi.log")
|
||||
if err := os.WriteFile(logPath, nil, 0o644); err != nil {
|
||||
t.Fatalf("write empty log failed: %v", err)
|
||||
}
|
||||
err := withLogHint{err: errors.New("连接失败"), logPath: logPath}
|
||||
text := err.Error()
|
||||
if strings.Contains(text, "详细日志:") {
|
||||
t.Fatalf("expected no log hint for empty file, got: %s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWithLogHintError_IncludeNonEmptyLogPath(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
logPath := filepath.Join(dir, "gonavi.log")
|
||||
if err := os.WriteFile(logPath, []byte("log entry\n"), 0o644); err != nil {
|
||||
t.Fatalf("write log failed: %v", err)
|
||||
}
|
||||
err := withLogHint{err: errors.New("连接失败"), logPath: logPath}
|
||||
text := err.Error()
|
||||
if !strings.Contains(text, "详细日志:"+logPath) {
|
||||
t.Fatalf("expected log hint with path, got: %s", text)
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
@@ -14,12 +15,17 @@ func normalizeRunConfig(config connection.ConnectionConfig, dbName string) conne
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "mysql", "postgres", "kingbase":
|
||||
// 这些类型的 dbName 表示“数据库”,需要写入连接配置以选择目标库。
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "highgo", "vastbase", "sqlserver", "mongodb", "tdengine", "clickhouse":
|
||||
// 这些类型的 dbName 表示"数据库",需要写入连接配置以选择目标库。
|
||||
runConfig.Database = name
|
||||
case "dameng":
|
||||
// 达梦使用 schema 参数,沿用现有行为:dbName 表示 schema。
|
||||
runConfig.Database = name
|
||||
case "redis":
|
||||
runConfig.Database = name
|
||||
if idx, err := strconv.Atoi(name); err == nil && idx >= 0 && idx <= 15 {
|
||||
runConfig.RedisDB = idx
|
||||
}
|
||||
default:
|
||||
// oracle: dbName 表示 schema/owner,不能覆盖 config.Database(服务名)
|
||||
// sqlite: 无需设置 Database
|
||||
@@ -36,6 +42,17 @@ func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string,
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
dbType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if dbType == "sqlserver" {
|
||||
// SQL Server 的 DB 接口约定:第一个参数是数据库名,schema 由 tableName(如 dbo.users) 自行解析。
|
||||
// 不能把 schema(dbo) 传到第一个参数,否则会拼出 dbo.sys.columns 等无效对象名。
|
||||
targetDB := rawDB
|
||||
if targetDB == "" {
|
||||
targetDB = strings.TrimSpace(config.Database)
|
||||
}
|
||||
return targetDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
@@ -44,13 +61,12 @@ func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string,
|
||||
}
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "postgres", "kingbase":
|
||||
// PG/金仓:dbName 在 UI 里是“数据库”,schema 需从 tableName 或使用默认 public。
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
// PG/金仓/瀚高/海量:dbName 在 UI 里是"数据库",schema 需从 tableName 或使用默认 public。
|
||||
return "public", rawTable
|
||||
default:
|
||||
// MySQL:dbName 表示数据库;Oracle/达梦:dbName 表示 schema/owner。
|
||||
return rawDB, rawTable
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
51
internal/app/db_context_test.go
Normal file
51
internal/app/db_context_test.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeSchemaAndTable_SQLServerKeepsDatabaseAndQualifiedTable(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schemaOrDb, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Database: "master",
|
||||
}, "biz_db", "dbo.users")
|
||||
|
||||
if schemaOrDb != "biz_db" {
|
||||
t.Fatalf("expected sqlserver first return value as database name, got %q", schemaOrDb)
|
||||
}
|
||||
if table != "dbo.users" {
|
||||
t.Fatalf("expected sqlserver table name keep qualified form, got %q", table)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeSchemaAndTable_SQLServerFallbackToConfigDatabase(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schemaOrDb, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Database: "biz_db",
|
||||
}, "", "dbo.users")
|
||||
|
||||
if schemaOrDb != "biz_db" {
|
||||
t.Fatalf("expected sqlserver fallback database from config, got %q", schemaOrDb)
|
||||
}
|
||||
if table != "dbo.users" {
|
||||
t.Fatalf("expected sqlserver table name keep qualified form, got %q", table)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeSchemaAndTable_PostgresStillSplitsQualifiedName(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schema, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
}, "demo_db", "public.orders")
|
||||
|
||||
if schema != "public" || table != "orders" {
|
||||
t.Fatalf("expected postgres qualified split to public.orders, got %q.%q", schema, table)
|
||||
}
|
||||
}
|
||||
233
internal/app/db_proxy.go
Normal file
233
internal/app/db_proxy.go
Normal file
@@ -0,0 +1,233 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
)
|
||||
|
||||
func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.ConnectionConfig, error) {
|
||||
config := raw
|
||||
if config.UseHTTPTunnel {
|
||||
if config.UseProxy {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道与普通代理不能同时启用")
|
||||
}
|
||||
tunnelHost := strings.TrimSpace(config.HTTPTunnel.Host)
|
||||
if tunnelHost == "" {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道主机不能为空")
|
||||
}
|
||||
tunnelPort := config.HTTPTunnel.Port
|
||||
if tunnelPort <= 0 {
|
||||
tunnelPort = 8080
|
||||
}
|
||||
if tunnelPort > 65535 {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道端口无效:%d", config.HTTPTunnel.Port)
|
||||
}
|
||||
|
||||
config.UseProxy = true
|
||||
config.Proxy = connection.ProxyConfig{
|
||||
Type: "http",
|
||||
Host: tunnelHost,
|
||||
Port: tunnelPort,
|
||||
User: strings.TrimSpace(config.HTTPTunnel.User),
|
||||
Password: config.HTTPTunnel.Password,
|
||||
}
|
||||
}
|
||||
if !config.UseProxy {
|
||||
config.Proxy = connection.ProxyConfig{}
|
||||
config.UseHTTPTunnel = false
|
||||
config.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
return config, nil
|
||||
}
|
||||
|
||||
normalizedProxy, err := proxytunnel.NormalizeConfig(config.Proxy)
|
||||
if err != nil {
|
||||
return connection.ConnectionConfig{}, err
|
||||
}
|
||||
config.Proxy = normalizedProxy
|
||||
config.UseHTTPTunnel = false
|
||||
config.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
|
||||
if config.UseSSH {
|
||||
sshPort := config.SSH.Port
|
||||
if sshPort <= 0 {
|
||||
sshPort = 22
|
||||
}
|
||||
forwardedSSH, err := buildProxyForwardAddress(normalizedProxy, strings.TrimSpace(config.SSH.Host), sshPort)
|
||||
if err != nil {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("代理连接 SSH 网关失败:%w", err)
|
||||
}
|
||||
config.SSH.Host = forwardedSSH.host
|
||||
config.SSH.Port = forwardedSSH.port
|
||||
config.UseProxy = false
|
||||
config.Proxy = connection.ProxyConfig{}
|
||||
return config, nil
|
||||
}
|
||||
|
||||
normalizedType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if normalizedType == "sqlite" || normalizedType == "duckdb" || normalizedType == "custom" {
|
||||
// 文件型/自定义 DSN 类型不走标准 host:port,不在此层改写。
|
||||
return config, nil
|
||||
}
|
||||
if normalizedType == "mongodb" {
|
||||
// MongoDB 统一由驱动侧 Dialer 处理代理,保留原始目标地址,避免将连接目标改写为本地转发地址。
|
||||
return config, nil
|
||||
}
|
||||
|
||||
targetPort := config.Port
|
||||
if targetPort <= 0 {
|
||||
targetPort = defaultPortByType(normalizedType)
|
||||
}
|
||||
forwardedPrimary, err := buildProxyForwardAddress(normalizedProxy, strings.TrimSpace(config.Host), targetPort)
|
||||
if err != nil {
|
||||
return connection.ConnectionConfig{}, err
|
||||
}
|
||||
config.Host = forwardedPrimary.host
|
||||
config.Port = forwardedPrimary.port
|
||||
|
||||
if len(config.Hosts) > 0 {
|
||||
rewritten := make([]string, 0, len(config.Hosts))
|
||||
seen := make(map[string]struct{}, len(config.Hosts))
|
||||
for _, rawEntry := range config.Hosts {
|
||||
targetHost, targetPort, ok := parseAddressWithDefaultPort(rawEntry, defaultPortByType(normalizedType))
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
forwarded, forwardErr := buildProxyForwardAddress(normalizedProxy, targetHost, targetPort)
|
||||
if forwardErr != nil {
|
||||
return connection.ConnectionConfig{}, forwardErr
|
||||
}
|
||||
rewrittenAddress := formatHostPort(forwarded.host, forwarded.port)
|
||||
if _, exists := seen[rewrittenAddress]; exists {
|
||||
continue
|
||||
}
|
||||
seen[rewrittenAddress] = struct{}{}
|
||||
rewritten = append(rewritten, rewrittenAddress)
|
||||
}
|
||||
config.Hosts = rewritten
|
||||
}
|
||||
|
||||
config.UseProxy = false
|
||||
config.Proxy = connection.ProxyConfig{}
|
||||
return config, nil
|
||||
}
|
||||
|
||||
type hostPort struct {
|
||||
host string
|
||||
port int
|
||||
}
|
||||
|
||||
func buildProxyForwardAddress(proxyConfig connection.ProxyConfig, targetHost string, targetPort int) (hostPort, error) {
|
||||
host := strings.TrimSpace(targetHost)
|
||||
if host == "" {
|
||||
host = "localhost"
|
||||
}
|
||||
port := targetPort
|
||||
if port <= 0 {
|
||||
return hostPort{}, fmt.Errorf("目标端口无效:%d", targetPort)
|
||||
}
|
||||
|
||||
forwarder, err := proxytunnel.GetOrCreateLocalForwarder(proxyConfig, host, port)
|
||||
if err != nil {
|
||||
return hostPort{}, err
|
||||
}
|
||||
localHost, localPort, splitOK := parseAddressWithDefaultPort(forwarder.LocalAddr, 0)
|
||||
if !splitOK || localPort <= 0 {
|
||||
return hostPort{}, fmt.Errorf("解析代理本地转发地址失败:%s", forwarder.LocalAddr)
|
||||
}
|
||||
return hostPort{host: localHost, port: localPort}, nil
|
||||
}
|
||||
|
||||
func parseAddressWithDefaultPort(raw string, defaultPort int) (string, int, bool) {
|
||||
text := strings.TrimSpace(raw)
|
||||
if text == "" {
|
||||
return "", 0, false
|
||||
}
|
||||
|
||||
if strings.HasPrefix(text, "[") {
|
||||
if host, portText, err := net.SplitHostPort(text); err == nil {
|
||||
if port, convErr := strconv.Atoi(portText); convErr == nil && port > 0 && port <= 65535 {
|
||||
return strings.TrimSpace(host), port, true
|
||||
}
|
||||
return "", 0, false
|
||||
}
|
||||
trimmed := strings.Trim(strings.TrimPrefix(text, "["), "]")
|
||||
if trimmed != "" && defaultPort > 0 {
|
||||
return trimmed, defaultPort, true
|
||||
}
|
||||
return "", 0, false
|
||||
}
|
||||
|
||||
if strings.Count(text, ":") == 0 {
|
||||
if defaultPort <= 0 {
|
||||
return "", 0, false
|
||||
}
|
||||
return text, defaultPort, true
|
||||
}
|
||||
|
||||
if strings.Count(text, ":") == 1 {
|
||||
host, portText, err := net.SplitHostPort(text)
|
||||
if err == nil {
|
||||
port, convErr := strconv.Atoi(portText)
|
||||
if convErr == nil && port > 0 && port <= 65535 {
|
||||
return strings.TrimSpace(host), port, true
|
||||
}
|
||||
return "", 0, false
|
||||
}
|
||||
if defaultPort > 0 {
|
||||
return strings.TrimSpace(text), defaultPort, true
|
||||
}
|
||||
return "", 0, false
|
||||
}
|
||||
|
||||
// IPv6 地址未带端口,使用默认端口。
|
||||
if defaultPort > 0 {
|
||||
return text, defaultPort, true
|
||||
}
|
||||
return "", 0, false
|
||||
}
|
||||
|
||||
func formatHostPort(host string, port int) string {
|
||||
h := strings.TrimSpace(host)
|
||||
if strings.Contains(h, ":") && !strings.HasPrefix(h, "[") {
|
||||
return fmt.Sprintf("[%s]:%d", h, port)
|
||||
}
|
||||
return fmt.Sprintf("%s:%d", h, port)
|
||||
}
|
||||
|
||||
func defaultPortByType(driverType string) int {
|
||||
switch strings.ToLower(strings.TrimSpace(driverType)) {
|
||||
case "mysql", "mariadb":
|
||||
return 3306
|
||||
case "diros":
|
||||
return 9030
|
||||
case "sphinx":
|
||||
return 9306
|
||||
case "postgres", "vastbase":
|
||||
return 5432
|
||||
case "redis":
|
||||
return 6379
|
||||
case "tdengine":
|
||||
return 6041
|
||||
case "oracle":
|
||||
return 1521
|
||||
case "dameng":
|
||||
return 5236
|
||||
case "kingbase":
|
||||
return 54321
|
||||
case "sqlserver":
|
||||
return 1433
|
||||
case "mongodb":
|
||||
return 27017
|
||||
case "clickhouse":
|
||||
return 9000
|
||||
case "highgo":
|
||||
return 5866
|
||||
default:
|
||||
return 0
|
||||
}
|
||||
}
|
||||
64
internal/app/db_proxy_test.go
Normal file
64
internal/app/db_proxy_test.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestResolveDialConfigWithProxy_MongoKeepsTargetAddress(t *testing.T) {
|
||||
hosts := []string{"10.20.30.40:27017", "10.20.30.41:27017"}
|
||||
raw := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
Host: "10.20.30.40",
|
||||
Port: 27017,
|
||||
UseProxy: true,
|
||||
Proxy: connection.ProxyConfig{
|
||||
Type: "socks5",
|
||||
Host: "127.0.0.1",
|
||||
Port: 1080,
|
||||
},
|
||||
Hosts: hosts,
|
||||
}
|
||||
|
||||
got, err := resolveDialConfigWithProxy(raw)
|
||||
if err != nil {
|
||||
t.Fatalf("resolveDialConfigWithProxy returned error: %v", err)
|
||||
}
|
||||
if got.Host != raw.Host || got.Port != raw.Port {
|
||||
t.Fatalf("mongo target address should be kept, got=%s:%d want=%s:%d", got.Host, got.Port, raw.Host, raw.Port)
|
||||
}
|
||||
if !got.UseProxy {
|
||||
t.Fatalf("mongo should keep UseProxy=true for driver-level dialer")
|
||||
}
|
||||
if !reflect.DeepEqual(got.Hosts, hosts) {
|
||||
t.Fatalf("mongo hosts should be kept, got=%v want=%v", got.Hosts, hosts)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveDialConfigWithProxy_MongoSRVKeepsTargetAddress(t *testing.T) {
|
||||
raw := connection.ConnectionConfig{
|
||||
Type: "mongodb",
|
||||
Host: "cluster0.example.com",
|
||||
Port: 27017,
|
||||
MongoSRV: true,
|
||||
UseProxy: true,
|
||||
Proxy: connection.ProxyConfig{
|
||||
Type: "http",
|
||||
Host: "127.0.0.1",
|
||||
Port: 7890,
|
||||
},
|
||||
}
|
||||
|
||||
got, err := resolveDialConfigWithProxy(raw)
|
||||
if err != nil {
|
||||
t.Fatalf("resolveDialConfigWithProxy returned error: %v", err)
|
||||
}
|
||||
if got.Host != raw.Host || got.Port != raw.Port {
|
||||
t.Fatalf("mongo SRV target address should be kept, got=%s:%d want=%s:%d", got.Host, got.Port, raw.Host, raw.Port)
|
||||
}
|
||||
if !got.UseProxy {
|
||||
t.Fatalf("mongo SRV should keep UseProxy=true for driver-level dialer")
|
||||
}
|
||||
}
|
||||
100
internal/app/error_text.go
Normal file
100
internal/app/error_text.go
Normal file
@@ -0,0 +1,100 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/text/encoding/simplifiedchinese"
|
||||
"golang.org/x/text/transform"
|
||||
)
|
||||
|
||||
func normalizeErrorMessage(err error) string {
|
||||
if err == nil {
|
||||
return ""
|
||||
}
|
||||
return normalizeMixedEncodingText(err.Error())
|
||||
}
|
||||
|
||||
func normalizeMixedEncodingText(text string) string {
|
||||
if text == "" {
|
||||
return text
|
||||
}
|
||||
|
||||
raw := []byte(text)
|
||||
output := make([]byte, 0, len(raw)+16)
|
||||
suspect := make([]byte, 0, 16)
|
||||
|
||||
flushSuspect := func() {
|
||||
if len(suspect) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
fallback := strings.ToValidUTF8(string(suspect), "<22>")
|
||||
decoded, _, err := transform.Bytes(simplifiedchinese.GB18030.NewDecoder(), suspect)
|
||||
if err == nil && utf8.Valid(decoded) {
|
||||
candidate := string(decoded)
|
||||
if scoreDecodedText(candidate) > scoreDecodedText(fallback) {
|
||||
output = append(output, []byte(candidate)...)
|
||||
} else {
|
||||
output = append(output, []byte(fallback)...)
|
||||
}
|
||||
} else {
|
||||
output = append(output, []byte(fallback)...)
|
||||
}
|
||||
|
||||
suspect = suspect[:0]
|
||||
}
|
||||
|
||||
for len(raw) > 0 {
|
||||
r, size := utf8.DecodeRune(raw)
|
||||
if r == utf8.RuneError && size == 1 {
|
||||
suspect = append(suspect, raw[0])
|
||||
raw = raw[1:]
|
||||
continue
|
||||
}
|
||||
|
||||
if isLikelyMojibakeRune(r) {
|
||||
suspect = append(suspect, raw[:size]...)
|
||||
} else {
|
||||
flushSuspect()
|
||||
output = append(output, raw[:size]...)
|
||||
}
|
||||
raw = raw[size:]
|
||||
}
|
||||
|
||||
flushSuspect()
|
||||
return string(output)
|
||||
}
|
||||
|
||||
func isLikelyMojibakeRune(r rune) bool {
|
||||
if r == utf8.RuneError {
|
||||
return true
|
||||
}
|
||||
if r >= 0x00C0 && r <= 0x02FF {
|
||||
return true
|
||||
}
|
||||
if unicode.In(r, unicode.Hebrew, unicode.Arabic, unicode.Cyrillic, unicode.Greek) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func scoreDecodedText(text string) int {
|
||||
score := 0
|
||||
for _, r := range text {
|
||||
switch {
|
||||
case r == '<27>':
|
||||
score -= 6
|
||||
case unicode.Is(unicode.Han, r):
|
||||
score += 4
|
||||
case isLikelyMojibakeRune(r):
|
||||
score -= 3
|
||||
case unicode.IsPrint(r):
|
||||
score += 1
|
||||
default:
|
||||
score -= 2
|
||||
}
|
||||
}
|
||||
return score
|
||||
}
|
||||
25
internal/app/error_text_test.go
Normal file
25
internal/app/error_text_test.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package app
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNormalizeMixedEncodingText_GBKErrorMessage(t *testing.T) {
|
||||
raw := []byte("pq: ")
|
||||
raw = append(raw, 0xD3, 0xC3, 0xBB, 0xA7) // 用户
|
||||
raw = append(raw, []byte(` "root" Password `)...)
|
||||
raw = append(raw, 0xC8, 0xCF, 0xD6, 0xA4, 0xCA, 0xA7, 0xB0, 0xDC) // 认证失败
|
||||
raw = append(raw, []byte(" (28P01)")...)
|
||||
|
||||
got := normalizeMixedEncodingText(string(raw))
|
||||
want := `pq: 用户 "root" Password 认证失败 (28P01)`
|
||||
if got != want {
|
||||
t.Fatalf("normalizeMixedEncodingText() mismatch\nwant: %q\ngot: %q", want, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeMixedEncodingText_KeepUTF8(t *testing.T) {
|
||||
input := `连接建立后验证失败:pq: password authentication failed for user "root"`
|
||||
got := normalizeMixedEncodingText(input)
|
||||
if got != input {
|
||||
t.Fatalf("expected unchanged utf8 text, got: %q", got)
|
||||
}
|
||||
}
|
||||
314
internal/app/global_proxy.go
Normal file
314
internal/app/global_proxy.go
Normal file
@@ -0,0 +1,314 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
)
|
||||
|
||||
type globalProxySnapshot struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Proxy connection.ProxyConfig `json:"proxy"`
|
||||
}
|
||||
|
||||
var globalProxyRuntime = struct {
|
||||
mu sync.RWMutex
|
||||
enabled bool
|
||||
proxy connection.ProxyConfig
|
||||
}{}
|
||||
|
||||
type localProxyTLSFallbackTransport struct {
|
||||
primary *http.Transport
|
||||
fallback *http.Transport
|
||||
proxyEndpoint string
|
||||
}
|
||||
|
||||
func currentGlobalProxyConfig() globalProxySnapshot {
|
||||
globalProxyRuntime.mu.RLock()
|
||||
defer globalProxyRuntime.mu.RUnlock()
|
||||
if !globalProxyRuntime.enabled {
|
||||
return globalProxySnapshot{
|
||||
Enabled: false,
|
||||
Proxy: connection.ProxyConfig{},
|
||||
}
|
||||
}
|
||||
return globalProxySnapshot{
|
||||
Enabled: true,
|
||||
Proxy: globalProxyRuntime.proxy,
|
||||
}
|
||||
}
|
||||
|
||||
func setGlobalProxyConfig(enabled bool, proxyConfig connection.ProxyConfig) (globalProxySnapshot, error) {
|
||||
if !enabled {
|
||||
globalProxyRuntime.mu.Lock()
|
||||
globalProxyRuntime.enabled = false
|
||||
globalProxyRuntime.proxy = connection.ProxyConfig{}
|
||||
globalProxyRuntime.mu.Unlock()
|
||||
return currentGlobalProxyConfig(), nil
|
||||
}
|
||||
|
||||
normalizedProxy, err := proxytunnel.NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return globalProxySnapshot{}, err
|
||||
}
|
||||
|
||||
globalProxyRuntime.mu.Lock()
|
||||
globalProxyRuntime.enabled = true
|
||||
globalProxyRuntime.proxy = normalizedProxy
|
||||
globalProxyRuntime.mu.Unlock()
|
||||
return currentGlobalProxyConfig(), nil
|
||||
}
|
||||
|
||||
func (a *App) ConfigureGlobalProxy(enabled bool, proxyConfig connection.ProxyConfig) connection.QueryResult {
|
||||
before := currentGlobalProxyConfig()
|
||||
snapshot, err := setGlobalProxyConfig(enabled, proxyConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
// 前端可能在同一配置下重复触发同步(例如严格模式或状态回放),
|
||||
// 这里做幂等日志,避免重复刷屏。
|
||||
if !globalProxySnapshotEqual(before, snapshot) {
|
||||
if snapshot.Enabled {
|
||||
authState := ""
|
||||
if strings.TrimSpace(snapshot.Proxy.User) != "" {
|
||||
authState = "(认证:已配置)"
|
||||
}
|
||||
logger.Infof(
|
||||
"全局代理已启用:%s://%s:%d%s",
|
||||
strings.ToLower(strings.TrimSpace(snapshot.Proxy.Type)),
|
||||
strings.TrimSpace(snapshot.Proxy.Host),
|
||||
snapshot.Proxy.Port,
|
||||
authState,
|
||||
)
|
||||
} else {
|
||||
logger.Infof("全局代理已关闭")
|
||||
}
|
||||
}
|
||||
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: "全局代理配置已生效",
|
||||
Data: snapshot,
|
||||
}
|
||||
}
|
||||
|
||||
func globalProxySnapshotEqual(a, b globalProxySnapshot) bool {
|
||||
if a.Enabled != b.Enabled {
|
||||
return false
|
||||
}
|
||||
if !a.Enabled {
|
||||
return true
|
||||
}
|
||||
return proxyConfigEqual(a.Proxy, b.Proxy)
|
||||
}
|
||||
|
||||
func proxyConfigEqual(a, b connection.ProxyConfig) bool {
|
||||
return strings.EqualFold(strings.TrimSpace(a.Type), strings.TrimSpace(b.Type)) &&
|
||||
strings.TrimSpace(a.Host) == strings.TrimSpace(b.Host) &&
|
||||
a.Port == b.Port &&
|
||||
strings.TrimSpace(a.User) == strings.TrimSpace(b.User) &&
|
||||
a.Password == b.Password
|
||||
}
|
||||
|
||||
func (a *App) GetGlobalProxyConfig() connection.QueryResult {
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: "OK",
|
||||
Data: currentGlobalProxyConfig(),
|
||||
}
|
||||
}
|
||||
|
||||
func applyGlobalProxyToConnection(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
effective := config
|
||||
if effective.UseProxy || effective.UseHTTPTunnel {
|
||||
return effective
|
||||
}
|
||||
if isFileDatabaseType(effective.Type) {
|
||||
effective.Proxy = connection.ProxyConfig{}
|
||||
return effective
|
||||
}
|
||||
|
||||
snapshot := currentGlobalProxyConfig()
|
||||
if !snapshot.Enabled {
|
||||
effective.Proxy = connection.ProxyConfig{}
|
||||
return effective
|
||||
}
|
||||
|
||||
effective.UseProxy = true
|
||||
effective.Proxy = snapshot.Proxy
|
||||
return effective
|
||||
}
|
||||
|
||||
func isFileDatabaseType(driverType string) bool {
|
||||
switch strings.ToLower(strings.TrimSpace(driverType)) {
|
||||
case "sqlite", "duckdb":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func newHTTPClientWithGlobalProxy(timeout time.Duration) *http.Client {
|
||||
client := &http.Client{
|
||||
Timeout: timeout,
|
||||
}
|
||||
if transport := buildHTTPTransportWithGlobalProxy(); transport != nil {
|
||||
client.Transport = transport
|
||||
}
|
||||
return client
|
||||
}
|
||||
|
||||
func buildHTTPTransportWithGlobalProxy() http.RoundTripper {
|
||||
baseTransport, ok := http.DefaultTransport.(*http.Transport)
|
||||
if !ok || baseTransport == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
transport := baseTransport.Clone()
|
||||
snapshot := currentGlobalProxyConfig()
|
||||
if !snapshot.Enabled {
|
||||
transport.Proxy = http.ProxyFromEnvironment
|
||||
return transport
|
||||
}
|
||||
|
||||
proxyURL, err := buildProxyURLFromConfig(snapshot.Proxy)
|
||||
if err != nil {
|
||||
logger.Warnf("全局代理配置无效,回退系统代理:%v", err)
|
||||
transport.Proxy = http.ProxyFromEnvironment
|
||||
return transport
|
||||
}
|
||||
|
||||
transport.Proxy = http.ProxyURL(proxyURL)
|
||||
if !isLoopbackProxyHost(snapshot.Proxy.Host) {
|
||||
return transport
|
||||
}
|
||||
|
||||
fallbackTransport := transport.Clone()
|
||||
fallbackTransport.TLSClientConfig = cloneTLSConfigWithInsecureSkipVerify(fallbackTransport.TLSClientConfig)
|
||||
return &localProxyTLSFallbackTransport{
|
||||
primary: transport,
|
||||
fallback: fallbackTransport,
|
||||
proxyEndpoint: proxyURL.Redacted(),
|
||||
}
|
||||
}
|
||||
|
||||
func (t *localProxyTLSFallbackTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||
resp, err := t.primary.RoundTrip(req)
|
||||
if err == nil {
|
||||
return resp, nil
|
||||
}
|
||||
if !isTLSFallbackCandidate(req.Method, err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
retryReq, cloneErr := cloneRequestForRetry(req)
|
||||
if cloneErr != nil {
|
||||
return nil, err
|
||||
}
|
||||
logger.Warnf("检测到本地代理 TLS 证书不受信任,启用兼容回退:代理=%s 目标=%s 错误=%v", t.proxyEndpoint, req.URL.String(), err)
|
||||
return t.fallback.RoundTrip(retryReq)
|
||||
}
|
||||
|
||||
func isTLSFallbackCandidate(method string, err error) bool {
|
||||
if !isIdempotentRequestMethod(method) {
|
||||
return false
|
||||
}
|
||||
return isUnknownAuthorityError(err)
|
||||
}
|
||||
|
||||
func isIdempotentRequestMethod(method string) bool {
|
||||
switch strings.ToUpper(strings.TrimSpace(method)) {
|
||||
case http.MethodGet, http.MethodHead:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func cloneRequestForRetry(req *http.Request) (*http.Request, error) {
|
||||
cloned := req.Clone(req.Context())
|
||||
if req.Body == nil || req.Body == http.NoBody {
|
||||
return cloned, nil
|
||||
}
|
||||
if req.GetBody == nil {
|
||||
return nil, fmt.Errorf("request body not replayable")
|
||||
}
|
||||
body, err := req.GetBody()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cloned.Body = body
|
||||
return cloned, nil
|
||||
}
|
||||
|
||||
func isUnknownAuthorityError(err error) bool {
|
||||
var unknownErr x509.UnknownAuthorityError
|
||||
if errors.As(err, &unknownErr) {
|
||||
return true
|
||||
}
|
||||
return strings.Contains(strings.ToLower(err.Error()), "x509: certificate signed by unknown authority")
|
||||
}
|
||||
|
||||
func cloneTLSConfigWithInsecureSkipVerify(base *tls.Config) *tls.Config {
|
||||
if base == nil {
|
||||
return &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
cloned := base.Clone()
|
||||
cloned.InsecureSkipVerify = true
|
||||
return cloned
|
||||
}
|
||||
|
||||
func isLoopbackProxyHost(host string) bool {
|
||||
trimmed := strings.TrimSpace(host)
|
||||
if trimmed == "" {
|
||||
return false
|
||||
}
|
||||
if strings.EqualFold(trimmed, "localhost") {
|
||||
return true
|
||||
}
|
||||
ip := net.ParseIP(trimmed)
|
||||
if ip == nil {
|
||||
return false
|
||||
}
|
||||
return ip.IsLoopback()
|
||||
}
|
||||
|
||||
func buildProxyURLFromConfig(proxyConfig connection.ProxyConfig) (*url.URL, error) {
|
||||
normalizedProxy, err := proxytunnel.NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
proxyType := strings.ToLower(strings.TrimSpace(normalizedProxy.Type))
|
||||
if proxyType != "http" && proxyType != "socks5" {
|
||||
return nil, fmt.Errorf("不支持的代理类型:%s", normalizedProxy.Type)
|
||||
}
|
||||
if strings.TrimSpace(normalizedProxy.Host) == "" {
|
||||
return nil, fmt.Errorf("代理地址不能为空")
|
||||
}
|
||||
if normalizedProxy.Port <= 0 || normalizedProxy.Port > 65535 {
|
||||
return nil, fmt.Errorf("代理端口无效:%d", normalizedProxy.Port)
|
||||
}
|
||||
|
||||
proxyURL := &url.URL{
|
||||
Scheme: proxyType,
|
||||
Host: net.JoinHostPort(strings.TrimSpace(normalizedProxy.Host), strconv.Itoa(normalizedProxy.Port)),
|
||||
}
|
||||
if strings.TrimSpace(normalizedProxy.User) != "" {
|
||||
proxyURL.User = url.UserPassword(strings.TrimSpace(normalizedProxy.User), normalizedProxy.Password)
|
||||
}
|
||||
return proxyURL, nil
|
||||
}
|
||||
@@ -3,14 +3,26 @@ package app
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
)
|
||||
|
||||
const testConnectionTimeoutUpperBoundSeconds = 12
|
||||
|
||||
func normalizeTestConnectionConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := config
|
||||
if normalized.Timeout <= 0 || normalized.Timeout > testConnectionTimeoutUpperBoundSeconds {
|
||||
normalized.Timeout = testConnectionTimeoutUpperBoundSeconds
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
// Generic DB Methods
|
||||
|
||||
func (a *App) DBConnect(config connection.ConnectionConfig) connection.QueryResult {
|
||||
@@ -20,25 +32,63 @@ func (a *App) DBConnect(config connection.ConnectionConfig) connection.QueryResu
|
||||
logger.Error(err, "DBConnect 连接失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
|
||||
logger.Infof("DBConnect 连接成功:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
func (a *App) TestConnection(config connection.ConnectionConfig) connection.QueryResult {
|
||||
_, err := a.getDatabaseForcePing(config)
|
||||
testConfig := normalizeTestConnectionConfig(config)
|
||||
started := time.Now()
|
||||
logger.Infof("TestConnection 开始:%s", formatConnSummary(testConfig))
|
||||
_, err := a.getDatabaseForcePing(testConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "TestConnection 连接测试失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "TestConnection 连接测试失败:耗时=%s %s", time.Since(started).Round(time.Millisecond), formatConnSummary(testConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
logger.Infof("TestConnection 连接测试成功:%s", formatConnSummary(config))
|
||||
|
||||
logger.Infof("TestConnection 连接测试成功:耗时=%s %s", time.Since(started).Round(time.Millisecond), formatConnSummary(testConfig))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
func (a *App) MongoDiscoverMembers(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "mongodb"
|
||||
|
||||
dbInst, err := a.getDatabaseForcePing(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "MongoDiscoverMembers 获取连接失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
discoverable, ok := dbInst.(interface {
|
||||
DiscoverMembers() (string, []connection.MongoMemberInfo, error)
|
||||
})
|
||||
if !ok {
|
||||
return connection.QueryResult{Success: false, Message: "当前 MongoDB 驱动不支持成员发现"}
|
||||
}
|
||||
|
||||
replicaSet, members, err := discoverable.DiscoverMembers()
|
||||
if err != nil {
|
||||
logger.Error(err, "MongoDiscoverMembers 执行失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
data := map[string]interface{}{
|
||||
"replicaSet": replicaSet,
|
||||
"members": members,
|
||||
}
|
||||
|
||||
logger.Infof("MongoDiscoverMembers 成功:%s 成员数=%d 副本集=%s", formatConnSummary(config), len(members), replicaSet)
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: fmt.Sprintf("发现 %d 个成员", len(members)),
|
||||
Data: data,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
runConfig.Database = ""
|
||||
runConfig.Database = ""
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -47,9 +97,18 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
|
||||
escapedDbName := strings.ReplaceAll(dbName, "`", "``")
|
||||
query := fmt.Sprintf("CREATE DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci", escapedDbName)
|
||||
if runConfig.Type == "postgres" {
|
||||
dbType := strings.ToLower(strings.TrimSpace(runConfig.Type))
|
||||
if dbType == "postgres" || dbType == "kingbase" || dbType == "highgo" || dbType == "vastbase" {
|
||||
escapedDbName = strings.ReplaceAll(dbName, `"`, `""`)
|
||||
query = fmt.Sprintf("CREATE DATABASE \"%s\"", escapedDbName)
|
||||
} else if dbType == "tdengine" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "clickhouse" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "mariadb" || dbType == "diros" {
|
||||
// MariaDB uses same syntax as MySQL
|
||||
} else if dbType == "sphinx" {
|
||||
return connection.QueryResult{Success: false, Message: "Sphinx 暂不支持创建数据库"}
|
||||
}
|
||||
|
||||
_, err = dbInst.Exec(query)
|
||||
@@ -60,6 +119,251 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: true, Message: "Database created successfully"}
|
||||
}
|
||||
|
||||
func resolveDDLDBType(config connection.ConnectionConfig) string {
|
||||
dbType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if dbType != "custom" {
|
||||
return dbType
|
||||
}
|
||||
|
||||
driver := strings.ToLower(strings.TrimSpace(config.Driver))
|
||||
switch driver {
|
||||
case "postgresql", "postgres", "pg", "pq", "pgx":
|
||||
return "postgres"
|
||||
case "dm", "dameng", "dm8":
|
||||
return "dameng"
|
||||
case "sqlite3", "sqlite":
|
||||
return "sqlite"
|
||||
case "sphinxql":
|
||||
return "sphinx"
|
||||
case "diros", "doris":
|
||||
return "diros"
|
||||
case "kingbase", "kingbase8", "kingbasees", "kingbasev8":
|
||||
return "kingbase"
|
||||
case "highgo":
|
||||
return "highgo"
|
||||
case "vastbase":
|
||||
return "vastbase"
|
||||
}
|
||||
|
||||
switch {
|
||||
case strings.Contains(driver, "postgres"):
|
||||
return "postgres"
|
||||
case strings.Contains(driver, "kingbase"):
|
||||
return "kingbase"
|
||||
case strings.Contains(driver, "highgo"):
|
||||
return "highgo"
|
||||
case strings.Contains(driver, "vastbase"):
|
||||
return "vastbase"
|
||||
case strings.Contains(driver, "sqlite"):
|
||||
return "sqlite"
|
||||
case strings.Contains(driver, "sphinx"):
|
||||
return "sphinx"
|
||||
case strings.Contains(driver, "diros"), strings.Contains(driver, "doris"):
|
||||
return "diros"
|
||||
default:
|
||||
return driver
|
||||
}
|
||||
}
|
||||
|
||||
func normalizeSchemaAndTableByType(dbType string, dbName string, tableName string) (string, string) {
|
||||
rawTable := strings.TrimSpace(tableName)
|
||||
rawDB := strings.TrimSpace(dbName)
|
||||
if rawTable == "" {
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
if schema != "" && table != "" {
|
||||
return schema, table
|
||||
}
|
||||
}
|
||||
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
return "public", rawTable
|
||||
default:
|
||||
return rawDB, rawTable
|
||||
}
|
||||
}
|
||||
|
||||
func quoteTableIdentByType(dbType string, schema string, table string) string {
|
||||
s := strings.TrimSpace(schema)
|
||||
t := strings.TrimSpace(table)
|
||||
if s == "" {
|
||||
return quoteIdentByType(dbType, t)
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", quoteIdentByType(dbType, s), quoteIdentByType(dbType, t))
|
||||
}
|
||||
|
||||
func buildRunConfigForDDL(config connection.ConnectionConfig, dbType string, dbName string) connection.ConnectionConfig {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
if strings.EqualFold(strings.TrimSpace(config.Type), "custom") {
|
||||
// custom 连接的 dbName 语义依赖 driver,尽量在常见驱动上对齐内置类型行为。
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "vastbase", "dameng", "clickhouse":
|
||||
if strings.TrimSpace(dbName) != "" {
|
||||
runConfig.Database = strings.TrimSpace(dbName)
|
||||
}
|
||||
}
|
||||
}
|
||||
return runConfig
|
||||
}
|
||||
|
||||
func (a *App) RenameDatabase(config connection.ConnectionConfig, oldName string, newName string) connection.QueryResult {
|
||||
oldName = strings.TrimSpace(oldName)
|
||||
newName = strings.TrimSpace(newName)
|
||||
if oldName == "" || newName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "数据库名称不能为空"}
|
||||
}
|
||||
if strings.EqualFold(oldName, newName) {
|
||||
return connection.QueryResult{Success: false, Message: "新旧数据库名称不能相同"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
return connection.QueryResult{Success: false, Message: "MySQL/MariaDB/Doris/Sphinx 不支持直接重命名数据库,请新建库后迁移数据"}
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
if strings.EqualFold(strings.TrimSpace(config.Database), oldName) {
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再重命名"}
|
||||
}
|
||||
runConfig := config
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
sql := fmt.Sprintf("ALTER DATABASE %s RENAME TO %s", quoteIdentByType(dbType, oldName), quoteIdentByType(dbType, newName))
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "数据库重命名成功"}
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名数据库", dbType)}
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) DropDatabase(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
dbName = strings.TrimSpace(dbName)
|
||||
if dbName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "数据库名称不能为空"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
var (
|
||||
runConfig connection.ConnectionConfig
|
||||
sql string
|
||||
)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "tdengine", "clickhouse":
|
||||
runConfig = config
|
||||
runConfig.Database = ""
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
if strings.EqualFold(strings.TrimSpace(config.Database), dbName) {
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再删除"}
|
||||
}
|
||||
runConfig = config
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除数据库", dbType)}
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "数据库删除成功"}
|
||||
}
|
||||
|
||||
func (a *App) RenameTable(config connection.ConnectionConfig, dbName string, oldTableName string, newTableName string) connection.QueryResult {
|
||||
oldTableName = strings.TrimSpace(oldTableName)
|
||||
newTableName = strings.TrimSpace(newTableName)
|
||||
if oldTableName == "" || newTableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "表名不能为空"}
|
||||
}
|
||||
if strings.EqualFold(oldTableName, newTableName) {
|
||||
return connection.QueryResult{Success: false, Message: "新旧表名不能相同"}
|
||||
}
|
||||
if strings.Contains(newTableName, ".") {
|
||||
return connection.QueryResult{Success: false, Message: "新表名不能包含 schema 或数据库前缀"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名表", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureOldTableName := normalizeSchemaAndTableByType(dbType, dbName, oldTableName)
|
||||
if pureOldTableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "旧表名不能为空"}
|
||||
}
|
||||
oldQualifiedTable := quoteTableIdentByType(dbType, schemaName, pureOldTableName)
|
||||
newTableQuoted := quoteIdentByType(dbType, newTableName)
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "clickhouse":
|
||||
newQualifiedTable := quoteTableIdentByType(dbType, schemaName, newTableName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualifiedTable, newQualifiedTable)
|
||||
case "sqlserver":
|
||||
// SQL Server 使用 sp_rename,参数为 'schema.oldname', 'newname'
|
||||
oldFullName := schemaName + "." + pureOldTableName
|
||||
escapedOld := strings.ReplaceAll(oldFullName, "'", "''")
|
||||
escapedNew := strings.ReplaceAll(newTableName, "'", "''")
|
||||
sql = fmt.Sprintf("EXEC sp_rename '%s', '%s'", escapedOld, escapedNew)
|
||||
default:
|
||||
sql = fmt.Sprintf("ALTER TABLE %s RENAME TO %s", oldQualifiedTable, newTableQuoted)
|
||||
}
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "表重命名成功"}
|
||||
}
|
||||
|
||||
func (a *App) DropTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
tableName = strings.TrimSpace(tableName)
|
||||
if tableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "表名不能为空"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "tdengine", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除表", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureTableName := normalizeSchemaAndTableByType(dbType, dbName, tableName)
|
||||
if pureTableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "表名不能为空"}
|
||||
}
|
||||
qualifiedTable := quoteTableIdentByType(dbType, schemaName, pureTableName)
|
||||
sql := fmt.Sprintf("DROP TABLE %s", qualifiedTable)
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "表删除成功"}
|
||||
}
|
||||
|
||||
func (a *App) MySQLConnect(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "mysql"
|
||||
return a.DBConnect(config)
|
||||
@@ -86,12 +390,21 @@ func (a *App) MySQLShowCreateTable(config connection.ConnectionConfig, dbName st
|
||||
}
|
||||
|
||||
func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
return a.DBQueryWithCancel(config, dbName, query, "")
|
||||
}
|
||||
|
||||
func (a *App) DBQueryWithCancel(config connection.ConnectionConfig, dbName string, query string, queryID string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
// Generate query ID if not provided
|
||||
if queryID == "" {
|
||||
queryID = generateQueryID()
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
@@ -102,8 +415,103 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
lowerQuery := strings.TrimSpace(strings.ToLower(query))
|
||||
if strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain") {
|
||||
// Store cancel function for potential manual cancellation
|
||||
a.queryMu.Lock()
|
||||
a.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
a.queryMu.Unlock()
|
||||
|
||||
// Ensure query is removed from tracking when done
|
||||
defer func() {
|
||||
a.queryMu.Lock()
|
||||
delete(a.runningQueries, queryID)
|
||||
a.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
isReadQuery := isReadOnlySQLQuery(runConfig.Type, query)
|
||||
|
||||
runReadQuery := func(inst db.Database) ([]map[string]interface{}, []string, error) {
|
||||
if q, ok := inst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
return q.QueryContext(ctx, query)
|
||||
}
|
||||
return inst.Query(query)
|
||||
}
|
||||
|
||||
runExecQuery := func(inst db.Database) (int64, error) {
|
||||
if e, ok := inst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
return e.ExecContext(ctx, query)
|
||||
}
|
||||
return inst.Exec(query)
|
||||
}
|
||||
|
||||
if isReadQuery {
|
||||
data, columns, err := runReadQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQuery 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
data, columns, err = runReadQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns, QueryID: queryID}
|
||||
} else {
|
||||
affected, err := runExecQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQuery 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
affected, err = runExecQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}, QueryID: queryID}
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) DBQueryIsolated(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.openDatabaseIsolated(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryIsolated 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
defer func() {
|
||||
if closeErr := dbInst.Close(); closeErr != nil {
|
||||
logger.Error(closeErr, "DBQueryIsolated 关闭临时连接失败:%s", formatConnSummary(runConfig))
|
||||
}
|
||||
}()
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
timeoutSeconds := runConfig.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
isReadQuery := isReadOnlySQLQuery(runConfig.Type, query)
|
||||
|
||||
if isReadQuery {
|
||||
var data []map[string]interface{}
|
||||
var columns []string
|
||||
if q, ok := dbInst.(interface {
|
||||
@@ -114,25 +522,25 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
data, columns, err = dbInst.Query(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
logger.Error(err, "DBQueryIsolated 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns}
|
||||
} else {
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, query)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
|
||||
}
|
||||
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, query)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQueryIsolated 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
|
||||
}
|
||||
|
||||
func sqlSnippet(query string) string {
|
||||
@@ -144,29 +552,107 @@ func sqlSnippet(query string) string {
|
||||
return q[:max] + "..."
|
||||
}
|
||||
|
||||
func ensureNonNilSlice[T any](items []T) []T {
|
||||
if items == nil {
|
||||
return make([]T, 0)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.QueryResult {
|
||||
dbInst, err := a.getDatabase(config)
|
||||
runConfig := normalizeRunConfig(config, "")
|
||||
if strings.EqualFold(strings.TrimSpace(runConfig.Type), "redis") {
|
||||
runConfig.Type = "redis"
|
||||
client, err := a.getRedisClient(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取 Redis 连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
dbs, err := client.GetDatabases()
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取 Redis 库列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
resData := make([]map[string]string, 0, len(dbs))
|
||||
for _, item := range dbs {
|
||||
resData = append(resData, map[string]string{"Database": strconv.Itoa(item.Index)})
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: resData}
|
||||
}
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
dbs, err := dbInst.GetDatabases()
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBGetDatabases 重建连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
dbs, err = retryInst.GetDatabases()
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
var resData []map[string]string
|
||||
|
||||
resData := make([]map[string]string, 0, len(dbs))
|
||||
for _, name := range dbs {
|
||||
resData = append(resData, map[string]string{"Database": name})
|
||||
}
|
||||
|
||||
|
||||
return connection.QueryResult{Success: true, Data: resData}
|
||||
}
|
||||
|
||||
func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
if strings.EqualFold(strings.TrimSpace(runConfig.Type), "redis") {
|
||||
runConfig.Type = "redis"
|
||||
client, err := a.getRedisClient(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取 Redis 连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
cursor := uint64(0)
|
||||
tables := make([]string, 0, 128)
|
||||
seen := make(map[string]struct{}, 128)
|
||||
for {
|
||||
result, err := client.ScanKeys("*", cursor, 1000)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 扫描 Redis Key 失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
for _, item := range result.Keys {
|
||||
key := strings.TrimSpace(item.Key)
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
tables = append(tables, key)
|
||||
}
|
||||
if strings.TrimSpace(result.Cursor) == "" || strings.TrimSpace(result.Cursor) == "0" {
|
||||
break
|
||||
}
|
||||
next, err := strconv.ParseUint(strings.TrimSpace(result.Cursor), 10, 64)
|
||||
if err != nil || next == cursor {
|
||||
break
|
||||
}
|
||||
cursor = next
|
||||
}
|
||||
resData := make([]map[string]string, 0, len(tables))
|
||||
for _, name := range tables {
|
||||
resData = append(resData, map[string]string{"Table": name})
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: resData}
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -175,12 +661,22 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
tables, err := dbInst.GetTables(dbName)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBGetTables 重建连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
tables, err = retryInst.GetTables(dbName)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取表列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
var resData []map[string]string
|
||||
resData := make([]map[string]string, 0, len(tables))
|
||||
for _, name := range tables {
|
||||
resData = append(resData, map[string]string{"Table": name})
|
||||
}
|
||||
@@ -189,7 +685,8 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
dbType := resolveDDLDBType(config)
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -197,8 +694,7 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
sqlStr, err := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
sqlStr, err := resolveCreateStatementWithFallback(dbInst, config, dbName, tableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBShowCreateTable 获取建表语句失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -207,6 +703,147 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
return connection.QueryResult{Success: true, Data: sqlStr}
|
||||
}
|
||||
|
||||
func resolveCreateStatementWithFallback(dbInst db.Database, config connection.ConnectionConfig, dbName string, tableName string) (string, error) {
|
||||
dbType := resolveDDLDBType(config)
|
||||
schemaName, pureTableName := normalizeSchemaAndTableByType(dbType, dbName, tableName)
|
||||
if pureTableName == "" {
|
||||
return "", fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
sqlStr, sourceErr := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
if sourceErr == nil && !shouldFallbackCreateStatement(dbType, sqlStr) {
|
||||
return sqlStr, nil
|
||||
}
|
||||
|
||||
if !supportsCreateStatementFallback(dbType) {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return sqlStr, nil
|
||||
}
|
||||
|
||||
columns, colErr := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if colErr != nil {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return "", colErr
|
||||
}
|
||||
|
||||
fallbackDDL, buildErr := buildFallbackCreateStatement(dbType, schemaName, pureTableName, columns)
|
||||
if buildErr != nil {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return "", buildErr
|
||||
}
|
||||
return fallbackDDL, nil
|
||||
}
|
||||
|
||||
func supportsCreateStatementFallback(dbType string) bool {
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func shouldFallbackCreateStatement(dbType string, ddl string) bool {
|
||||
if !supportsCreateStatementFallback(dbType) {
|
||||
return false
|
||||
}
|
||||
|
||||
trimmed := strings.TrimSpace(ddl)
|
||||
if trimmed == "" {
|
||||
return true
|
||||
}
|
||||
if hasCreateTableHead(trimmed) {
|
||||
return false
|
||||
}
|
||||
|
||||
lower := strings.ToLower(trimmed)
|
||||
if strings.Contains(lower, "not fully supported") ||
|
||||
strings.Contains(lower, "not directly supported") ||
|
||||
strings.Contains(lower, "not supported") {
|
||||
return true
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func hasCreateTableHead(sqlText string) bool {
|
||||
lines := strings.Split(sqlText, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(line, "--") || strings.HasPrefix(line, "/*") || strings.HasPrefix(line, "*") {
|
||||
continue
|
||||
}
|
||||
return strings.HasPrefix(strings.ToLower(line), "create table")
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func buildFallbackCreateStatement(dbType string, schemaName string, tableName string, columns []connection.ColumnDefinition) (string, error) {
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return "", fmt.Errorf("表名不能为空")
|
||||
}
|
||||
if len(columns) == 0 {
|
||||
return "", fmt.Errorf("未获取到字段定义,无法生成建表语句")
|
||||
}
|
||||
|
||||
qualifiedTable := quoteTableIdentByType(dbType, schemaName, table)
|
||||
columnLines := make([]string, 0, len(columns)+1)
|
||||
primaryKeys := make([]string, 0, 2)
|
||||
|
||||
for _, col := range columns {
|
||||
colNameRaw := strings.TrimSpace(col.Name)
|
||||
if colNameRaw == "" {
|
||||
continue
|
||||
}
|
||||
colType := strings.TrimSpace(col.Type)
|
||||
if colType == "" {
|
||||
colType = "text"
|
||||
}
|
||||
|
||||
colName := quoteIdentByType(dbType, colNameRaw)
|
||||
defParts := []string{fmt.Sprintf("%s %s", colName, colType)}
|
||||
|
||||
if strings.EqualFold(strings.TrimSpace(col.Nullable), "NO") {
|
||||
defParts = append(defParts, "NOT NULL")
|
||||
}
|
||||
if col.Default != nil {
|
||||
defVal := strings.TrimSpace(*col.Default)
|
||||
if defVal != "" {
|
||||
defParts = append(defParts, "DEFAULT "+defVal)
|
||||
}
|
||||
}
|
||||
|
||||
columnLines = append(columnLines, " "+strings.Join(defParts, " "))
|
||||
if strings.EqualFold(strings.TrimSpace(col.Key), "PRI") {
|
||||
primaryKeys = append(primaryKeys, colName)
|
||||
}
|
||||
}
|
||||
|
||||
if len(columnLines) == 0 {
|
||||
return "", fmt.Errorf("字段定义为空,无法生成建表语句")
|
||||
}
|
||||
if len(primaryKeys) > 0 {
|
||||
columnLines = append(columnLines, " PRIMARY KEY ("+strings.Join(primaryKeys, ", ")+")")
|
||||
}
|
||||
|
||||
ddl := strings.Builder{}
|
||||
ddl.WriteString("CREATE TABLE ")
|
||||
ddl.WriteString(qualifiedTable)
|
||||
ddl.WriteString(" (\n")
|
||||
ddl.WriteString(strings.Join(columnLines, ",\n"))
|
||||
ddl.WriteString("\n);")
|
||||
return ddl.String(), nil
|
||||
}
|
||||
|
||||
func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
@@ -221,7 +858,7 @@ func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, ta
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: columns}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(columns)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -238,7 +875,7 @@ func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, ta
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: indexes}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(indexes)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -255,7 +892,7 @@ func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: fks}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(fks)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -272,7 +909,129 @@ func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, t
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: triggers}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(triggers)}
|
||||
}
|
||||
|
||||
func (a *App) DropView(config connection.ConnectionConfig, dbName string, viewName string) connection.QueryResult {
|
||||
viewName = strings.TrimSpace(viewName)
|
||||
if viewName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "视图名称不能为空"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除视图", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureViewName := normalizeSchemaAndTableByType(dbType, dbName, viewName)
|
||||
if pureViewName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "视图名称不能为空"}
|
||||
}
|
||||
qualifiedView := quoteTableIdentByType(dbType, schemaName, pureViewName)
|
||||
sql := fmt.Sprintf("DROP VIEW %s", qualifiedView)
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "视图删除成功"}
|
||||
}
|
||||
|
||||
func (a *App) DropFunction(config connection.ConnectionConfig, dbName string, routineName string, routineType string) connection.QueryResult {
|
||||
routineName = strings.TrimSpace(routineName)
|
||||
routineType = strings.TrimSpace(strings.ToUpper(routineType))
|
||||
if routineName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "函数/存储过程名称不能为空"}
|
||||
}
|
||||
if routineType != "FUNCTION" && routineType != "PROCEDURE" {
|
||||
routineType = "FUNCTION"
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "duckdb":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除函数/存储过程", dbType)}
|
||||
}
|
||||
if dbType == "duckdb" && routineType == "PROCEDURE" {
|
||||
return connection.QueryResult{Success: false, Message: "DuckDB 暂不支持存储过程"}
|
||||
}
|
||||
|
||||
schemaName, pureName := normalizeSchemaAndTableByType(dbType, dbName, routineName)
|
||||
if pureName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "函数/存储过程名称不能为空"}
|
||||
}
|
||||
qualifiedName := quoteTableIdentByType(dbType, schemaName, pureName)
|
||||
sql := fmt.Sprintf("DROP %s %s", routineType, qualifiedName)
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
label := "函数"
|
||||
if routineType == "PROCEDURE" {
|
||||
label = "存储过程"
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: fmt.Sprintf("%s删除成功", label)}
|
||||
}
|
||||
|
||||
func (a *App) RenameView(config connection.ConnectionConfig, dbName string, oldName string, newName string) connection.QueryResult {
|
||||
oldName = strings.TrimSpace(oldName)
|
||||
newName = strings.TrimSpace(newName)
|
||||
if oldName == "" || newName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "视图名称不能为空"}
|
||||
}
|
||||
if strings.EqualFold(oldName, newName) {
|
||||
return connection.QueryResult{Success: false, Message: "新旧视图名称不能相同"}
|
||||
}
|
||||
if strings.Contains(newName, ".") {
|
||||
return connection.QueryResult{Success: false, Message: "新视图名不能包含 schema 或数据库前缀"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
schemaName, pureOldName := normalizeSchemaAndTableByType(dbType, dbName, oldName)
|
||||
if pureOldName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "旧视图名不能为空"}
|
||||
}
|
||||
oldQualified := quoteTableIdentByType(dbType, schemaName, pureOldName)
|
||||
newQuoted := quoteIdentByType(dbType, newName)
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "clickhouse":
|
||||
newQualified := quoteTableIdentByType(dbType, schemaName, newName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualified, newQualified)
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
sql = fmt.Sprintf("ALTER VIEW %s RENAME TO %s", oldQualified, newQuoted)
|
||||
case "sqlserver":
|
||||
oldFullName := schemaName + "." + pureOldName
|
||||
escapedOld := strings.ReplaceAll(oldFullName, "'", "''")
|
||||
escapedNew := strings.ReplaceAll(newName, "'", "''")
|
||||
sql = fmt.Sprintf("EXEC sp_rename '%s', '%s'", escapedOld, escapedNew)
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名视图", dbType)}
|
||||
}
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "视图重命名成功"}
|
||||
}
|
||||
|
||||
func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
@@ -288,5 +1047,5 @@ func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: cols}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(cols)}
|
||||
}
|
||||
|
||||
149
internal/app/methods_db_cancel_test.go
Normal file
149
internal/app/methods_db_cancel_test.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestGenerateQueryID(t *testing.T) {
|
||||
app := NewApp()
|
||||
id := app.GenerateQueryID()
|
||||
if id == "" {
|
||||
t.Fatal("GenerateQueryID returned empty string")
|
||||
}
|
||||
// Should start with "query-"
|
||||
if !strings.HasPrefix(id, "query-") {
|
||||
t.Fatalf("Expected query ID to start with 'query-', got: %s", id)
|
||||
}
|
||||
// Should be reasonably unique (not equal to another generated ID)
|
||||
id2 := app.GenerateQueryID()
|
||||
if id == id2 {
|
||||
t.Fatal("Two consecutive GenerateQueryID calls returned identical IDs")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCancelQuery_NonExistent(t *testing.T) {
|
||||
app := NewApp()
|
||||
res := app.CancelQuery("non-existent-query-id")
|
||||
if res.Success {
|
||||
t.Fatal("CancelQuery should fail for non-existent query ID")
|
||||
}
|
||||
if !strings.Contains(res.Message, "不存在") && !strings.Contains(res.Message, "not exist") {
|
||||
t.Fatalf("Expected error message about query not existing, got: %s", res.Message)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCancelQuery_ValidQuery(t *testing.T) {
|
||||
app := NewApp()
|
||||
|
||||
// First, generate a query ID and simulate a running query
|
||||
queryID := app.GenerateQueryID()
|
||||
|
||||
// Store a cancel function in runningQueries map
|
||||
_, cancel := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
|
||||
// Ensure cleanup after test
|
||||
defer func() {
|
||||
app.queryMu.Lock()
|
||||
delete(app.runningQueries, queryID)
|
||||
app.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
// Cancel the query
|
||||
res := app.CancelQuery(queryID)
|
||||
if !res.Success {
|
||||
t.Fatalf("CancelQuery should succeed for valid query ID, got: %s", res.Message)
|
||||
}
|
||||
|
||||
// Verify query removed from map
|
||||
app.queryMu.Lock()
|
||||
_, exists := app.runningQueries[queryID]
|
||||
app.queryMu.Unlock()
|
||||
if exists {
|
||||
t.Fatal("Query should be removed from runningQueries after cancellation")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCleanupStaleQueries(t *testing.T) {
|
||||
app := NewApp()
|
||||
|
||||
// Add a stale query (started 2 hours ago)
|
||||
queryID := app.GenerateQueryID()
|
||||
_, cancel := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now().Add(-2 * time.Hour),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
|
||||
// Cleanup queries older than 1 hour
|
||||
app.CleanupStaleQueries(1 * time.Hour)
|
||||
|
||||
// Verify stale query was removed
|
||||
app.queryMu.Lock()
|
||||
_, exists := app.runningQueries[queryID]
|
||||
app.queryMu.Unlock()
|
||||
if exists {
|
||||
t.Fatal("Stale query should be removed by CleanupStaleQueries")
|
||||
}
|
||||
|
||||
// Add a fresh query (started 30 minutes ago)
|
||||
freshID := app.GenerateQueryID()
|
||||
_, cancel2 := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[freshID] = queryContext{
|
||||
cancel: cancel2,
|
||||
started: time.Now().Add(-30 * time.Minute),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
defer cancel2()
|
||||
|
||||
// Cleanup queries older than 1 hour
|
||||
app.CleanupStaleQueries(1 * time.Hour)
|
||||
|
||||
// Verify fresh query still exists
|
||||
app.queryMu.Lock()
|
||||
_, exists = app.runningQueries[freshID]
|
||||
app.queryMu.Unlock()
|
||||
if !exists {
|
||||
t.Fatal("Fresh query should not be removed by CleanupStaleQueries")
|
||||
}
|
||||
|
||||
// Clean up
|
||||
app.queryMu.Lock()
|
||||
delete(app.runningQueries, freshID)
|
||||
app.queryMu.Unlock()
|
||||
}
|
||||
|
||||
func TestDBQueryWithCancel_QueryIDPropagation(t *testing.T) {
|
||||
// This test verifies that query ID is properly propagated in QueryResult
|
||||
// Since we can't easily mock database connections, we'll test the integration
|
||||
// by checking that DBQueryWithCancel returns a QueryResult with QueryID field
|
||||
|
||||
app := NewApp()
|
||||
|
||||
// Create a minimal config for a database type that doesn't require actual connection
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: ":memory:", // In-memory duckdb for testing
|
||||
}
|
||||
|
||||
// This will fail because we can't actually connect, but we can test the error path
|
||||
result := app.DBQueryWithCancel(config, "", "SELECT 1", "test-query-id")
|
||||
|
||||
// The query should fail (no actual database), but QueryID should be present
|
||||
if result.QueryID != "test-query-id" {
|
||||
t.Fatalf("Expected QueryID 'test-query-id' in result, got: %s", result.QueryID)
|
||||
}
|
||||
}
|
||||
174
internal/app/methods_db_create_statement_test.go
Normal file
174
internal/app/methods_db_create_statement_test.go
Normal file
@@ -0,0 +1,174 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type fakeCreateStatementDB struct {
|
||||
createSQL string
|
||||
createErr error
|
||||
columns []connection.ColumnDefinition
|
||||
columnsErr error
|
||||
|
||||
createSchema string
|
||||
createTable string
|
||||
colsSchema string
|
||||
colsTable string
|
||||
}
|
||||
|
||||
func (f *fakeCreateStatementDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeCreateStatementDB) Close() error { return nil }
|
||||
func (f *fakeCreateStatementDB) Ping() error { return nil }
|
||||
func (f *fakeCreateStatementDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
return nil, nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) Exec(query string) (int64, error) { return 0, nil }
|
||||
func (f *fakeCreateStatementDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeCreateStatementDB) GetTables(dbName string) ([]string, error) { return nil, nil }
|
||||
func (f *fakeCreateStatementDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
f.createSchema = dbName
|
||||
f.createTable = tableName
|
||||
return f.createSQL, f.createErr
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
f.colsSchema = dbName
|
||||
f.colsTable = tableName
|
||||
return f.columns, f.columnsErr
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestResolveDDLDBType_CustomDriverAlias(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
driver string
|
||||
want string
|
||||
}{
|
||||
{name: "postgresql alias", driver: "postgresql", want: "postgres"},
|
||||
{name: "pgx alias", driver: "pgx", want: "postgres"},
|
||||
{name: "kingbase8 alias", driver: "kingbase8", want: "kingbase"},
|
||||
{name: "kingbase contains alias", driver: "kingbasees", want: "kingbase"},
|
||||
{name: "dm alias", driver: "dm8", want: "dameng"},
|
||||
{name: "sqlite alias", driver: "sqlite3", want: "sqlite"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := connection.ConnectionConfig{Type: "custom", Driver: tc.driver}
|
||||
if got := resolveDDLDBType(cfg); got != tc.want {
|
||||
t.Fatalf("resolveDDLDBType() mismatch, want=%q got=%q", tc.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_CustomKingbaseUsesPublicSchema(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL",
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "kingbase8",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if dbInst.createSchema != "public" || dbInst.colsSchema != "public" {
|
||||
t.Fatalf("expected fallback schema public, got create=%q columns=%q", dbInst.createSchema, dbInst.colsSchema)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "public"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL with public schema, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_KeepQualifiedSchema(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "-- SHOW CREATE TABLE not fully supported for PostgreSQL in this MVP.",
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "integer", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "postgresql",
|
||||
}, "demo_db", "sales.orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if dbInst.createSchema != "sales" || dbInst.colsSchema != "sales" {
|
||||
t.Fatalf("expected schema sales, got create=%q columns=%q", dbInst.createSchema, dbInst.colsSchema)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "sales"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL with sales schema, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_NoFallbackForMySQL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL",
|
||||
columnsErr: errors.New("should not be called"),
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if ddl != dbInst.createSQL {
|
||||
t.Fatalf("expected original ddl for mysql, got: %s", ddl)
|
||||
}
|
||||
if dbInst.colsTable != "" {
|
||||
t.Fatalf("mysql path should not call GetColumns, got table=%q", dbInst.colsTable)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_FallbackWhenCreateStatementError(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createErr: errors.New("statement unsupported"),
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "public"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL for postgres error path, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
31
internal/app/methods_db_timeout_test.go
Normal file
31
internal/app/methods_db_timeout_test.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeTestConnectionConfig_DefaultToUpperBound(t *testing.T) {
|
||||
config := connection.ConnectionConfig{Type: "mongodb", Timeout: 0}
|
||||
got := normalizeTestConnectionConfig(config)
|
||||
if got.Timeout != testConnectionTimeoutUpperBoundSeconds {
|
||||
t.Fatalf("expected timeout=%d, got=%d", testConnectionTimeoutUpperBoundSeconds, got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTestConnectionConfig_KeepSmallerTimeout(t *testing.T) {
|
||||
config := connection.ConnectionConfig{Type: "mongodb", Timeout: 6}
|
||||
got := normalizeTestConnectionConfig(config)
|
||||
if got.Timeout != 6 {
|
||||
t.Fatalf("expected timeout=6, got=%d", got.Timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeTestConnectionConfig_ClampLargeTimeout(t *testing.T) {
|
||||
config := connection.ConnectionConfig{Type: "mongodb", Timeout: 60}
|
||||
got := normalizeTestConnectionConfig(config)
|
||||
if got.Timeout != testConnectionTimeoutUpperBoundSeconds {
|
||||
t.Fatalf("expected timeout=%d, got=%d", testConnectionTimeoutUpperBoundSeconds, got.Timeout)
|
||||
}
|
||||
}
|
||||
3870
internal/app/methods_driver.go
Normal file
3870
internal/app/methods_driver.go
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
275
internal/app/methods_file_export_test.go
Normal file
275
internal/app/methods_file_export_test.go
Normal file
@@ -0,0 +1,275 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type fakeExportQueryDB struct {
|
||||
data []map[string]interface{}
|
||||
cols []string
|
||||
err error
|
||||
|
||||
lastQuery string
|
||||
lastContextTimeout time.Duration
|
||||
hasContextDeadline bool
|
||||
}
|
||||
|
||||
func (f *fakeExportQueryDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeExportQueryDB) Close() error { return nil }
|
||||
func (f *fakeExportQueryDB) Ping() error { return nil }
|
||||
func (f *fakeExportQueryDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
f.lastQuery = query
|
||||
return f.data, f.cols, f.err
|
||||
}
|
||||
func (f *fakeExportQueryDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
f.lastQuery = query
|
||||
if deadline, ok := ctx.Deadline(); ok {
|
||||
f.hasContextDeadline = true
|
||||
f.lastContextTimeout = time.Until(deadline)
|
||||
}
|
||||
return f.data, f.cols, f.err
|
||||
}
|
||||
func (f *fakeExportQueryDB) Exec(query string) (int64, error) { return 0, nil }
|
||||
func (f *fakeExportQueryDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeExportQueryDB) GetTables(dbName string) ([]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestFormatExportCellText_FloatNoScientificNotation(t *testing.T) {
|
||||
got := formatExportCellText(1.445663e+06)
|
||||
if strings.Contains(strings.ToLower(got), "e+") || strings.Contains(strings.ToLower(got), "e-") {
|
||||
t.Fatalf("不应输出科学计数法,got=%q", got)
|
||||
}
|
||||
if got != "1445663" {
|
||||
t.Fatalf("浮点整值导出异常,want=%q got=%q", "1445663", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_Markdown_NumberKeepPlainText(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.md")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{"id": 1.445663e+06},
|
||||
}
|
||||
columns := []string{"id"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "md"); err != nil {
|
||||
t.Fatalf("写入 md 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 md 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
if strings.Contains(strings.ToLower(content), "e+") || strings.Contains(strings.ToLower(content), "e-") {
|
||||
t.Fatalf("md 导出包含科学计数法: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "| 1445663 |") {
|
||||
t.Fatalf("md 导出未保留整数字面量,content=%s", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_JSON_NumberKeepPlainText(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.json")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{"id": 1.445663e+06},
|
||||
}
|
||||
columns := []string{"id"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "json"); err != nil {
|
||||
t.Fatalf("写入 json 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 json 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
if strings.Contains(strings.ToLower(content), "e+") || strings.Contains(strings.ToLower(content), "e-") {
|
||||
t.Fatalf("json 导出包含科学计数法: %s", content)
|
||||
}
|
||||
|
||||
var decoded []map[string]json.Number
|
||||
decoder := json.NewDecoder(bytes.NewReader(contentBytes))
|
||||
decoder.UseNumber()
|
||||
if err := decoder.Decode(&decoded); err != nil {
|
||||
t.Fatalf("解析导出 json 失败: %v", err)
|
||||
}
|
||||
if len(decoded) != 1 {
|
||||
t.Fatalf("导出行数异常,got=%d", len(decoded))
|
||||
}
|
||||
if decoded[0]["id"].String() != "1445663" {
|
||||
t.Fatalf("json 数值格式异常,want=1445663 got=%s", decoded[0]["id"].String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryDataForExport_UsesMinimumTimeout(t *testing.T) {
|
||||
fake := &fakeExportQueryDB{
|
||||
data: []map[string]interface{}{{"v": 1}},
|
||||
cols: []string{"v"},
|
||||
}
|
||||
_, _, err := queryDataForExport(fake, connection.ConnectionConfig{Timeout: 10}, "SELECT 1")
|
||||
if err != nil {
|
||||
t.Fatalf("queryDataForExport 返回错误: %v", err)
|
||||
}
|
||||
if !fake.hasContextDeadline {
|
||||
t.Fatal("queryDataForExport 应设置 context deadline")
|
||||
}
|
||||
if fake.lastQuery != "SELECT 1" {
|
||||
t.Fatalf("queryDataForExport 查询语句异常,want=%q got=%q", "SELECT 1", fake.lastQuery)
|
||||
}
|
||||
lowerBound := minExportQueryTimeout - 5*time.Second
|
||||
upperBound := minExportQueryTimeout + 5*time.Second
|
||||
if fake.lastContextTimeout < lowerBound || fake.lastContextTimeout > upperBound {
|
||||
t.Fatalf("导出最小超时异常,want≈%s got=%s", minExportQueryTimeout, fake.lastContextTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryDataForExport_UsesLargerConfiguredTimeout(t *testing.T) {
|
||||
fake := &fakeExportQueryDB{
|
||||
data: []map[string]interface{}{{"v": 1}},
|
||||
cols: []string{"v"},
|
||||
}
|
||||
_, _, err := queryDataForExport(fake, connection.ConnectionConfig{Timeout: 900}, "SELECT 1")
|
||||
if err != nil {
|
||||
t.Fatalf("queryDataForExport 返回错误: %v", err)
|
||||
}
|
||||
if !fake.hasContextDeadline {
|
||||
t.Fatal("queryDataForExport 应设置 context deadline")
|
||||
}
|
||||
expected := 900 * time.Second
|
||||
lowerBound := expected - 5*time.Second
|
||||
upperBound := expected + 5*time.Second
|
||||
if fake.lastContextTimeout < lowerBound || fake.lastContextTimeout > upperBound {
|
||||
t.Fatalf("导出配置超时异常,want≈%s got=%s", expected, fake.lastContextTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeout_ClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
timeout := getExportQueryTimeout(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Timeout: 30,
|
||||
})
|
||||
if timeout != minClickHouseExportQueryTimeout {
|
||||
t.Fatalf("clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeout_CustomClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
timeout := getExportQueryTimeout(connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "clickhouse",
|
||||
Timeout: 30,
|
||||
})
|
||||
if timeout != minClickHouseExportQueryTimeout {
|
||||
t.Fatalf("custom clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_HTML_EscapeAndStyle(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.html")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{
|
||||
"name": "<script>alert(1)</script>",
|
||||
"note": "line1\nline2",
|
||||
"nullable": nil,
|
||||
},
|
||||
}
|
||||
columns := []string{"name", "note", "nullable"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "html"); err != nil {
|
||||
t.Fatalf("写入 html 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 html 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
|
||||
if !strings.Contains(content, "<!DOCTYPE html>") {
|
||||
t.Fatalf("html 导出缺少 doctype: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "position: sticky") {
|
||||
t.Fatalf("html 导出缺少表头吸顶样式: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "tbody tr:nth-child(even)") {
|
||||
t.Fatalf("html 导出缺少斑马纹样式: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "<script>alert(1)</script>") {
|
||||
t.Fatalf("html 导出未进行 XSS 转义: %s", content)
|
||||
}
|
||||
if strings.Contains(content, "<script>alert(1)</script>") {
|
||||
t.Fatalf("html 导出包含未转义脚本: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "line1<br>line2") {
|
||||
t.Fatalf("html 导出换行未转为 <br>: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "<td>NULL</td>") {
|
||||
t.Fatalf("html 导出空值显示异常: %s", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_HTML_EscapeHeader(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.html")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
columnName := "<b>name</b>"
|
||||
data := []map[string]interface{}{{columnName: "ok"}}
|
||||
if err := writeRowsToFile(f, data, []string{columnName}, "html"); err != nil {
|
||||
t.Fatalf("写入 html 失败: %v", err)
|
||||
}
|
||||
contentBytes, _ := os.ReadFile(f.Name())
|
||||
content := string(contentBytes)
|
||||
if !strings.Contains(content, "<th><b>name</b></th>") || strings.Contains(content, "<th><b>name</b></th>") {
|
||||
t.Fatalf("html 表头未正确转义: %s", content)
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,9 @@ import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
@@ -20,12 +23,20 @@ var (
|
||||
|
||||
// getRedisClient gets or creates a Redis client from cache
|
||||
func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisClient, error) {
|
||||
key := getRedisClientCacheKey(config)
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, proxyErr)
|
||||
logger.Error(wrapped, "Redis 代理准备失败:%s", formatRedisConnSummary(effectiveConfig))
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
key := getRedisClientCacheKey(connectConfig)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
|
||||
redisCacheMu.Lock()
|
||||
defer redisCacheMu.Unlock()
|
||||
@@ -44,47 +55,69 @@ func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisCli
|
||||
|
||||
logger.Infof("创建 Redis 客户端实例:缓存Key=%s", shortKey)
|
||||
client := redis.NewRedisClient()
|
||||
if err := client.Connect(config); err != nil {
|
||||
logger.Error(err, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
return nil, err
|
||||
if err := client.Connect(connectConfig); err != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, err)
|
||||
logger.Error(wrapped, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
redisCache[key] = client
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func getRedisClientCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
}
|
||||
b, _ := json.Marshal(config)
|
||||
normalized := normalizeCacheKeyConfig(config)
|
||||
b, _ := json.Marshal(normalized)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func formatRedisConnSummary(config connection.ConnectionConfig) string {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString("类型=redis 地址=")
|
||||
b.WriteString(config.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.Port + '0')))
|
||||
b.WriteString(strconv.Itoa(config.Port))
|
||||
if topology := strings.TrimSpace(config.Topology); topology != "" {
|
||||
b.WriteString(" 模式=")
|
||||
b.WriteString(topology)
|
||||
}
|
||||
if len(config.Hosts) > 0 {
|
||||
b.WriteString(" 节点数=")
|
||||
b.WriteString(strconv.Itoa(len(config.Hosts)))
|
||||
}
|
||||
b.WriteString(" DB=")
|
||||
b.WriteString(string(rune(config.RedisDB + '0')))
|
||||
b.WriteString(strconv.Itoa(config.RedisDB))
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(" SSH=")
|
||||
b.WriteString(config.SSH.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.SSH.Port + '0')))
|
||||
b.WriteString(strconv.Itoa(config.SSH.Port))
|
||||
b.WriteString(" 用户=")
|
||||
b.WriteString(config.SSH.User)
|
||||
}
|
||||
if config.UseProxy {
|
||||
b.WriteString(" 代理=")
|
||||
b.WriteString(strings.ToLower(strings.TrimSpace(config.Proxy.Type)))
|
||||
b.WriteString("://")
|
||||
b.WriteString(config.Proxy.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(strconv.Itoa(config.Proxy.Port))
|
||||
if strings.TrimSpace(config.Proxy.User) != "" {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
if config.UseHTTPTunnel {
|
||||
b.WriteString(" HTTP隧道=")
|
||||
b.WriteString(strings.TrimSpace(config.HTTPTunnel.Host))
|
||||
b.WriteString(":")
|
||||
b.WriteString(strconv.Itoa(config.HTTPTunnel.Port))
|
||||
if strings.TrimSpace(config.HTTPTunnel.User) != "" {
|
||||
b.WriteString(" HTTP隧道认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
@@ -107,14 +140,20 @@ func (a *App) RedisTestConnection(config connection.ConnectionConfig) connection
|
||||
}
|
||||
|
||||
// RedisScanKeys scans keys matching a pattern
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor uint64, count int64) connection.QueryResult {
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor any, count int64) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, cursor, count)
|
||||
parsedCursor, err := parseRedisScanCursor(cursor)
|
||||
if err != nil {
|
||||
logger.Warnf("RedisScanKeys 游标解析失败,已回退到起始游标:cursor=%v err=%v", cursor, err)
|
||||
parsedCursor = 0
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, parsedCursor, count)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisScanKeys 扫描失败:pattern=%s", pattern)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -123,6 +162,82 @@ func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string,
|
||||
return connection.QueryResult{Success: true, Data: result}
|
||||
}
|
||||
|
||||
func parseRedisScanCursor(cursor any) (uint64, error) {
|
||||
switch v := cursor.(type) {
|
||||
case nil:
|
||||
return 0, nil
|
||||
case uint64:
|
||||
return v, nil
|
||||
case uint32:
|
||||
return uint64(v), nil
|
||||
case uint16:
|
||||
return uint64(v), nil
|
||||
case uint8:
|
||||
return uint64(v), nil
|
||||
case uint:
|
||||
return uint64(v), nil
|
||||
case int64:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int32:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int16:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int8:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case float64:
|
||||
return parseRedisScanCursorFromFloat(v)
|
||||
case float32:
|
||||
return parseRedisScanCursorFromFloat(float64(v))
|
||||
case json.Number:
|
||||
return parseRedisScanCursor(strings.TrimSpace(v.String()))
|
||||
case string:
|
||||
trimmed := strings.TrimSpace(v)
|
||||
if trimmed == "" {
|
||||
return 0, nil
|
||||
}
|
||||
parsed, err := strconv.ParseUint(trimmed, 10, 64)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("无效游标: %q", v)
|
||||
}
|
||||
return parsed, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("不支持的游标类型: %T", cursor)
|
||||
}
|
||||
}
|
||||
|
||||
// parseRedisScanCursorFromFloat converts a float cursor value to uint64.
// NaN/Inf, negative, fractional, and out-of-uint64-range values are rejected.
func parseRedisScanCursorFromFloat(value float64) (uint64, error) {
	if math.IsNaN(value) || math.IsInf(value, 0) {
		return 0, fmt.Errorf("无效浮点游标: %v", value)
	}
	if value < 0 {
		return 0, fmt.Errorf("游标不能为负数: %v", value)
	}
	if math.Trunc(value) != value {
		return 0, fmt.Errorf("游标必须为整数: %v", value)
	}
	// Fix: float64(math.MaxUint64) rounds UP to exactly 2^64, which does NOT
	// fit in a uint64 — with the previous `>` comparison that boundary value
	// was accepted and the subsequent conversion overflowed (implementation-
	// defined result). Use >= so it is rejected instead.
	if value >= float64(math.MaxUint64) {
		return 0, fmt.Errorf("游标超出范围: %v", value)
	}
	return uint64(value), nil
}
|
||||
|
||||
// RedisGetValue gets the value of a key
|
||||
func (a *App) RedisGetValue(config connection.ConnectionConfig, key string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
@@ -450,6 +565,40 @@ func (a *App) RedisZSetRemove(config connection.ConnectionConfig, key string, me
|
||||
return connection.QueryResult{Success: true, Message: "删除成功"}
|
||||
}
|
||||
|
||||
// RedisStreamAdd adds an entry to a stream
|
||||
func (a *App) RedisStreamAdd(config connection.ConnectionConfig, key string, fields map[string]string, id string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
newID, err := client.StreamAdd(key, fields, id)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisStreamAdd 添加失败:key=%s id=%s", key, id)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "添加成功", Data: map[string]string{"id": newID}}
|
||||
}
|
||||
|
||||
// RedisStreamDelete deletes stream entries by IDs
|
||||
func (a *App) RedisStreamDelete(config connection.ConnectionConfig, key string, ids []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
deleted, err := client.StreamDelete(key, ids...)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisStreamDelete 删除失败:key=%s ids=%v", key, ids)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "删除成功", Data: map[string]int64{"deleted": deleted}}
|
||||
}
|
||||
|
||||
// RedisFlushDB flushes the current database
|
||||
func (a *App) RedisFlushDB(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
|
||||
50
internal/app/methods_redis_cursor_test.go
Normal file
50
internal/app/methods_redis_cursor_test.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestParseRedisScanCursor(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
input any
|
||||
want uint64
|
||||
wantErr bool
|
||||
}{
|
||||
{name: "nil defaults to zero", input: nil, want: 0},
|
||||
{name: "empty string defaults to zero", input: " ", want: 0},
|
||||
{name: "string cursor", input: "123", want: 123},
|
||||
{name: "uint64 cursor", input: uint64(456), want: 456},
|
||||
{name: "int cursor", input: int(789), want: 789},
|
||||
{name: "float cursor", input: float64(42), want: 42},
|
||||
{name: "json number cursor", input: json.Number("88"), want: 88},
|
||||
{name: "negative int rejected", input: -1, wantErr: true},
|
||||
{name: "fraction float rejected", input: float64(1.5), wantErr: true},
|
||||
{name: "invalid string rejected", input: "abc", wantErr: true},
|
||||
{name: "unsupported type rejected", input: true, wantErr: true},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := parseRedisScanCursor(tc.input)
|
||||
if tc.wantErr {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil (value=%d)", got)
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if got != tc.want {
|
||||
t.Fatalf("parseRedisScanCursor() mismatch, want=%d got=%d", tc.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
40
internal/app/methods_update_windows_script_test.go
Normal file
40
internal/app/methods_update_windows_script_test.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestBuildWindowsScriptKeepsBatchForSyntax(t *testing.T) {
|
||||
script := buildWindowsScript(
|
||||
`C:\tmp\GoNavi-v0.4.0-windows-amd64.zip`,
|
||||
`C:\Program Files\GoNavi\GoNavi.exe`,
|
||||
`C:\Program Files\GoNavi\.gonavi-update-windows-v0.4.0`,
|
||||
`C:\Program Files\GoNavi\logs\update-install.log`,
|
||||
13579,
|
||||
)
|
||||
|
||||
mustContain := []string{
|
||||
`for %%I in ("%TARGET%") do set "TARGET_NAME=%%~nxI"`,
|
||||
`for %%I in ("%SOURCE%") do set "SOURCE_EXT=%%~xI"`,
|
||||
`for /R "%EXTRACT_DIR%" %%F in (*.exe) do (`,
|
||||
`set "SOURCE_EXE=%%~fF"`,
|
||||
}
|
||||
for _, want := range mustContain {
|
||||
if !strings.Contains(script, want) {
|
||||
t.Fatalf("windows update script missing required token: %s\nscript:\n%s", want, script)
|
||||
}
|
||||
}
|
||||
|
||||
mustNotContain := []string{
|
||||
`for %I in ("%TARGET%") do set "TARGET_NAME=%~nxI"`,
|
||||
`for %I in ("%SOURCE%") do set "SOURCE_EXT=%~xI"`,
|
||||
`for /R "%EXTRACT_DIR%" %F in (*.exe) do (`,
|
||||
`set "SOURCE_EXE=%~fF"`,
|
||||
}
|
||||
for _, bad := range mustNotContain {
|
||||
if strings.Contains(script, bad) {
|
||||
t.Fatalf("windows update script contains invalid batch syntax: %s\nscript:\n%s", bad, script)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,9 +5,69 @@ import (
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// leadingSQLKeyword returns the first SQL keyword of a statement, lowercased,
// after skipping leading whitespace and `--`, `#`, and /* */ comments. It
// returns "" when the statement is empty, is all comments, an unterminated
// block comment, or does not start with a word character.
func leadingSQLKeyword(query string) string {
	rest := strings.TrimSpace(query)
	for rest != "" {
		rest = strings.TrimLeft(rest, " \t\r\n")
		if rest == "" {
			return ""
		}
		// Skip line comments (-- and #) up to the next newline, and block
		// comments up to the closing */. A comment that never ends swallows
		// the rest of the statement.
		if strings.HasPrefix(rest, "--") || strings.HasPrefix(rest, "#") {
			nl := strings.IndexByte(rest, '\n')
			if nl < 0 {
				return ""
			}
			rest = rest[nl+1:]
			continue
		}
		if strings.HasPrefix(rest, "/*") {
			end := strings.Index(rest, "*/")
			if end < 0 {
				return ""
			}
			rest = rest[end+2:]
			continue
		}
		break
	}
	if rest == "" {
		return ""
	}

	// Take the leading run of word characters as the keyword.
	for i, r := range rest {
		if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' {
			continue
		}
		if i == 0 {
			return ""
		}
		return strings.ToLower(rest[:i])
	}
	return strings.ToLower(rest)
}
|
||||
|
||||
func isReadOnlySQLQuery(dbType string, query string) bool {
|
||||
if strings.ToLower(strings.TrimSpace(dbType)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
|
||||
return true
|
||||
}
|
||||
|
||||
switch leadingSQLKeyword(query) {
|
||||
case "select", "with", "show", "describe", "desc", "explain", "pragma", "values":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func sanitizeSQLForPgLike(dbType string, query string) string {
|
||||
switch strings.ToLower(strings.TrimSpace(dbType)) {
|
||||
case "postgres", "kingbase":
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
// 有些情况下会出现多层重复引用(例如 """"schema"""" 或 ""schema"""),单次修复不一定收敛。
|
||||
// 这里做有限次数的迭代,直到输出不再变化。
|
||||
out := query
|
||||
|
||||
121
internal/app/window_translucency_darwin.go
Normal file
121
internal/app/window_translucency_darwin.go
Normal file
@@ -0,0 +1,121 @@
|
||||
//go:build darwin
|
||||
|
||||
package app
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -x objective-c -fblocks
|
||||
#cgo LDFLAGS: -framework Cocoa
|
||||
#import <Cocoa/Cocoa.h>
|
||||
#import <dispatch/dispatch.h>
|
||||
|
||||
static void gonaviTuneWindowTranslucency(NSWindow *window) {
|
||||
if (window == nil) {
|
||||
return;
|
||||
}
|
||||
CGFloat cornerRadius = 14.0;
|
||||
|
||||
[window setOpaque:NO];
|
||||
[window setBackgroundColor:[NSColor clearColor]];
|
||||
[window setHasShadow:YES];
|
||||
[window setMovableByWindowBackground:YES];
|
||||
|
||||
NSView *contentView = [window contentView];
|
||||
if (contentView == nil) {
|
||||
return;
|
||||
}
|
||||
|
||||
[contentView setWantsLayer:YES];
|
||||
[[contentView layer] setBackgroundColor:[[NSColor clearColor] CGColor]];
|
||||
[[contentView layer] setCornerRadius:cornerRadius];
|
||||
[[contentView layer] setMasksToBounds:YES];
|
||||
|
||||
NSVisualEffectView *effectView = nil;
|
||||
for (NSView *subview in [contentView subviews]) {
|
||||
if ([subview isKindOfClass:[NSVisualEffectView class]]) {
|
||||
effectView = (NSVisualEffectView *)subview;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (effectView == nil) {
|
||||
effectView = [[NSVisualEffectView alloc] initWithFrame:[contentView bounds]];
|
||||
[effectView setAutoresizingMask:NSViewWidthSizable | NSViewHeightSizable];
|
||||
[contentView addSubview:effectView positioned:NSWindowBelow relativeTo:nil];
|
||||
[effectView release];
|
||||
}
|
||||
|
||||
if (@available(macOS 10.14, *)) {
|
||||
[effectView setMaterial:NSVisualEffectMaterialHUDWindow];
|
||||
}
|
||||
[effectView setBlendingMode:NSVisualEffectBlendingModeBehindWindow];
|
||||
[effectView setState:NSVisualEffectStateActive];
|
||||
// 默认 alpha=0(不可见),由前端根据用户外观设置动态启用
|
||||
[effectView setAlphaValue:0.0];
|
||||
[effectView setWantsLayer:YES];
|
||||
[[effectView layer] setCornerRadius:cornerRadius];
|
||||
[[effectView layer] setMasksToBounds:YES];
|
||||
}
|
||||
|
||||
static void gonaviApplyWindowTranslucencyFix() {
|
||||
// 启动时应用窗口透明度修复,减少重试次数以降低启动期 GPU 负载
|
||||
for (int i = 0; i < 8; i++) {
|
||||
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(i * 250 * NSEC_PER_MSEC)), dispatch_get_main_queue(), ^{
|
||||
for (NSWindow *window in [NSApp windows]) {
|
||||
gonaviTuneWindowTranslucency(window);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 动态设置 NSVisualEffectView 的透明度和窗口不透明标志。
|
||||
// alpha <= 0 时窗口标记为 opaque,GPU 不再持续计算窗口背后的模糊效果。
|
||||
static void gonaviSetEffectViewAlpha(double alpha) {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
for (NSWindow *window in [NSApp windows]) {
|
||||
NSView *contentView = [window contentView];
|
||||
if (contentView == nil) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (NSView *subview in [contentView subviews]) {
|
||||
if ([subview isKindOfClass:[NSVisualEffectView class]]) {
|
||||
NSVisualEffectView *effectView = (NSVisualEffectView *)subview;
|
||||
[effectView setAlphaValue:alpha];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (alpha <= 0.01) {
|
||||
[window setOpaque:YES];
|
||||
} else {
|
||||
[window setOpaque:NO];
|
||||
[window setBackgroundColor:[NSColor clearColor]];
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
|
||||
// applyMacWindowTranslucencyFix triggers the Cocoa-side translucency setup
// (clear background, rounded-corner layer, NSVisualEffectView) for all app
// windows. The C helper dispatches the work several times on the main queue
// to also catch windows created shortly after startup.
func applyMacWindowTranslucencyFix() {
	C.gonaviApplyWindowTranslucencyFix()
}
|
||||
|
||||
// setMacWindowTranslucency dynamically adjusts macOS window translucency
// based on the user's appearance settings.
// With opacity=1.0 and blur<=0 the NSVisualEffectView is disabled (alpha=0)
// and the window is marked opaque, so the GPU no longer composites the blur
// behind the window, noticeably lowering CPU/GPU load.
func setMacWindowTranslucency(opacity float64, blur float64) {
	if opacity >= 0.999 && blur <= 0 {
		C.gonaviSetEffectViewAlpha(C.double(0.0))
	} else {
		// Translucent mode: map the user opacity onto the effect view's
		// alpha, clamped to [0.3, 0.85] so the effect stays visible but
		// bounded.
		alpha := (1.0 - opacity) * 1.2
		if alpha < 0.3 {
			alpha = 0.3
		}
		if alpha > 0.85 {
			alpha = 0.85
		}
		C.gonaviSetEffectViewAlpha(C.double(alpha))
	}
}
|
||||
7
internal/app/window_translucency_stub.go
Normal file
7
internal/app/window_translucency_stub.go
Normal file
@@ -0,0 +1,7 @@
|
||||
//go:build !darwin
|
||||
|
||||
package app
|
||||
|
||||
// applyMacWindowTranslucencyFix is a no-op on non-darwin platforms; the real
// implementation lives in the darwin-tagged file.
func applyMacWindowTranslucencyFix() {}

// setMacWindowTranslucency is a no-op on non-darwin platforms.
func setMacWindowTranslucency(opacity float64, blur float64) {}
|
||||
@@ -9,20 +9,58 @@ type SSHConfig struct {
|
||||
KeyPath string `json:"keyPath"`
|
||||
}
|
||||
|
||||
// ProxyConfig holds proxy connection details
|
||||
type ProxyConfig struct {
|
||||
Type string `json:"type"` // socks5 | http
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user,omitempty"`
|
||||
Password string `json:"password,omitempty"`
|
||||
}
|
||||
|
||||
// HTTPTunnelConfig holds independent HTTP CONNECT tunnel details
|
||||
type HTTPTunnelConfig struct {
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user,omitempty"`
|
||||
Password string `json:"password,omitempty"`
|
||||
}
|
||||
|
||||
// ConnectionConfig holds database connection details including SSH
|
||||
type ConnectionConfig struct {
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSL bool `json:"useSSL,omitempty"` // MySQL-like SSL/TLS switch
|
||||
SSLMode string `json:"sslMode,omitempty"` // preferred | required | skip-verify | disable
|
||||
SSLCertPath string `json:"sslCertPath,omitempty"` // TLS client certificate path (e.g., Dameng)
|
||||
SSLKeyPath string `json:"sslKeyPath,omitempty"` // TLS client private key path (e.g., Dameng)
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
UseProxy bool `json:"useProxy,omitempty"`
|
||||
Proxy ProxyConfig `json:"proxy,omitempty"`
|
||||
UseHTTPTunnel bool `json:"useHttpTunnel,omitempty"`
|
||||
HTTPTunnel HTTPTunnelConfig `json:"httpTunnel,omitempty"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica | cluster
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
}
|
||||
|
||||
// QueryResult is the standard response format for Wails methods
|
||||
@@ -31,6 +69,7 @@ type QueryResult struct {
|
||||
Message string `json:"message"`
|
||||
Data interface{} `json:"data"`
|
||||
Fields []string `json:"fields,omitempty"`
|
||||
QueryID string `json:"queryId,omitempty"` // Unique ID for query cancellation
|
||||
}
|
||||
|
||||
// ColumnDefinition represents a table column
|
||||
@@ -51,6 +90,7 @@ type IndexDefinition struct {
|
||||
NonUnique int `json:"nonUnique"`
|
||||
SeqInIndex int `json:"seqInIndex"`
|
||||
IndexType string `json:"indexType"`
|
||||
SubPart int `json:"subPart,omitempty"`
|
||||
}
|
||||
|
||||
// ForeignKeyDefinition represents a foreign key
|
||||
@@ -89,3 +129,12 @@ type ChangeSet struct {
|
||||
Updates []UpdateRow `json:"updates"`
|
||||
Deletes []map[string]interface{} `json:"deletes"`
|
||||
}
|
||||
|
||||
// MongoMemberInfo describes one member of a MongoDB deployment — presumably
// populated from replica-set status queries (confirm with callers).
type MongoMemberInfo struct {
	Host      string `json:"host"`
	Role      string `json:"role"`
	State     string `json:"state"`
	StateCode int    `json:"stateCode,omitempty"`
	Healthy   bool   `json:"healthy"`
	IsSelf    bool   `json:"isSelf,omitempty"` // true when this member answered the status query
}
|
||||
|
||||
9
internal/db/agent_process_stub.go
Normal file
9
internal/db/agent_process_stub.go
Normal file
@@ -0,0 +1,9 @@
|
||||
//go:build !windows
|
||||
|
||||
package db
|
||||
|
||||
import "os/exec"
|
||||
|
||||
func configureAgentProcess(cmd *exec.Cmd) {
|
||||
_ = cmd
|
||||
}
|
||||
20
internal/db/agent_process_windows.go
Normal file
20
internal/db/agent_process_windows.go
Normal file
@@ -0,0 +1,20 @@
|
||||
//go:build windows
|
||||
|
||||
package db
|
||||
|
||||
import (
|
||||
"os/exec"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// windowsCreateNoWindow is the Win32 CREATE_NO_WINDOW process-creation flag:
// the child process is started without a console window.
const windowsCreateNoWindow = 0x08000000

// configureAgentProcess hides the console window of agent child processes on
// Windows so background helpers do not flash a terminal at the user. A nil
// cmd is ignored.
func configureAgentProcess(cmd *exec.Cmd) {
	if cmd == nil {
		return
	}
	cmd.SysProcAttr = &syscall.SysProcAttr{
		HideWindow:    true,
		CreationFlags: windowsCreateNoWindow,
	}
}
|
||||
812
internal/db/clickhouse_impl.go
Normal file
812
internal/db/clickhouse_impl.go
Normal file
@@ -0,0 +1,812 @@
|
||||
//go:build gonavi_full_drivers || gonavi_clickhouse_driver
|
||||
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
clickhouse "github.com/ClickHouse/clickhouse-go/v2"
|
||||
)
|
||||
|
||||
// Defaults applied when the connection config leaves ClickHouse fields empty,
// plus a floor for read timeouts so long analytical queries get headroom.
const (
	defaultClickHousePort     = 9000
	defaultClickHouseUser     = "default"
	defaultClickHouseDatabase = "default"
	minClickHouseReadTimeout  = 5 * time.Minute
)

// ClickHouseDB wraps a database/sql ClickHouse connection together with the
// optional SSH local forwarder used to reach it and the selected database.
type ClickHouseDB struct {
	conn        *sql.DB
	pingTimeout time.Duration       // timeout budget for connectivity checks
	forwarder   *ssh.LocalForwarder // non-nil when tunnelling through SSH
	database    string              // currently selected database name
}
|
||||
|
||||
func normalizeClickHouseConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := applyClickHouseURI(config)
|
||||
if strings.TrimSpace(normalized.Host) == "" {
|
||||
normalized.Host = "localhost"
|
||||
}
|
||||
if normalized.Port <= 0 {
|
||||
normalized.Port = defaultClickHousePort
|
||||
}
|
||||
if strings.TrimSpace(normalized.User) == "" {
|
||||
normalized.User = defaultClickHouseUser
|
||||
}
|
||||
if strings.TrimSpace(normalized.Database) == "" {
|
||||
normalized.Database = defaultClickHouseDatabase
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
// applyClickHouseURI merges values parsed from a clickhouse:// URI into the
// config. Explicitly set config fields (user, password, database, host/port)
// always win over URI values; a non-clickhouse or unparsable URI leaves the
// config untouched.
func applyClickHouseURI(config connection.ConnectionConfig) connection.ConnectionConfig {
	uriText := strings.TrimSpace(config.URI)
	if uriText == "" {
		return config
	}
	lowerURI := strings.ToLower(uriText)
	if !strings.HasPrefix(lowerURI, "clickhouse://") {
		return config
	}

	parsed, err := url.Parse(uriText)
	if err != nil {
		// Malformed URI: keep whatever was configured explicitly.
		return config
	}

	// Credentials: only fill in what the config left empty.
	if parsed.User != nil {
		if strings.TrimSpace(config.User) == "" {
			config.User = parsed.User.Username()
		}
		if pass, ok := parsed.User.Password(); ok && config.Password == "" {
			config.Password = pass
		}
	}

	// Database: URI path first, then the ?database= query parameter.
	if dbName := strings.TrimPrefix(strings.TrimSpace(parsed.Path), "/"); dbName != "" && strings.TrimSpace(config.Database) == "" {
		config.Database = dbName
	}
	if strings.TrimSpace(config.Database) == "" {
		if dbName := strings.TrimSpace(parsed.Query().Get("database")); dbName != "" {
			config.Database = dbName
		}
	}

	// Host/port: adopt the URI authority only when no host is configured;
	// a configured port (or defaultClickHousePort) backs a port-less URI.
	defaultPort := config.Port
	if defaultPort <= 0 {
		defaultPort = defaultClickHousePort
	}
	if strings.TrimSpace(config.Host) == "" {
		host, port, ok := parseHostPortWithDefault(parsed.Host, defaultPort)
		if ok {
			config.Host = host
			config.Port = port
		}
	}
	if config.Port <= 0 {
		config.Port = defaultPort
	}
	return config
}
|
||||
|
||||
// buildClickHouseOptions translates the connection config into driver
// options: address, auth, dial/read timeouts and optional TLS. The read
// timeout is raised to at least minClickHouseReadTimeout so long-running
// queries are not cut off by a short connect timeout.
func (c *ClickHouseDB) buildClickHouseOptions(config connection.ConnectionConfig) *clickhouse.Options {
	connectTimeout := getConnectTimeout(config)
	readTimeout := connectTimeout
	if readTimeout < minClickHouseReadTimeout {
		readTimeout = minClickHouseReadTimeout
	}
	protocol := detectClickHouseProtocol(config)
	opts := &clickhouse.Options{
		Protocol: protocol,
		Addr: []string{
			net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
		},
		Auth: clickhouse.Auth{
			Database: strings.TrimSpace(config.Database),
			Username: strings.TrimSpace(config.User),
			Password: config.Password,
		},
		DialTimeout: connectTimeout,
		ReadTimeout: readTimeout,
	}
	// resolveGenericTLSConfig returns nil when TLS is not requested.
	if tlsConfig := resolveGenericTLSConfig(config); tlsConfig != nil {
		opts.TLS = tlsConfig
	}
	return opts
}
|
||||
|
||||
func detectClickHouseProtocol(config connection.ConnectionConfig) clickhouse.Protocol {
|
||||
uriText := strings.ToLower(strings.TrimSpace(config.URI))
|
||||
if strings.HasPrefix(uriText, "http://") || strings.HasPrefix(uriText, "https://") {
|
||||
return clickhouse.HTTP
|
||||
}
|
||||
if config.Port == 8123 || config.Port == 8443 {
|
||||
return clickhouse.HTTP
|
||||
}
|
||||
return clickhouse.Native
|
||||
}
|
||||
|
||||
// isClickHouseProtocolMismatch reports whether err looks like a client that
// spoke the wrong protocol to the port (e.g. native handshake against an
// HTTP endpoint), based on known substrings of the error text.
func isClickHouseProtocolMismatch(err error) bool {
	if err == nil {
		return false
	}
	msg := strings.ToLower(strings.TrimSpace(err.Error()))
	if msg == "" {
		return false
	}
	switch {
	case strings.Contains(msg, "unexpected packet [72]"):
		return true
	case strings.Contains(msg, "unexpected packet") && strings.Contains(msg, "handshake"):
		return true
	case strings.Contains(msg, "http response to https client"):
		return true
	case strings.Contains(msg, "malformed http response"):
		return true
	}
	return false
}
|
||||
|
||||
func withClickHouseProtocol(config connection.ConnectionConfig, protocol clickhouse.Protocol) connection.ConnectionConfig {
|
||||
next := config
|
||||
switch protocol {
|
||||
case clickhouse.HTTP:
|
||||
if next.Port == 0 {
|
||||
next.Port = 8123
|
||||
}
|
||||
default:
|
||||
if next.Port == 0 {
|
||||
next.Port = defaultClickHousePort
|
||||
}
|
||||
}
|
||||
return next
|
||||
}
|
||||
|
||||
// Connect (re)establishes the ClickHouse connection described by config.
// It verifies the pure-Go driver is enabled, tears down any previous
// forwarder/connection, optionally opens an SSH local port forward, and then
// tries attempts in order: the normalized config, plus (for "preferred" SSL)
// a plaintext fallback. Each attempt is tried with its detected protocol
// first and then the alternative (Native <-> HTTP), since 8123/8443 speak
// HTTP while 9000/9440 speak the native protocol.
func (c *ClickHouseDB) Connect(config connection.ConnectionConfig) error {
	// Refuse early when the driver is not enabled at runtime.
	if supported, reason := DriverRuntimeSupportStatus("clickhouse"); !supported {
		if strings.TrimSpace(reason) == "" {
			reason = "ClickHouse 纯 Go 驱动未启用,请先在驱动管理中安装启用"
		}
		return fmt.Errorf("%s", reason)
	}

	// Drop any leftover forwarder/connection from a previous Connect.
	if c.forwarder != nil {
		_ = c.forwarder.Close()
		c.forwarder = nil
	}
	if c.conn != nil {
		_ = c.conn.Close()
		c.conn = nil
	}

	runConfig := normalizeClickHouseConfig(config)
	c.pingTimeout = getConnectTimeout(runConfig)
	c.database = runConfig.Database

	if runConfig.UseSSH {
		logger.Infof("ClickHouse 使用 SSH 连接:地址=%s:%d 用户=%s", runConfig.Host, runConfig.Port, runConfig.User)
		forwarder, err := ssh.GetOrCreateLocalForwarder(runConfig.SSH, runConfig.Host, runConfig.Port)
		if err != nil {
			return fmt.Errorf("创建 SSH 隧道失败:%w", err)
		}
		c.forwarder = forwarder

		// Rewrite host/port to the local end of the tunnel and dial that
		// directly (UseSSH is cleared so no second tunnel is created).
		host, portText, err := net.SplitHostPort(forwarder.LocalAddr)
		if err != nil {
			return fmt.Errorf("解析本地转发地址失败:%w", err)
		}
		port, err := strconv.Atoi(portText)
		if err != nil {
			return fmt.Errorf("解析本地端口失败:%w", err)
		}

		runConfig.Host = host
		runConfig.Port = port
		runConfig.UseSSH = false
		logger.Infof("ClickHouse 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
	}

	// Attempt list: the normalized config, then (SSL "preferred" only) the
	// same config with SSL disabled.
	attempts := []connection.ConnectionConfig{runConfig}
	if shouldTrySSLPreferredFallback(runConfig) {
		attempts = append(attempts, withSSLDisabled(runConfig))
	}

	var failures []string
	for idx, attempt := range attempts {
		// Detected protocol first, then the other one as a retry.
		primaryProtocol := detectClickHouseProtocol(attempt)
		protocols := []clickhouse.Protocol{primaryProtocol}
		if primaryProtocol == clickhouse.Native {
			protocols = append(protocols, clickhouse.HTTP)
		} else {
			protocols = append(protocols, clickhouse.Native)
		}

		for pIdx, protocol := range protocols {
			protocolConfig := withClickHouseProtocol(attempt, protocol)
			c.conn = clickhouse.OpenDB(c.buildClickHouseOptions(protocolConfig))
			if err := c.Ping(); err != nil {
				failures = append(failures, fmt.Sprintf("第%d次连接验证失败(protocol=%s): %v", idx+1, protocol.String(), err))
				if c.conn != nil {
					_ = c.conn.Close()
					c.conn = nil
				}
				if pIdx == 0 && !isClickHouseProtocolMismatch(err) {
					// The first failure does not look like a protocol
					// mismatch, so retrying with the other protocol is
					// pointless — move on to the next attempt.
					break
				}
				continue
			}
			if idx > 0 {
				logger.Warnf("ClickHouse SSL 优先连接失败,已回退至明文连接")
			}
			if pIdx > 0 {
				logger.Warnf("ClickHouse 已自动切换连接协议为 %s(常见于 8123/8443 HTTP 端口)", protocol.String())
			}
			return nil
		}
	}

	// Every attempt failed: release the forwarder/connection and report all
	// collected failure messages.
	_ = c.Close()
	return fmt.Errorf("连接建立后验证失败(可检查 ClickHouse 端口与协议是否匹配:Native=9000/9440,HTTP=8123/8443):%s", strings.Join(failures, ";"))
}
|
||||
|
||||
func (c *ClickHouseDB) Close() error {
|
||||
if c.forwarder != nil {
|
||||
if err := c.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 ClickHouse SSH 端口转发失败:%v", err)
|
||||
}
|
||||
c.forwarder = nil
|
||||
}
|
||||
if c.conn != nil {
|
||||
return c.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Ping() error {
|
||||
if c.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := c.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return c.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
rows, err := c.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
rows, err := c.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := c.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Exec(query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := c.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetDatabases() ([]string, error) {
|
||||
data, _, err := c.Query("SELECT name FROM system.databases ORDER BY name")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]string, 0, len(data))
|
||||
for _, row := range data {
|
||||
if val, ok := getClickHouseValueFromRow(row, "name", "database"); ok {
|
||||
result = append(result, fmt.Sprintf("%v", val))
|
||||
continue
|
||||
}
|
||||
for _, value := range row {
|
||||
result = append(result, fmt.Sprintf("%v", value))
|
||||
break
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetTables lists tables. With a target database (the argument, falling back
// to the connection's default) it returns bare table names; with no database
// at all it returns "db.table" pairs across every database.
func (c *ClickHouseDB) GetTables(dbName string) ([]string, error) {
	targetDB := strings.TrimSpace(dbName)
	if targetDB == "" {
		targetDB = strings.TrimSpace(c.database)
	}

	var query string
	if targetDB != "" {
		// Literal is escaped ('' doubling); identifiers cannot be bound here.
		query = fmt.Sprintf(
			"SELECT name FROM system.tables WHERE database = '%s' ORDER BY name",
			escapeClickHouseSQLLiteral(targetDB),
		)
	} else {
		query = "SELECT database, name FROM system.tables ORDER BY database, name"
	}

	data, _, err := c.Query(query)
	if err != nil {
		return nil, err
	}

	result := make([]string, 0, len(data))
	for _, row := range data {
		if targetDB != "" {
			if val, ok := getClickHouseValueFromRow(row, "name", "table", "table_name"); ok {
				result = append(result, fmt.Sprintf("%v", val))
				continue
			}
		} else {
			databaseValue, hasDB := getClickHouseValueFromRow(row, "database", "schema_name")
			tableValue, hasTable := getClickHouseValueFromRow(row, "name", "table", "table_name")
			if hasDB && hasTable {
				result = append(result, fmt.Sprintf("%v.%v", databaseValue, tableValue))
				continue
			}
		}
		// Fallback when the expected column names are absent: take the
		// first (arbitrary) column value of the row.
		for _, value := range row {
			result = append(result, fmt.Sprintf("%v", value))
			break
		}
	}
	return result, nil
}
|
||||
|
||||
// GetCreateStatement returns the SHOW CREATE TABLE text for db.table. It
// first looks for well-known result column names and then falls back to the
// longest non-empty cell containing "CREATE " (column naming varies by
// driver/protocol).
func (c *ClickHouseDB) GetCreateStatement(dbName, tableName string) (string, error) {
	database, table, err := c.resolveDatabaseAndTable(dbName, tableName)
	if err != nil {
		return "", err
	}

	query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", quoteClickHouseIdentifier(database), quoteClickHouseIdentifier(table))
	data, _, err := c.Query(query)
	if err != nil {
		return "", err
	}
	if len(data) == 0 {
		return "", fmt.Errorf("create statement not found")
	}
	row := data[0]
	// Preferred: a recognizable column name.
	if val, ok := getClickHouseValueFromRow(row, "statement", "create_statement", "sql", "query"); ok {
		text := strings.TrimSpace(fmt.Sprintf("%v", val))
		if text != "" {
			return text, nil
		}
	}

	// Fallback: the longest non-empty cell that looks like DDL.
	longest := ""
	for _, value := range row {
		text := strings.TrimSpace(fmt.Sprintf("%v", value))
		if text == "" {
			continue
		}
		if strings.Contains(strings.ToUpper(text), "CREATE ") && len(text) > len(longest) {
			longest = text
		}
	}
	if longest != "" {
		return longest, nil
	}
	return "", fmt.Errorf("create statement not found")
}
|
||||
|
||||
// GetColumns reads column metadata for db.table from system.columns and maps
// it onto the generic ColumnDefinition shape: Nullable(...) types become
// nullable, primary/sorting-key membership becomes MySQL-style key markers
// (PRI/MUL), and non-DEFAULT default kinds (e.g. MATERIALIZED/ALIAS) are
// surfaced through Extra.
func (c *ClickHouseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
	database, table, err := c.resolveDatabaseAndTable(dbName, tableName)
	if err != nil {
		return nil, err
	}

	// Literals are escaped ('' doubling); identifiers cannot be bound here.
	query := fmt.Sprintf(`
		SELECT
			name,
			type,
			default_kind,
			default_expression,
			is_in_primary_key,
			is_in_sorting_key,
			comment
		FROM system.columns
		WHERE database = '%s' AND table = '%s'
		ORDER BY position`,
		escapeClickHouseSQLLiteral(database),
		escapeClickHouseSQLLiteral(table),
	)
	data, _, err := c.Query(query)
	if err != nil {
		return nil, err
	}

	columns := make([]connection.ColumnDefinition, 0, len(data))
	for _, row := range data {
		// Column naming varies by driver/protocol; look up tolerant aliases.
		nameValue, _ := getClickHouseValueFromRow(row, "name", "column_name")
		typeValue, _ := getClickHouseValueFromRow(row, "type", "data_type")
		defaultKind, _ := getClickHouseValueFromRow(row, "default_kind")
		defaultExpr, hasDefault := getClickHouseValueFromRow(row, "default_expression", "column_default")
		commentValue, _ := getClickHouseValueFromRow(row, "comment")
		inPrimary, _ := getClickHouseValueFromRow(row, "is_in_primary_key")
		inSorting, _ := getClickHouseValueFromRow(row, "is_in_sorting_key")

		// Nullability in ClickHouse is encoded in the type wrapper.
		colType := strings.TrimSpace(fmt.Sprintf("%v", typeValue))
		nullable := "NO"
		if strings.HasPrefix(strings.ToLower(colType), "nullable(") {
			nullable = "YES"
		}

		// Key markers follow the MySQL convention used elsewhere in the app.
		key := ""
		if isClickHouseTruthy(inPrimary) {
			key = "PRI"
		} else if isClickHouseTruthy(inSorting) {
			key = "MUL"
		}

		// Only non-plain default kinds are interesting enough for Extra.
		extra := ""
		kindText := strings.ToUpper(strings.TrimSpace(fmt.Sprintf("%v", defaultKind)))
		if kindText != "" && kindText != "DEFAULT" {
			extra = kindText
		}

		col := connection.ColumnDefinition{
			Name:     strings.TrimSpace(fmt.Sprintf("%v", nameValue)),
			Type:     colType,
			Nullable: nullable,
			Key:      key,
			Extra:    extra,
			Comment:  strings.TrimSpace(fmt.Sprintf("%v", commentValue)),
		}
		// Default stays nil (not empty string) when there is no expression.
		if hasDefault && defaultExpr != nil {
			text := strings.TrimSpace(fmt.Sprintf("%v", defaultExpr))
			if text != "" {
				col.Default = &text
			}
		}
		columns = append(columns, col)
	}
	return columns, nil
}
|
||||
|
||||
// GetAllColumns lists (table, column, type) triples for autocompletion.
// With a target database (the argument, falling back to the connection's
// default) table names are bare; with none, system databases are excluded
// and table names are qualified as "db.table".
func (c *ClickHouseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	targetDB := strings.TrimSpace(dbName)
	if targetDB == "" {
		targetDB = strings.TrimSpace(c.database)
	}

	var query string
	if targetDB != "" {
		// Literal is escaped ('' doubling); identifiers cannot be bound here.
		query = fmt.Sprintf(`
			SELECT
				database,
				table,
				name,
				type
			FROM system.columns
			WHERE database = '%s'
			ORDER BY table, position`,
			escapeClickHouseSQLLiteral(targetDB),
		)
	} else {
		query = `
			SELECT
				database,
				table,
				name,
				type
			FROM system.columns
			WHERE database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA')
			ORDER BY database, table, position`
	}

	data, _, err := c.Query(query)
	if err != nil {
		return nil, err
	}

	result := make([]connection.ColumnDefinitionWithTable, 0, len(data))
	for _, row := range data {
		// Column naming varies by driver/protocol; look up tolerant aliases.
		databaseValue, _ := getClickHouseValueFromRow(row, "database")
		tableValue, hasTable := getClickHouseValueFromRow(row, "table", "table_name")
		nameValue, hasName := getClickHouseValueFromRow(row, "name", "column_name")
		typeValue, _ := getClickHouseValueFromRow(row, "type", "data_type")
		if !hasTable || !hasName {
			// Row is unusable without both a table and a column name.
			continue
		}

		tableName := strings.TrimSpace(fmt.Sprintf("%v", tableValue))
		if targetDB == "" {
			// Cross-database listing: qualify the table with its database.
			dbText := strings.TrimSpace(fmt.Sprintf("%v", databaseValue))
			if dbText != "" {
				tableName = dbText + "." + tableName
			}
		}

		result = append(result, connection.ColumnDefinitionWithTable{
			TableName: tableName,
			Name:      strings.TrimSpace(fmt.Sprintf("%v", nameValue)),
			Type:      strings.TrimSpace(fmt.Sprintf("%v", typeValue)),
		})
	}
	return result, nil
}
|
||||
|
||||
// GetIndexes is a stub for the generic interface: ClickHouse index metadata
// is not surfaced here; it always returns an empty (non-nil) slice.
func (c *ClickHouseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	return []connection.IndexDefinition{}, nil
}
|
||||
|
||||
// GetForeignKeys is a stub for the generic interface: ClickHouse has no
// foreign keys; it always returns an empty (non-nil) slice.
func (c *ClickHouseDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	return []connection.ForeignKeyDefinition{}, nil
}
|
||||
|
||||
// GetTriggers is a stub for the generic interface: ClickHouse has no
// triggers; it always returns an empty (non-nil) slice.
func (c *ClickHouseDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	return []connection.TriggerDefinition{}, nil
}
|
||||
|
||||
func (c *ClickHouseDB) resolveDatabaseAndTable(dbName, tableName string) (string, string, error) {
|
||||
rawTable := strings.TrimSpace(tableName)
|
||||
if rawTable == "" {
|
||||
return "", "", fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
resolvedDB := strings.TrimSpace(dbName)
|
||||
resolvedTable := rawTable
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
if dbPart := normalizeClickHouseIdentifierPart(parts[0]); dbPart != "" {
|
||||
resolvedDB = dbPart
|
||||
}
|
||||
resolvedTable = normalizeClickHouseIdentifierPart(parts[1])
|
||||
} else {
|
||||
resolvedTable = normalizeClickHouseIdentifierPart(rawTable)
|
||||
}
|
||||
|
||||
if resolvedDB == "" {
|
||||
resolvedDB = strings.TrimSpace(c.database)
|
||||
}
|
||||
if resolvedDB == "" {
|
||||
resolvedDB = defaultClickHouseDatabase
|
||||
}
|
||||
if resolvedTable == "" {
|
||||
return "", "", fmt.Errorf("table name required")
|
||||
}
|
||||
return resolvedDB, resolvedTable, nil
|
||||
}
|
||||
|
||||
// normalizeClickHouseIdentifierPart trims whitespace and removes one level
// of matching backtick or double-quote wrapping from an identifier part.
func normalizeClickHouseIdentifierPart(raw string) string {
	text := strings.TrimSpace(raw)
	for _, q := range []string{"`", `"`} {
		if len(text) >= 2 && strings.HasPrefix(text, q) && strings.HasSuffix(text, q) {
			return strings.TrimSpace(text[1 : len(text)-1])
		}
	}
	return text
}
|
||||
|
||||
// quoteClickHouseIdentifier wraps a trimmed identifier in backticks, doubling
// any embedded backticks so the result is safe to splice into SQL.
func quoteClickHouseIdentifier(raw string) string {
	var b strings.Builder
	b.WriteByte('`')
	b.WriteString(strings.ReplaceAll(strings.TrimSpace(raw), "`", "``"))
	b.WriteByte('`')
	return b.String()
}
|
||||
|
||||
// escapeClickHouseSQLLiteral trims the value and doubles single quotes so it
// can be embedded in a single-quoted SQL string literal.
func escapeClickHouseSQLLiteral(raw string) string {
	trimmed := strings.TrimSpace(raw)
	return strings.ReplaceAll(trimmed, "'", "''")
}
|
||||
|
||||
// getClickHouseValueFromRow looks up the first of keys in row, first by
// exact match and then case-insensitively, reporting whether a value was
// found.
func getClickHouseValueFromRow(row map[string]interface{}, keys ...string) (interface{}, bool) {
	if len(row) == 0 {
		return nil, false
	}
	// Exact-case pass.
	for _, key := range keys {
		if v, ok := row[key]; ok {
			return v, true
		}
	}
	// Case-insensitive pass.
	for rowKey, v := range row {
		for _, key := range keys {
			if strings.EqualFold(rowKey, key) {
				return v, true
			}
		}
	}
	return nil, false
}
|
||||
|
||||
// isClickHouseTruthy interprets a flag cell from system tables: bools are
// taken as-is, integer types compare against zero, and everything else is
// rendered as text and matched against "1"/"true"/"yes"/"y".
func isClickHouseTruthy(value interface{}) bool {
	if b, ok := value.(bool); ok {
		return b
	}
	switch n := value.(type) {
	case int:
		return n != 0
	case int8:
		return n != 0
	case int16:
		return n != 0
	case int32:
		return n != 0
	case int64:
		return n != 0
	case uint:
		return n != 0
	case uint8:
		return n != 0
	case uint16:
		return n != 0
	case uint32:
		return n != 0
	case uint64:
		return n != 0
	}
	// Strings (and any other type) fall back to textual comparison.
	switch strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", value))) {
	case "1", "true", "yes", "y":
		return true
	}
	return false
}
|
||||
|
||||
// ApplyChanges applies a ChangeSet to tableName in the order deletes,
// updates, inserts. Deletes and updates are emulated through ClickHouse
// ALTER TABLE ... DELETE/UPDATE mutations; inserts are plain INSERTs.
// Changes missing a key/value expression are silently skipped; the first
// failing statement aborts with the offending SQL in the error.
func (c *ClickHouseDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	if c.conn == nil {
		return fmt.Errorf("connection not open")
	}

	database, table, err := c.resolveDatabaseAndTable(c.database, tableName)
	if err != nil {
		return err
	}
	qualifiedTable := fmt.Sprintf("%s.%s", quoteClickHouseIdentifier(database), quoteClickHouseIdentifier(table))

	// Deletes: one mutation per primary-key map.
	for _, pk := range changes.Deletes {
		whereExpr := buildClickHouseWhereClause(pk)
		if whereExpr == "" {
			// No usable key columns — nothing safe to delete.
			continue
		}
		query := fmt.Sprintf("ALTER TABLE %s DELETE WHERE %s", qualifiedTable, whereExpr)
		if _, err := c.conn.Exec(query); err != nil {
			return fmt.Errorf("delete error: %v; sql=%s", err, query)
		}
	}

	// Updates: one mutation per changed row.
	for _, update := range changes.Updates {
		setExpr := buildClickHouseAssignments(update.Values)
		whereExpr := buildClickHouseWhereClause(update.Keys)
		if setExpr == "" || whereExpr == "" {
			// Need both assignments and key columns to form a safe UPDATE.
			continue
		}
		query := fmt.Sprintf("ALTER TABLE %s UPDATE %s WHERE %s", qualifiedTable, setExpr, whereExpr)
		if _, err := c.conn.Exec(query); err != nil {
			return fmt.Errorf("update error: %v; sql=%s", err, query)
		}
	}

	// Inserts: one statement per new row.
	for _, row := range changes.Inserts {
		query, err := buildClickHouseInsertSQL(qualifiedTable, row)
		if err != nil {
			return err
		}
		if query == "" {
			// Row had no usable columns.
			continue
		}
		if _, err := c.conn.Exec(query); err != nil {
			return fmt.Errorf("insert error: %v; sql=%s", err, query)
		}
	}
	return nil
}
|
||||
|
||||
func buildClickHouseInsertSQL(qualifiedTable string, row map[string]interface{}) (string, error) {
|
||||
if len(row) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
cols := make([]string, 0, len(row))
|
||||
for k := range row {
|
||||
if strings.TrimSpace(k) == "" {
|
||||
continue
|
||||
}
|
||||
cols = append(cols, k)
|
||||
}
|
||||
if len(cols) == 0 {
|
||||
return "", nil
|
||||
}
|
||||
sort.Strings(cols)
|
||||
quotedCols := make([]string, 0, len(cols))
|
||||
values := make([]string, 0, len(cols))
|
||||
for _, col := range cols {
|
||||
quotedCols = append(quotedCols, quoteClickHouseIdentifier(col))
|
||||
values = append(values, clickHouseLiteral(row[col]))
|
||||
}
|
||||
return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(quotedCols, ", "), strings.Join(values, ", ")), nil
|
||||
}
|
||||
|
||||
func buildClickHouseAssignments(values map[string]interface{}) string {
|
||||
if len(values) == 0 {
|
||||
return ""
|
||||
}
|
||||
cols := make([]string, 0, len(values))
|
||||
for k := range values {
|
||||
if strings.TrimSpace(k) == "" {
|
||||
continue
|
||||
}
|
||||
cols = append(cols, k)
|
||||
}
|
||||
sort.Strings(cols)
|
||||
parts := make([]string, 0, len(cols))
|
||||
for _, col := range cols {
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", quoteClickHouseIdentifier(col), clickHouseLiteral(values[col])))
|
||||
}
|
||||
return strings.Join(parts, ", ")
|
||||
}
|
||||
|
||||
func buildClickHouseWhereClause(keys map[string]interface{}) string {
|
||||
if len(keys) == 0 {
|
||||
return ""
|
||||
}
|
||||
cols := make([]string, 0, len(keys))
|
||||
for k := range keys {
|
||||
if strings.TrimSpace(k) == "" {
|
||||
continue
|
||||
}
|
||||
cols = append(cols, k)
|
||||
}
|
||||
sort.Strings(cols)
|
||||
parts := make([]string, 0, len(cols))
|
||||
for _, col := range cols {
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", quoteClickHouseIdentifier(col), clickHouseLiteral(keys[col])))
|
||||
}
|
||||
return strings.Join(parts, " AND ")
|
||||
}
|
||||
|
||||
// clickHouseLiteral renders a Go value as a ClickHouse SQL literal: NULL,
// 0/1 for bools, bare numerics, and single-quoted (quote-doubled) text for
// times, byte slices and everything else.
func clickHouseLiteral(value interface{}) string {
	quote := func(s string) string {
		return "'" + strings.ReplaceAll(s, "'", "''") + "'"
	}
	switch val := value.(type) {
	case nil:
		return "NULL"
	case bool:
		if !val {
			return "0"
		}
		return "1"
	case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64:
		return fmt.Sprintf("%v", val)
	case time.Time:
		return quote(val.Format("2006-01-02 15:04:05"))
	case []byte:
		return quote(string(val))
	default:
		return quote(fmt.Sprintf("%v", val))
	}
}
|
||||
@@ -1,3 +1,5 @@
|
||||
//go:build gonavi_full_drivers || gonavi_dameng_driver
|
||||
|
||||
package db
|
||||
|
||||
import (
|
||||
@@ -34,6 +36,14 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
if config.Database != "" {
|
||||
q.Set("schema", config.Database)
|
||||
}
|
||||
if config.UseSSL {
|
||||
if certPath := strings.TrimSpace(config.SSLCertPath); certPath != "" {
|
||||
q.Set("SSL_CERT_PATH", certPath)
|
||||
}
|
||||
if keyPath := strings.TrimSpace(config.SSLKeyPath); keyPath != "" {
|
||||
q.Set("SSL_KEY_PATH", keyPath)
|
||||
}
|
||||
}
|
||||
if escapedPassword != config.Password {
|
||||
// 达梦驱动要求:密码包含特殊字符时,password 需 PathEscape,并添加 escapeProcess=true 让驱动解码。
|
||||
q.Set("escapeProcess", "true")
|
||||
@@ -48,8 +58,12 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
if runConfig.UseSSL {
|
||||
if strings.TrimSpace(runConfig.SSLCertPath) == "" || strings.TrimSpace(runConfig.SSLKeyPath) == "" {
|
||||
return fmt.Errorf("达梦启用 SSL 需要同时配置证书路径(sslCertPath)与私钥路径(sslKeyPath)")
|
||||
}
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
@@ -78,22 +92,37 @@ func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = d.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("达梦数据库通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = d.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(config)
|
||||
if err := d.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := d.getDSN(attempt)
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := d.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
d.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("达梦 SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (d *DamengDB) Close() error {
|
||||
@@ -175,22 +204,9 @@ func (d *DamengDB) Exec(query string) (int64, error) {
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetDatabases() ([]string, error) {
|
||||
// DM: List Users/Schemas
|
||||
data, _, err := d.Query("SELECT username FROM dba_users")
|
||||
if err != nil {
|
||||
// Fallback if dba_users not accessible
|
||||
data, _, err = d.Query("SELECT username FROM all_users")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["USERNAME"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
// 达梦在本项目中将 schema/owner 作为“数据库”展示口径。
|
||||
// 先查当前 schema / 当前用户,再聚合可见用户与 owner,避免权限受限时返回空列表。
|
||||
return collectDamengDatabaseNames(d.Query)
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetTables(dbName string) ([]string, error) {
|
||||
|
||||
91
internal/db/dameng_metadata.go
Normal file
91
internal/db/dameng_metadata.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// damengDatabaseQueries are tried in order when listing Dameng "databases"
// (this project presents schemas/owners as databases). Earlier entries need
// the least privilege (current schema / current user); later ones enumerate
// all visible users and object owners.
var damengDatabaseQueries = []string{
	"SELECT SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') AS DATABASE_NAME FROM DUAL",
	"SELECT SYS_CONTEXT('USERENV', 'CURRENT_USER') AS DATABASE_NAME FROM DUAL",
	"SELECT USERNAME AS DATABASE_NAME FROM USER_USERS",
	"SELECT USERNAME AS DATABASE_NAME FROM ALL_USERS ORDER BY USERNAME",
	"SELECT USERNAME AS DATABASE_NAME FROM DBA_USERS ORDER BY USERNAME",
	"SELECT USERNAME AS DATABASE_NAME FROM SYS.DBA_USERS ORDER BY USERNAME",
	"SELECT DISTINCT OWNER AS DATABASE_NAME FROM ALL_OBJECTS ORDER BY OWNER",
	"SELECT DISTINCT OWNER AS DATABASE_NAME FROM ALL_TABLES ORDER BY OWNER",
}
|
||||
|
||||
// damengQueryFunc abstracts the database Query method so the collection
// logic below can be unit-tested without a live Dameng connection.
type damengQueryFunc func(query string) ([]map[string]interface{}, []string, error)

// collectDamengDatabaseNames runs damengDatabaseQueries in order and merges
// every resolved name, de-duplicated case-insensitively (first-seen casing
// wins). Individual query errors are tolerated — privileges vary — and only
// when no name at all was collected is the last error returned. The result
// is sorted by upper-cased name.
func collectDamengDatabaseNames(query damengQueryFunc) ([]string, error) {
	seen := make(map[string]struct{})
	dbs := make([]string, 0, 64)
	var lastErr error

	for _, q := range damengDatabaseQueries {
		data, _, err := query(q)
		if err != nil {
			lastErr = err
			continue
		}
		for _, row := range data {
			// Preferred well-known column aliases first.
			name := getDamengRowString(row,
				"DATABASE_NAME",
				"USERNAME",
				"OWNER",
				"SCHEMA_NAME",
				"CURRENT_SCHEMA",
				"CURRENT_USER",
			)
			if name == "" {
				// Fallback: first non-empty cell in the row.
				for _, v := range row {
					text := strings.TrimSpace(fmt.Sprintf("%v", v))
					if text == "" || strings.EqualFold(text, "<nil>") {
						continue
					}
					name = text
					break
				}
			}
			if name == "" {
				continue
			}
			// Case-insensitive de-duplication.
			key := strings.ToUpper(name)
			if _, ok := seen[key]; ok {
				continue
			}
			seen[key] = struct{}{}
			dbs = append(dbs, name)
		}
	}

	if len(dbs) == 0 && lastErr != nil {
		return nil, lastErr
	}

	sort.Slice(dbs, func(i, j int) bool {
		return strings.ToUpper(dbs[i]) < strings.ToUpper(dbs[j])
	})
	return dbs, nil
}
|
||||
|
||||
// getDamengRowString returns the trimmed string value of the first key that
// matches a row column case-insensitively. A matching column whose value is
// empty or "<nil>" yields "" immediately (remaining keys are not tried),
// matching the original lookup semantics.
func getDamengRowString(row map[string]interface{}, keys ...string) string {
	if len(row) == 0 {
		return ""
	}
	for _, key := range keys {
		want := strings.TrimSpace(key)
		for rowKey, v := range row {
			if !strings.EqualFold(strings.TrimSpace(rowKey), want) {
				continue
			}
			text := strings.TrimSpace(fmt.Sprintf("%v", v))
			if text == "" || strings.EqualFold(text, "<nil>") {
				return ""
			}
			return text
		}
	}
	return ""
}
|
||||
73
internal/db/dameng_metadata_test.go
Normal file
73
internal/db/dameng_metadata_test.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCollectDamengDatabaseNames_UsesCurrentSchemaFallback(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := collectDamengDatabaseNames(func(query string) ([]map[string]interface{}, []string, error) {
|
||||
switch query {
|
||||
case damengDatabaseQueries[0]:
|
||||
return []map[string]interface{}{{"DATABASE_NAME": "APP_SCHEMA"}}, nil, nil
|
||||
case damengDatabaseQueries[1]:
|
||||
return []map[string]interface{}{{"DATABASE_NAME": "app_schema"}}, nil, nil
|
||||
default:
|
||||
return nil, nil, errors.New("permission denied")
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("collectDamengDatabaseNames 返回错误: %v", err)
|
||||
}
|
||||
|
||||
want := []string{"APP_SCHEMA"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected database names, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCollectDamengDatabaseNames_CollectsOwnersWhenVisible(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := collectDamengDatabaseNames(func(query string) ([]map[string]interface{}, []string, error) {
|
||||
switch query {
|
||||
case damengDatabaseQueries[0], damengDatabaseQueries[1], damengDatabaseQueries[2], damengDatabaseQueries[3], damengDatabaseQueries[4], damengDatabaseQueries[5]:
|
||||
return []map[string]interface{}{}, nil, nil
|
||||
case damengDatabaseQueries[6]:
|
||||
return []map[string]interface{}{{"OWNER": "BIZ"}, {"OWNER": "audit"}}, nil, nil
|
||||
case damengDatabaseQueries[7]:
|
||||
return []map[string]interface{}{{"OWNER": "BIZ"}}, nil, nil
|
||||
default:
|
||||
return nil, nil, nil
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("collectDamengDatabaseNames 返回错误: %v", err)
|
||||
}
|
||||
|
||||
want := []string{"audit", "BIZ"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected database names, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCollectDamengDatabaseNames_ReturnsErrorWhenNoNameResolved(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expectErr := errors.New("last query failed")
|
||||
got, err := collectDamengDatabaseNames(func(query string) ([]map[string]interface{}, []string, error) {
|
||||
if query == damengDatabaseQueries[len(damengDatabaseQueries)-1] {
|
||||
return nil, nil, expectErr
|
||||
}
|
||||
return nil, nil, errors.New("permission denied")
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatalf("期望返回错误,实际 got=%v", got)
|
||||
}
|
||||
if !errors.Is(err, expectErr) {
|
||||
t.Fatalf("错误不符合预期: %v", err)
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ package db
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Database interface {
|
||||
@@ -25,28 +26,61 @@ type BatchApplier interface {
|
||||
ApplyChanges(tableName string, changes connection.ChangeSet) error
|
||||
}
|
||||
|
||||
// Factory
|
||||
func NewDatabase(dbType string) (Database, error) {
|
||||
switch dbType {
|
||||
case "mysql":
|
||||
return &MySQLDB{}, nil
|
||||
case "postgres":
|
||||
return &PostgresDB{}, nil
|
||||
case "sqlite":
|
||||
return &SQLiteDB{}, nil
|
||||
case "oracle":
|
||||
return &OracleDB{}, nil
|
||||
case "dameng":
|
||||
return &DamengDB{}, nil
|
||||
case "kingbase":
|
||||
return &KingbaseDB{}, nil
|
||||
case "custom":
|
||||
return &CustomDB{}, nil
|
||||
default:
|
||||
// Default to MySQL for backward compatibility if empty
|
||||
if dbType == "" {
|
||||
return &MySQLDB{}, nil
|
||||
type databaseFactory func() Database
|
||||
|
||||
var databaseFactories = map[string]databaseFactory{
|
||||
"mysql": func() Database {
|
||||
return &MySQLDB{}
|
||||
},
|
||||
"postgres": func() Database {
|
||||
return &PostgresDB{}
|
||||
},
|
||||
"oracle": func() Database {
|
||||
return &OracleDB{}
|
||||
},
|
||||
"custom": func() Database {
|
||||
return &CustomDB{}
|
||||
},
|
||||
}
|
||||
|
||||
// init wires in the database factories that are only compiled behind optional
// build tags (see the //go:build gonavi_full_drivers variant of
// registerOptionalDatabaseFactories).
func init() {
	registerOptionalDatabaseFactories()
}
|
||||
|
||||
func registerDatabaseFactory(factory databaseFactory, dbTypes ...string) {
|
||||
if factory == nil || len(dbTypes) == 0 {
|
||||
return
|
||||
}
|
||||
for _, dbType := range dbTypes {
|
||||
normalized := normalizeDatabaseType(dbType)
|
||||
if normalized == "" {
|
||||
continue
|
||||
}
|
||||
return nil, fmt.Errorf("unsupported database type: %s", dbType)
|
||||
databaseFactories[normalized] = factory
|
||||
}
|
||||
}
|
||||
|
||||
// normalizeDatabaseType lower-cases and trims dbType, then maps known aliases
// to their canonical registry key.
//
// NOTE(review): "diros" looks like a typo of "doris" (Apache Doris), but it
// is the canonical key used consistently at every registration site — confirm
// with the driver layer before renaming.
func normalizeDatabaseType(dbType string) string {
	normalized := strings.ToLower(strings.TrimSpace(dbType))
	if normalized == "doris" {
		return "diros"
	}
	if normalized == "postgresql" {
		return "postgres"
	}
	return normalized
}
|
||||
|
||||
// Factory
|
||||
func NewDatabase(dbType string) (Database, error) {
|
||||
normalized := normalizeDatabaseType(dbType)
|
||||
if normalized == "" {
|
||||
normalized = "mysql"
|
||||
}
|
||||
factory, ok := databaseFactories[normalized]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unsupported database type: %s", dbType)
|
||||
}
|
||||
return factory(), nil
|
||||
}
|
||||
|
||||
19
internal/db/database_optional_factories_full.go
Normal file
19
internal/db/database_optional_factories_full.go
Normal file
@@ -0,0 +1,19 @@
|
||||
//go:build gonavi_full_drivers
|
||||
|
||||
package db
|
||||
|
||||
func registerOptionalDatabaseFactories() {
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("mariadb"), "mariadb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("diros"), "diros", "doris")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("sphinx"), "sphinx")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("sqlserver"), "sqlserver")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("sqlite"), "sqlite")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("duckdb"), "duckdb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("dameng"), "dameng")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("kingbase"), "kingbase")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("highgo"), "highgo")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("vastbase"), "vastbase")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("mongodb"), "mongodb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("tdengine"), "tdengine")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("clickhouse"), "clickhouse")
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user