mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-12 20:29:43 +08:00
Compare commits
29 Commits
release/0.
...
release/0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
797ba27d20 | ||
|
|
ed1f40e04a | ||
|
|
2b190e564f | ||
|
|
1c050aefd0 | ||
|
|
75a5a322e0 | ||
|
|
61d6197fe3 | ||
|
|
6157161293 | ||
|
|
0f843a7dcf | ||
|
|
fb65b553e9 | ||
|
|
1a5bf79dd3 | ||
|
|
dea096d4c2 | ||
|
|
04f8b266d3 | ||
|
|
b53227cb15 | ||
|
|
0246d7fae5 | ||
|
|
4aa177ed37 | ||
|
|
4f5a7bd94b | ||
|
|
00c6f9871f | ||
|
|
6a4b397ecc | ||
|
|
3973038aea | ||
|
|
71b41459e7 | ||
|
|
69942bb77e | ||
|
|
f372b20a68 | ||
|
|
e6da986927 | ||
|
|
4570516678 | ||
|
|
8c91d8929b | ||
|
|
786835c9bc | ||
|
|
f2fc7cbd05 | ||
|
|
4bfdb2cb6c | ||
|
|
494484eb92 |
26
.github/release.yaml
vendored
Normal file
26
.github/release.yaml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
changelog:
|
||||
categories:
|
||||
- title: 新功能
|
||||
labels:
|
||||
- feature
|
||||
- enhancement
|
||||
- feat
|
||||
- title: 问题修复
|
||||
labels:
|
||||
- bug
|
||||
- fix
|
||||
- title: 文档与流程
|
||||
labels:
|
||||
- docs
|
||||
- documentation
|
||||
- ci
|
||||
- workflow
|
||||
- chore
|
||||
- title: 重构与优化
|
||||
labels:
|
||||
- refactor
|
||||
- perf
|
||||
- optimization
|
||||
- title: 其他更新
|
||||
labels:
|
||||
- '*'
|
||||
1
.github/workflows/release.yml
vendored
1
.github/workflows/release.yml
vendored
@@ -550,5 +550,6 @@ jobs:
|
||||
files: release-assets/*
|
||||
draft: true
|
||||
make_latest: true
|
||||
generate_release_notes: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
342
.github/workflows/test-build-all-platforms.yml
vendored
Normal file
342
.github/workflows/test-build-all-platforms.yml
vendored
Normal file
@@ -0,0 +1,342 @@
|
||||
name: Test Build All Platforms (Manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
build_label:
|
||||
description: "测试包标识(仅用于文件名)"
|
||||
required: false
|
||||
default: "test"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: test-build-${{ github.ref }}
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build ${{ matrix.platform }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-latest
|
||||
platform: darwin/amd64
|
||||
os_name: MacOS
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-darwin-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: macos-latest
|
||||
platform: darwin/arm64
|
||||
os_name: MacOS
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-test-darwin-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/amd64
|
||||
os_name: Windows
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-windows-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: windows-latest
|
||||
platform: windows/arm64
|
||||
os_name: Windows
|
||||
arch_name: Arm64
|
||||
build_name: gonavi-test-windows-arm64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: ""
|
||||
- os: ubuntu-22.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-linux-amd64
|
||||
wails_tags: ""
|
||||
artifact_suffix: ""
|
||||
build_optional_agents: true
|
||||
linux_webkit: "4.0"
|
||||
- os: ubuntu-24.04
|
||||
platform: linux/amd64
|
||||
os_name: Linux
|
||||
arch_name: Amd64
|
||||
build_name: gonavi-test-linux-amd64-webkit41
|
||||
wails_tags: "webkit2_41"
|
||||
artifact_suffix: "-WebKit41"
|
||||
build_optional_agents: false
|
||||
linux_webkit: "4.1"
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.24'
|
||||
check-latest: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install Linux Dependencies
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev
|
||||
|
||||
if [ "${{ matrix.linux_webkit }}" = "4.1" ]; then
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libsoup-3.0-dev
|
||||
else
|
||||
sudo apt-get install -y libwebkit2gtk-4.0-dev
|
||||
fi
|
||||
|
||||
sudo apt-get install -y libfuse2 || sudo apt-get install -y libfuse2t64 || true
|
||||
|
||||
LINUXDEPLOY_URL="https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
|
||||
PLUGIN_URL="https://github.com/linuxdeploy/linuxdeploy-plugin-gtk/releases/download/continuous/linuxdeploy-plugin-gtk-x86_64.AppImage"
|
||||
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 -O /tmp/linuxdeploy "$LINUXDEPLOY_URL" || {
|
||||
echo "skip-appimage=true" >> "$GITHUB_ENV"
|
||||
}
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 -O /tmp/linuxdeploy-plugin-gtk "$PLUGIN_URL" || {
|
||||
echo "skip-appimage=true" >> "$GITHUB_ENV"
|
||||
}
|
||||
|
||||
if [ "${skip-appimage:-false}" != "true" ]; then
|
||||
chmod +x /tmp/linuxdeploy /tmp/linuxdeploy-plugin-gtk
|
||||
fi
|
||||
|
||||
- name: Install Wails
|
||||
run: go install github.com/wailsapp/wails/v2/cmd/wails@v2.11.0
|
||||
|
||||
- name: Setup MSYS2 Toolchain For DuckDB (Windows AMD64)
|
||||
id: msys2_duckdb
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
continue-on-error: true
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: UCRT64
|
||||
update: true
|
||||
install: >-
|
||||
mingw-w64-ucrt-x86_64-gcc
|
||||
|
||||
- name: Configure DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
function Find-MingwBin([string[]]$candidates) {
|
||||
foreach ($bin in $candidates) {
|
||||
if ([string]::IsNullOrWhiteSpace($bin)) {
|
||||
continue
|
||||
}
|
||||
$gcc = Join-Path $bin 'gcc.exe'
|
||||
$gxx = Join-Path $bin 'g++.exe'
|
||||
if ((Test-Path $gcc) -and (Test-Path $gxx)) {
|
||||
return $bin
|
||||
}
|
||||
}
|
||||
return $null
|
||||
}
|
||||
|
||||
$msys2Location = "${{ steps.msys2_duckdb.outputs['msys2-location'] }}"
|
||||
$candidateBins = @()
|
||||
if (-not [string]::IsNullOrWhiteSpace($msys2Location)) {
|
||||
$candidateBins += Join-Path $msys2Location 'ucrt64\bin'
|
||||
}
|
||||
$candidateBins += @(
|
||||
'C:\msys64\ucrt64\bin',
|
||||
'D:\a\_temp\msys64\ucrt64\bin'
|
||||
)
|
||||
$candidateBins = @($candidateBins | Select-Object -Unique)
|
||||
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
if (-not $mingwBin) {
|
||||
Write-Error "❌ 未找到可用的 DuckDB UCRT64 编译器。"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$gcc = Join-Path $mingwBin 'gcc.exe'
|
||||
$gxx = Join-Path $mingwBin 'g++.exe'
|
||||
"$mingwBin" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
"CC=$gcc" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
"CXX=$gxx" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
|
||||
- name: Build App
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
BUILD_LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$BUILD_LABEL" ]; then
|
||||
BUILD_LABEL="test"
|
||||
fi
|
||||
APP_VERSION="${BUILD_LABEL}-${GITHUB_RUN_NUMBER}"
|
||||
if [ -n "${{ matrix.wails_tags }}" ]; then
|
||||
wails build -platform "${{ matrix.platform }}" -clean -o "${{ matrix.build_name }}" -tags "${{ matrix.wails_tags }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
else
|
||||
wails build -platform "${{ matrix.platform }}" -clean -o "${{ matrix.build_name }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
fi
|
||||
|
||||
- name: Build Optional Driver Agents
|
||||
if: ${{ matrix.build_optional_agents }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TARGET_PLATFORM="${{ matrix.platform }}"
|
||||
GOOS="${TARGET_PLATFORM%%/*}"
|
||||
GOARCH="${TARGET_PLATFORM##*/}"
|
||||
DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
OUTDIR="drivers/${{ matrix.os_name }}"
|
||||
mkdir -p "$OUTDIR"
|
||||
|
||||
for DRIVER in "${DRIVERS[@]}"; do
|
||||
BUILD_DRIVER="$DRIVER"
|
||||
if [ "$DRIVER" = "doris" ]; then
|
||||
BUILD_DRIVER="diros"
|
||||
fi
|
||||
if [ "$DRIVER" = "duckdb" ] && [ "$GOOS" = "windows" ] && [ "$GOARCH" != "amd64" ]; then
|
||||
echo "跳过 DuckDB driver: ${GOOS}/${GOARCH}"
|
||||
continue
|
||||
fi
|
||||
TAG="gonavi_${BUILD_DRIVER}_driver"
|
||||
OUTPUT="${DRIVER}-driver-agent-${GOOS}-${GOARCH}"
|
||||
if [ "$GOOS" = "windows" ]; then
|
||||
OUTPUT="${OUTPUT}.exe"
|
||||
fi
|
||||
OUTPUT_PATH="${OUTDIR}/${OUTPUT}"
|
||||
if [ "$DRIVER" = "duckdb" ]; then
|
||||
CGO_ENABLED=1 GOOS="$GOOS" GOARCH="$GOARCH" go build -tags "$TAG" -trimpath -ldflags "-s -w" -o "$OUTPUT_PATH" ./cmd/optional-driver-agent
|
||||
else
|
||||
CGO_ENABLED=0 GOOS="$GOOS" GOARCH="$GOARCH" go build -tags "$TAG" -trimpath -ldflags "-s -w" -o "$OUTPUT_PATH" ./cmd/optional-driver-agent
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Package macOS
|
||||
if: contains(matrix.platform, 'darwin')
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
brew install create-dmg
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
cd build/bin
|
||||
APP_PATH=$(find . -maxdepth 1 -name "*.app" | head -n 1)
|
||||
if [ -z "$APP_PATH" ]; then
|
||||
echo "未找到 .app 应用包"
|
||||
exit 1
|
||||
fi
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
codesign --force --deep --sign - "$APP_NAME"
|
||||
ZIP_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}-run${GITHUB_RUN_NUMBER}.zip"
|
||||
DMG_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}-run${GITHUB_RUN_NUMBER}.dmg"
|
||||
mkdir -p ../../artifacts
|
||||
ditto -c -k --sequesterRsrc --keepParent "$APP_NAME" "../../artifacts/$ZIP_NAME"
|
||||
create-dmg \
|
||||
--volname "GoNavi Test Installer" \
|
||||
--window-pos 200 120 \
|
||||
--window-size 800 400 \
|
||||
--icon-size 100 \
|
||||
--icon "$APP_NAME" 200 190 \
|
||||
--hide-extension "$APP_NAME" \
|
||||
--app-drop-link 600 185 \
|
||||
"$DMG_NAME" \
|
||||
"$APP_NAME"
|
||||
mv "$DMG_NAME" "../../artifacts/$DMG_NAME"
|
||||
shasum -a 256 "../../artifacts/$ZIP_NAME" > "../../artifacts/$ZIP_NAME.sha256"
|
||||
shasum -a 256 "../../artifacts/$DMG_NAME" > "../../artifacts/$DMG_NAME.sha256"
|
||||
|
||||
- name: Package Windows
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: pwsh
|
||||
run: |
|
||||
$label = "${{ inputs.build_label }}"
|
||||
if ([string]::IsNullOrWhiteSpace($label)) { $label = 'test' }
|
||||
Set-Location build/bin
|
||||
$target = "${{ matrix.build_name }}"
|
||||
$finalExeName = "GoNavi-$label-${{ matrix.os_name }}-${{ matrix.arch_name }}-run$env:GITHUB_RUN_NUMBER.exe"
|
||||
$finalZipName = "GoNavi-$label-${{ matrix.os_name }}-${{ matrix.arch_name }}-run$env:GITHUB_RUN_NUMBER.zip"
|
||||
if (Test-Path "$target.exe") {
|
||||
$finalExe = "$target.exe"
|
||||
} elseif (Test-Path "$target") {
|
||||
Rename-Item -Path "$target" -NewName "$target.exe"
|
||||
$finalExe = "$target.exe"
|
||||
} else {
|
||||
Write-Error "未找到构建产物 '$target'"
|
||||
exit 1
|
||||
}
|
||||
New-Item -ItemType Directory -Force -Path ..\..\artifacts | Out-Null
|
||||
Copy-Item -LiteralPath $finalExe -Destination "..\..\artifacts\$finalExeName" -Force
|
||||
Compress-Archive -LiteralPath $finalExe -DestinationPath "..\..\artifacts\$finalZipName" -Force
|
||||
Get-FileHash "..\..\artifacts\$finalExeName" -Algorithm SHA256 | ForEach-Object { "{0} *{1}" -f $_.Hash.ToLower(), (Split-Path $_.Path -Leaf) } | Out-File "..\..\artifacts\$finalExeName.sha256" -Encoding ascii
|
||||
Get-FileHash "..\..\artifacts\$finalZipName" -Algorithm SHA256 | ForEach-Object { "{0} *{1}" -f $_.Hash.ToLower(), (Split-Path $_.Path -Leaf) } | Out-File "..\..\artifacts\$finalZipName.sha256" -Encoding ascii
|
||||
|
||||
- name: Package Linux
|
||||
if: contains(matrix.platform, 'linux')
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
cd build/bin
|
||||
TARGET="${{ matrix.build_name }}"
|
||||
TAR_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}-run${GITHUB_RUN_NUMBER}.tar.gz"
|
||||
APPIMAGE_NAME="GoNavi-${LABEL}-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}-run${GITHUB_RUN_NUMBER}.AppImage"
|
||||
mkdir -p ../../artifacts
|
||||
|
||||
if [ ! -f "$TARGET" ]; then
|
||||
echo "未找到构建产物 '$TARGET'"
|
||||
exit 1
|
||||
fi
|
||||
chmod +x "$TARGET"
|
||||
tar -czvf "../../artifacts/$TAR_NAME" "$TARGET"
|
||||
sha256sum "../../artifacts/$TAR_NAME" > "../../artifacts/$TAR_NAME.sha256"
|
||||
|
||||
if [ "${skip-appimage:-false}" = "true" ]; then
|
||||
echo "跳过 AppImage 打包"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
mkdir -p AppDir/usr/bin AppDir/usr/share/applications AppDir/usr/share/icons/hicolor/256x256/apps
|
||||
cp "$TARGET" AppDir/usr/bin/gonavi
|
||||
printf '%s\n' '[Desktop Entry]' 'Name=GoNavi' 'Exec=gonavi' 'Icon=gonavi' 'Type=Application' 'Categories=Development;Database;' 'Comment=Database Management Tool' > AppDir/usr/share/applications/gonavi.desktop
|
||||
cp AppDir/usr/share/applications/gonavi.desktop AppDir/gonavi.desktop
|
||||
if [ -f "../../build/appicon.png" ]; then
|
||||
cp "../../build/appicon.png" AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
cp "../../build/appicon.png" AppDir/gonavi.png
|
||||
else
|
||||
touch AppDir/gonavi.png
|
||||
cp AppDir/gonavi.png AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
fi
|
||||
export DEPLOY_GTK_VERSION=3
|
||||
/tmp/linuxdeploy --appdir AppDir --plugin gtk --output appimage || exit 0
|
||||
mv GoNavi*.AppImage "$APPIMAGE_NAME" 2>/dev/null || exit 0
|
||||
mv "$APPIMAGE_NAME" "../../artifacts/$APPIMAGE_NAME"
|
||||
sha256sum "../../artifacts/$APPIMAGE_NAME" > "../../artifacts/$APPIMAGE_NAME.sha256"
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-build-${{ matrix.build_name }}-run${{ github.run_number }}
|
||||
path: |
|
||||
artifacts/*
|
||||
drivers/**
|
||||
if-no-files-found: error
|
||||
retention-days: 7
|
||||
91
.github/workflows/test-macos-build.yml
vendored
Normal file
91
.github/workflows/test-macos-build.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
name: Test Build macOS (Manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
build_label:
|
||||
description: "测试包标识(仅用于文件名)"
|
||||
required: false
|
||||
default: "test"
|
||||
push:
|
||||
branches:
|
||||
- feature/kingbase_opt
|
||||
paths:
|
||||
- ".github/workflows/test-macos-build.yml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build-macos:
|
||||
name: Build macOS ${{ matrix.arch }}
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: darwin/amd64
|
||||
arch: amd64
|
||||
- platform: darwin/arm64
|
||||
arch: arm64
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.24.3"
|
||||
check-latest: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Install Wails
|
||||
run: go install github.com/wailsapp/wails/v2/cmd/wails@v2.11.0
|
||||
|
||||
- name: Build App
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OUTPUT_NAME="gonavi-test-${{ matrix.arch }}"
|
||||
BUILD_LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$BUILD_LABEL" ]; then
|
||||
BUILD_LABEL="test"
|
||||
fi
|
||||
APP_VERSION="${BUILD_LABEL}-${GITHUB_RUN_NUMBER}"
|
||||
wails build \
|
||||
-platform "${{ matrix.platform }}" \
|
||||
-clean \
|
||||
-o "$OUTPUT_NAME" \
|
||||
-ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${APP_VERSION}"
|
||||
|
||||
- name: Package Zip
|
||||
run: |
|
||||
set -euo pipefail
|
||||
APP_PATH="build/bin/gonavi-test-${{ matrix.arch }}.app"
|
||||
if [ ! -d "$APP_PATH" ]; then
|
||||
APP_PATH=$(find build/bin -maxdepth 1 -name "*.app" | head -n 1 || true)
|
||||
fi
|
||||
if [ -z "$APP_PATH" ] || [ ! -d "$APP_PATH" ]; then
|
||||
echo "未找到 .app 产物"
|
||||
ls -la build/bin || true
|
||||
exit 1
|
||||
fi
|
||||
LABEL="${{ inputs.build_label }}"
|
||||
if [ -z "$LABEL" ]; then
|
||||
LABEL="test"
|
||||
fi
|
||||
ZIP_NAME="GoNavi-${LABEL}-macos-${{ matrix.arch }}-run${GITHUB_RUN_NUMBER}.zip"
|
||||
mkdir -p artifacts
|
||||
ditto -c -k --sequesterRsrc --keepParent "$APP_PATH" "artifacts/$ZIP_NAME"
|
||||
shasum -a 256 "artifacts/$ZIP_NAME" > "artifacts/$ZIP_NAME.sha256"
|
||||
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: gonavi-macos-${{ matrix.arch }}-run${{ github.run_number }}
|
||||
path: artifacts/*
|
||||
if-no-files-found: error
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -19,3 +19,6 @@ GoNavi-Wails.exe
|
||||
.ace-tool/
|
||||
.claude/
|
||||
tmpclaude-*
|
||||
|
||||
CLAUDE.md
|
||||
**/CLAUDE.md
|
||||
|
||||
155
CONTRIBUTING.md
Normal file
155
CONTRIBUTING.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Contributing Guide
|
||||
|
||||
Thank you for contributing to this project.
|
||||
|
||||
This repository follows a release-first workflow: `main` is the default public branch, while releases are prepared through `release/*` branches.
|
||||
|
||||
---
|
||||
|
||||
## Branch Model
|
||||
|
||||
- `main`: stable release branch and default branch
|
||||
- `dev`: day-to-day integration branch for maintainers
|
||||
- `release/*`: release preparation branches for maintainers
|
||||
- Recommended branch names for external contributors:
|
||||
- `fix/*`: bug fixes
|
||||
- `feature/*`: new features or enhancements
|
||||
|
||||
Maintainer release flow:
|
||||
|
||||
```text
|
||||
feature/* / fix/* -> dev -> release/* -> main -> tag(vX.Y.Z)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## How External Contributors Should Open Pull Requests
|
||||
|
||||
Whether your branch is `fix/*` or `feature/*`, external contributors should **open pull requests directly against `main`**.
|
||||
|
||||
Reasons:
|
||||
|
||||
- `main` is the default branch, so the PR entry point is clearer
|
||||
- merged contributions are immediately visible on the default branch
|
||||
- maintainers can handle downstream sync and release preparation in one place
|
||||
|
||||
Recommended flow:
|
||||
|
||||
1. Fork this repository
|
||||
2. Create a branch in your fork (`fix/*` or `feature/*` is recommended)
|
||||
3. Make your changes and perform basic self-checks
|
||||
4. Push the branch to your fork
|
||||
5. Open a pull request against the `main` branch of this repository
|
||||
|
||||
---
|
||||
|
||||
## Pull Request Requirements
|
||||
|
||||
Please keep each pull request focused, reviewable, and easy to validate.
|
||||
|
||||
Recommended expectations:
|
||||
|
||||
- one pull request should address one logical change
|
||||
- use a clear title that explains the purpose
|
||||
- include the following in the description:
|
||||
- background and problem statement
|
||||
- key changes
|
||||
- impact scope
|
||||
- validation method
|
||||
- include screenshots or recordings for UI changes when helpful
|
||||
- explicitly mention risk and rollback notes for compatibility, data, or build-chain changes
|
||||
|
||||
---
|
||||
|
||||
## Merge Strategy for Maintainers
|
||||
|
||||
Pull requests merged into `main` should generally use **Squash and merge**.
|
||||
|
||||
Reasons:
|
||||
|
||||
- keeps `main` history clean and linear
|
||||
- maps each PR to a single commit on `main`
|
||||
- reduces release, audit, and rollback complexity
|
||||
|
||||
---
|
||||
|
||||
## Maintainer Sync Rules
|
||||
|
||||
Because external pull requests are merged directly into `main`, maintainers must sync `main` back to development and release branches to avoid branch drift.
|
||||
|
||||
### 1. Sync `main` -> `dev` (required)
|
||||
|
||||
The automatic GitHub Actions sync workflow has been removed.
|
||||
Maintainers should sync `main` back to `dev` manually when needed:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
### 2. Create `release/*` from `dev`
|
||||
|
||||
Before a release, create a release branch from `dev`, for example:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git checkout -b release/v0.6.0
|
||||
git push -u origin release/v0.6.0
|
||||
```
|
||||
|
||||
### 3. Release from `release/*` back to `main`
|
||||
|
||||
When release preparation is complete, merge the release branch back into `main` and create a tag:
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
git merge release/v0.6.0
|
||||
git push
|
||||
git tag v0.6.0
|
||||
git push origin v0.6.0
|
||||
```
|
||||
|
||||
### 4. Sync `main` back to `dev` after release
|
||||
|
||||
After the release, sync `main` back to `dev` manually (the automatic sync workflow has been removed — see section 1), using the same commands:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Commit Message Recommendation
|
||||
|
||||
Keep commit messages clear and easy to audit.
|
||||
|
||||
Recommended format:
|
||||
|
||||
```text
|
||||
emoji type(scope): concise description
|
||||
```
|
||||
|
||||
Examples:
|
||||
|
||||
```text
|
||||
🔧 fix(ci): fix DuckDB driver toolchain on Windows AMD64
|
||||
✨ feat(redis): add Stream data browsing support
|
||||
♻️ refactor(datagrid): optimize large-table horizontal scrolling and rendering
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Additional Notes
|
||||
|
||||
- Please include validation results for documentation, build-chain, or driver compatibility changes
|
||||
- For larger changes, opening an issue or draft PR first is recommended
|
||||
- Maintainers may ask contributors to narrow the scope if the change conflicts with the current project direction
|
||||
|
||||
Thank you for contributing.
|
||||
155
CONTRIBUTING.zh-CN.md
Normal file
155
CONTRIBUTING.zh-CN.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# 贡献指南
|
||||
|
||||
感谢你对本项目的贡献。
|
||||
|
||||
本项目采用“发布优先(`main` 为默认分支)+ `release/*` 分支发版”的协作模型。为减少分支漂移与 PR 处理成本,请在提交贡献前先阅读本指南。
|
||||
|
||||
---
|
||||
|
||||
## 分支模型
|
||||
|
||||
- `main`:稳定发布分支,也是仓库默认分支
|
||||
- `dev`:日常开发集成分支,主要供维护者使用
|
||||
- `release/*`:发布准备分支,主要供维护者使用
|
||||
- 外部贡献者建议使用以下分支命名:
|
||||
- `fix/*`:问题修复
|
||||
- `feature/*`:功能新增或增强
|
||||
|
||||
维护者发布流转如下:
|
||||
|
||||
```text
|
||||
feature/* / fix/* -> dev -> release/* -> main -> tag(vX.Y.Z)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 外部贡献者如何提 Pull Request
|
||||
|
||||
无论是 `fix/*` 还是 `feature/*`,**外部贡献者统一直接向 `main` 发起 Pull Request**。
|
||||
|
||||
这样做的原因:
|
||||
|
||||
- `main` 是默认分支,PR 入口更直观
|
||||
- 合并后贡献会直接体现在默认分支
|
||||
- 便于维护者统一做后续同步与发版整理
|
||||
|
||||
建议流程:
|
||||
|
||||
1. Fork 本仓库
|
||||
2. 从你自己的仓库创建分支(建议命名为 `fix/*` 或 `feature/*`)
|
||||
3. 完成代码修改,并进行必要自检
|
||||
4. 推送到你的远程分支
|
||||
5. 向本仓库的 `main` 分支发起 Pull Request
|
||||
|
||||
---
|
||||
|
||||
## Pull Request 要求
|
||||
|
||||
请尽量保证 PR 单一、清晰、可审核。
|
||||
|
||||
建议遵循以下要求:
|
||||
|
||||
- 一个 PR 只解决一类问题,避免混入无关改动
|
||||
- 标题清晰说明改动目的
|
||||
- 描述中说明:
|
||||
- 背景与问题
|
||||
- 变更点
|
||||
- 影响范围
|
||||
- 验证方式
|
||||
- 如涉及 UI 调整,建议附截图或录屏
|
||||
- 如涉及兼容性、数据变更或构建链路调整,请明确说明风险和回滚方式
|
||||
|
||||
---
|
||||
|
||||
## PR 合并策略(维护者)
|
||||
|
||||
`main` 分支上的 PR 建议使用 **Squash and merge**。
|
||||
|
||||
原因:
|
||||
|
||||
- 保持 `main` 历史干净、线性
|
||||
- 每个 PR 在 `main` 上对应一个清晰提交
|
||||
- 降低发布排查与回滚成本
|
||||
|
||||
---
|
||||
|
||||
## 维护者同步规则
|
||||
|
||||
由于外部 PR 会直接合入 `main`,维护者必须及时将 `main` 的变更同步到开发与发布分支,避免分支漂移。
|
||||
|
||||
### 1. main → dev 同步(必做)
|
||||
|
||||
仓库已移除 GitHub Actions 自动回灌 workflow。
|
||||
当前统一采用手动方式将 `main` 同步回 `dev`:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
### 2. 发版前从 dev 切 release/*
|
||||
|
||||
发布前由维护者基于 `dev` 创建发布分支,例如:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git checkout -b release/v0.6.0
|
||||
git push -u origin release/v0.6.0
|
||||
```
|
||||
|
||||
### 3. release/* → main 发版
|
||||
|
||||
发布准备完成后,将 `release/*` 合并回 `main`,并打标签发布:
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull
|
||||
git merge release/v0.6.0
|
||||
git push
|
||||
git tag v0.6.0
|
||||
git push origin v0.6.0
|
||||
```
|
||||
|
||||
### 4. main 回流到 dev(发版后必做)
|
||||
|
||||
发布完成后,手动将 `main` 同步回 `dev`(自动回灌 workflow 已移除,见第 1 节),执行以下命令,确保开发线与发布线一致:
|
||||
|
||||
```bash
|
||||
git checkout dev
|
||||
git pull
|
||||
git merge main
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 提交建议
|
||||
|
||||
建议保持提交信息简洁、明确,便于维护者审查与后续追踪。
|
||||
|
||||
推荐格式:
|
||||
|
||||
```text
|
||||
emoji type(scope): 中文描述
|
||||
```
|
||||
|
||||
示例:
|
||||
|
||||
```text
|
||||
🔧 fix(ci): 修复 Windows AMD64 下 DuckDB 驱动构建工具链
|
||||
✨ feat(redis): 新增 Stream 类型数据浏览支持
|
||||
♻️ refactor(datagrid): 优化大表横向滚动与渲染结构
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 其他说明
|
||||
|
||||
- 文档、构建链路、驱动兼容性相关改动,请尽量附带验证结果
|
||||
- 若改动较大,建议先提 Issue 或 Draft PR,先对齐方案再实施
|
||||
- 如提交内容与项目当前架构方向冲突,维护者可能要求收敛范围后再合并
|
||||
|
||||
感谢你的贡献。
|
||||
11
README.md
11
README.md
@@ -154,6 +154,7 @@ Artifacts are generated in `build/bin`.
|
||||
|
||||
The repository includes a release workflow.
|
||||
Push a `v*` tag to trigger automated build and release.
|
||||
Release notes are generated automatically from merged pull requests and categorized by `.github/release.yaml`.
|
||||
|
||||
Target artifacts include:
|
||||
- macOS (AMD64 / ARM64)
|
||||
@@ -200,11 +201,11 @@ If you use Linux artifacts with the `-WebKit41` suffix, prefer Debian 13 / Ubunt
|
||||
|
||||
Issues and pull requests are welcome.
|
||||
|
||||
1. Fork the repository.
|
||||
2. Create a feature branch.
|
||||
3. Commit your changes.
|
||||
4. Push to your branch.
|
||||
5. Open a pull request.
|
||||
For the full workflow, branch model, and maintainer sync rules, see:
|
||||
|
||||
- [CONTRIBUTING.md](CONTRIBUTING.md)
|
||||
|
||||
External contributors should open pull requests directly against `main`.
|
||||
|
||||
## License
|
||||
|
||||
|
||||
@@ -147,6 +147,7 @@ wails build -clean
|
||||
### 跨平台发布(GitHub Actions)
|
||||
|
||||
仓库内置发布流水线,推送 `v*` Tag 可自动构建并发布 Release。
|
||||
Release 更新说明会基于已合并 Pull Request 自动生成,并按 `.github/release.yaml` 分类。
|
||||
|
||||
支持目标:
|
||||
- macOS (AMD64 / ARM64)
|
||||
@@ -183,11 +184,11 @@ sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0
|
||||
|
||||
欢迎提交 Issue 与 Pull Request。
|
||||
|
||||
1. Fork 本仓库。
|
||||
2. 创建特性分支。
|
||||
3. 提交改动。
|
||||
4. 推送分支。
|
||||
5. 发起 Pull Request。
|
||||
完整流程、分支模型与维护者同步规则请查看:
|
||||
|
||||
- [CONTRIBUTING.zh-CN.md](CONTRIBUTING.zh-CN.md)
|
||||
|
||||
外部贡献者统一直接向 `main` 发起 Pull Request。
|
||||
|
||||
## 开源协议
|
||||
|
||||
|
||||
12
cmd/optional-driver-agent/provider_mongodb_v1.go
Normal file
12
cmd/optional-driver-agent/provider_mongodb_v1.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_mongodb_driver_v1
|
||||
|
||||
package main
|
||||
|
||||
import "GoNavi-Wails/internal/db"
|
||||
|
||||
func init() {
|
||||
agentDriverType = "mongodb"
|
||||
agentDatabaseFactory = func() db.Database {
|
||||
return &db.MongoDBV1{}
|
||||
}
|
||||
}
|
||||
@@ -1 +1 @@
|
||||
5b8157374dae5f9340e31b2d0bd2c00e
|
||||
d0f9366af59a6367ad3c7e2d4185ead4
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,4 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import React, { useState, useEffect, useMemo, useRef } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs } from 'antd';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, DBGetTables, DataSync, DataSyncAnalyze, DataSyncPreview } from '../../wailsjs/go/app/App';
|
||||
@@ -31,6 +31,118 @@ type TableOps = {
|
||||
selectedDeletePks?: string[];
|
||||
};
|
||||
|
||||
const quoteSqlIdent = (dbType: string, ident: string): string => {
|
||||
const raw = String(ident || '').trim();
|
||||
if (!raw) return raw;
|
||||
const t = String(dbType || '').toLowerCase();
|
||||
if (t === 'mysql' || t === 'mariadb' || t === 'diros' || t === 'sphinx' || t === 'clickhouse' || t === 'tdengine') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
if (t === 'sqlserver') {
|
||||
return `[${raw.replace(/]/g, ']]')}]`;
|
||||
}
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
};
|
||||
|
||||
const quoteSqlTable = (dbType: string, tableName: string): string => {
|
||||
const raw = String(tableName || '').trim();
|
||||
if (!raw) return raw;
|
||||
if (!raw.includes('.')) return quoteSqlIdent(dbType, raw);
|
||||
return raw
|
||||
.split('.')
|
||||
.map((part) => quoteSqlIdent(dbType, part))
|
||||
.join('.');
|
||||
};
|
||||
|
||||
const toSqlLiteral = (value: any, dbType: string): string => {
|
||||
if (value === null || value === undefined) return 'NULL';
|
||||
if (typeof value === 'number') return Number.isFinite(value) ? String(value) : 'NULL';
|
||||
if (typeof value === 'bigint') return value.toString();
|
||||
if (typeof value === 'boolean') {
|
||||
const t = String(dbType || '').toLowerCase();
|
||||
if (t === 'sqlserver') return value ? '1' : '0';
|
||||
return value ? 'TRUE' : 'FALSE';
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return `'${value.toISOString().replace(/'/g, "''")}'`;
|
||||
}
|
||||
if (typeof value === 'object') {
|
||||
try {
|
||||
return `'${JSON.stringify(value).replace(/'/g, "''")}'`;
|
||||
} catch {
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
}
|
||||
}
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
};
|
||||
|
||||
const buildSqlPreview = (
|
||||
previewData: any,
|
||||
tableName: string,
|
||||
dbType: string,
|
||||
ops?: TableOps,
|
||||
): { sqlText: string; statementCount: number } => {
|
||||
if (!previewData || !tableName) return { sqlText: '', statementCount: 0 };
|
||||
const tableExpr = quoteSqlTable(dbType, tableName);
|
||||
const pkCol = String(previewData.pkColumn || 'id');
|
||||
const statements: string[] = [];
|
||||
|
||||
const insertRows = Array.isArray(previewData.inserts) ? previewData.inserts : [];
|
||||
const updateRows = Array.isArray(previewData.updates) ? previewData.updates : [];
|
||||
const deleteRows = Array.isArray(previewData.deletes) ? previewData.deletes : [];
|
||||
|
||||
const selectedInsert = new Set((ops?.selectedInsertPks || []).map((v) => String(v)));
|
||||
const selectedUpdate = new Set((ops?.selectedUpdatePks || []).map((v) => String(v)));
|
||||
const selectedDelete = new Set((ops?.selectedDeletePks || []).map((v) => String(v)));
|
||||
|
||||
if (ops?.insert !== false) {
|
||||
insertRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedInsert.size > 0 && !selectedInsert.has(pk)) return;
|
||||
const row = rowWrap?.row || {};
|
||||
const columns = Object.keys(row);
|
||||
if (columns.length === 0) return;
|
||||
const colExpr = columns.map((c) => quoteSqlIdent(dbType, c)).join(', ');
|
||||
const valExpr = columns.map((c) => toSqlLiteral(row[c], dbType)).join(', ');
|
||||
statements.push(`INSERT INTO ${tableExpr} (${colExpr}) VALUES (${valExpr});`);
|
||||
});
|
||||
}
|
||||
|
||||
if (ops?.update !== false) {
|
||||
updateRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedUpdate.size > 0 && !selectedUpdate.has(pk)) return;
|
||||
const source = rowWrap?.source || {};
|
||||
const changedColumns = Array.isArray(rowWrap?.changedColumns)
|
||||
? rowWrap.changedColumns
|
||||
: Object.keys(source).filter((k) => k !== pkCol);
|
||||
const setCols = changedColumns.filter((c: string) => String(c) !== pkCol);
|
||||
if (setCols.length === 0) return;
|
||||
const setExpr = setCols
|
||||
.map((c: string) => `${quoteSqlIdent(dbType, c)} = ${toSqlLiteral(source[c], dbType)}`)
|
||||
.join(', ');
|
||||
statements.push(
|
||||
`UPDATE ${tableExpr} SET ${setExpr} WHERE ${quoteSqlIdent(dbType, pkCol)} = ${toSqlLiteral(pk, dbType)};`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
if (ops?.delete) {
|
||||
deleteRows.forEach((rowWrap: any) => {
|
||||
const pk = String(rowWrap?.pk ?? '');
|
||||
if (selectedDelete.size > 0 && !selectedDelete.has(pk)) return;
|
||||
statements.push(
|
||||
`DELETE FROM ${tableExpr} WHERE ${quoteSqlIdent(dbType, pkCol)} = ${toSqlLiteral(pk, dbType)};`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
sqlText: statements.join('\n'),
|
||||
statementCount: statements.length,
|
||||
};
|
||||
};
|
||||
|
||||
const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open, onClose }) => {
|
||||
const connections = useStore((state) => state.connections);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
@@ -152,32 +264,38 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setSourceConnId(connId);
|
||||
setSourceDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setSourceDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
const dbRows = Array.isArray(res.data) ? res.data : [];
|
||||
setSourceDbs(dbRows
|
||||
.map((r: any) => r?.Database || r?.database || r?.username)
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== ''));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleTargetConnChange = async (connId: string) => {
|
||||
setTargetConnId(connId);
|
||||
setTargetDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setTargetDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
const dbRows = Array.isArray(res.data) ? res.data : [];
|
||||
setTargetDbs(dbRows
|
||||
.map((r: any) => r?.Database || r?.database || r?.username)
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== ''));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const nextToTables = async () => {
|
||||
@@ -189,14 +307,17 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
try {
|
||||
const conn = connections.find(c => c.id === sourceConnId);
|
||||
if (conn) {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tables = (res.data as any[]).map((row: any) => row.Table || row.table || row.TABLE_NAME || Object.values(row)[0]);
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tableRows = Array.isArray(res.data) ? res.data : [];
|
||||
const tables = tableRows
|
||||
.map((row: any) => row?.Table || row?.table || row?.TABLE_NAME || Object.values(row || {})[0])
|
||||
.filter((name: any) => typeof name === 'string' && name.trim() !== '');
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
message.error(res.message);
|
||||
}
|
||||
}
|
||||
@@ -402,6 +523,13 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
);
|
||||
};
|
||||
|
||||
const previewSql = useMemo(() => {
|
||||
if (!previewData || !previewTable) return { sqlText: '', statementCount: 0 };
|
||||
const targetType = String(connections.find(c => c.id === targetConnId)?.config?.type || '');
|
||||
const ops = tableOptions[previewTable] || { insert: true, update: true, delete: false };
|
||||
return buildSqlPreview(previewData, previewTable, targetType, ops);
|
||||
}, [previewData, previewTable, targetConnId, connections, tableOptions]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal
|
||||
@@ -794,6 +922,51 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'sql',
|
||||
label: `SQL(${previewSql.statementCount})`,
|
||||
children: (
|
||||
<div>
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message="SQL 预览会按当前勾选的插入/更新/删除与行选择范围生成,用于审核确认。"
|
||||
/>
|
||||
<div style={{ marginTop: 8, marginBottom: 8, display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">共 {previewSql.statementCount} 条语句(预览数据最多 200 条/类型)</Text>
|
||||
<Button
|
||||
size="small"
|
||||
disabled={!previewSql.sqlText}
|
||||
onClick={async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(previewSql.sqlText || '');
|
||||
message.success('SQL 已复制');
|
||||
} catch {
|
||||
message.error('复制失败,请手动复制');
|
||||
}
|
||||
}}
|
||||
>
|
||||
复制 SQL
|
||||
</Button>
|
||||
</div>
|
||||
<pre
|
||||
style={{
|
||||
margin: 0,
|
||||
padding: 10,
|
||||
border: '1px solid #f0f0f0',
|
||||
borderRadius: 6,
|
||||
background: '#fafafa',
|
||||
maxHeight: 420,
|
||||
overflow: 'auto',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-word'
|
||||
}}
|
||||
>
|
||||
{previewSql.sqlText || '-- 当前勾选范围下无 SQL 可预览'}
|
||||
</pre>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef, useMemo } from 'react';
|
||||
import { message } from 'antd';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { buildOrderBySQL, buildWhereSQL, quoteIdentPart, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
import { buildMongoCountCommand, buildMongoFilter, buildMongoFindCommand, buildMongoSort } from '../utils/mongodb';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
|
||||
type ViewerPaginationState = {
|
||||
@@ -151,7 +152,56 @@ const reverseOrderBySQL = (orderBySQL: string): string => {
|
||||
return ` ORDER BY ${parts.join(', ')}`;
|
||||
};
|
||||
|
||||
type ViewerFilterSnapshot = {
|
||||
showFilter: boolean;
|
||||
conditions: FilterCondition[];
|
||||
currentPage: number;
|
||||
pageSize: number;
|
||||
sortInfo: { columnKey: string, order: string } | null;
|
||||
scrollTop: number;
|
||||
scrollLeft: number;
|
||||
};
|
||||
|
||||
type ViewerScrollSnapshot = {
|
||||
top: number;
|
||||
left: number;
|
||||
};
|
||||
|
||||
const viewerFilterSnapshotsByTab = new Map<string, ViewerFilterSnapshot>();
|
||||
|
||||
const normalizeViewerFilterConditions = (conditions: FilterCondition[] | undefined): FilterCondition[] => {
|
||||
if (!Array.isArray(conditions)) return [];
|
||||
return conditions.map((cond) => ({
|
||||
id: Number.isFinite(Number(cond?.id)) ? Number(cond?.id) : undefined,
|
||||
enabled: cond?.enabled !== false,
|
||||
logic: String(cond?.logic || '').trim().toUpperCase() === 'OR' ? 'OR' : 'AND',
|
||||
column: String(cond?.column || ''),
|
||||
op: String(cond?.op || '='),
|
||||
value: String(cond?.value ?? ''),
|
||||
value2: String(cond?.value2 ?? ''),
|
||||
}));
|
||||
};
|
||||
|
||||
const getViewerFilterSnapshot = (tabId: string): ViewerFilterSnapshot => {
|
||||
const cached = viewerFilterSnapshotsByTab.get(String(tabId || '').trim());
|
||||
if (!cached) {
|
||||
return { showFilter: false, conditions: [], currentPage: 1, pageSize: 100, sortInfo: null, scrollTop: 0, scrollLeft: 0 };
|
||||
}
|
||||
return {
|
||||
showFilter: cached.showFilter === true,
|
||||
conditions: normalizeViewerFilterConditions(cached.conditions),
|
||||
currentPage: Number.isFinite(Number(cached.currentPage)) && Number(cached.currentPage) > 0 ? Number(cached.currentPage) : 1,
|
||||
pageSize: Number.isFinite(Number(cached.pageSize)) && Number(cached.pageSize) > 0 ? Number(cached.pageSize) : 100,
|
||||
sortInfo: cached.sortInfo && cached.sortInfo.columnKey && (cached.sortInfo.order === 'ascend' || cached.sortInfo.order === 'descend')
|
||||
? { columnKey: String(cached.sortInfo.columnKey), order: cached.sortInfo.order }
|
||||
: null,
|
||||
scrollTop: Number.isFinite(Number(cached.scrollTop)) ? Number(cached.scrollTop) : 0,
|
||||
scrollLeft: Number.isFinite(Number(cached.scrollLeft)) ? Number(cached.scrollLeft) : 0,
|
||||
};
|
||||
};
|
||||
|
||||
const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const initialViewerSnapshot = useMemo(() => getViewerFilterSnapshot(tab.id), [tab.id]);
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
const [columnNames, setColumnNames] = useState<string[]>([]);
|
||||
const [pkColumns, setPkColumns] = useState<string[]>([]);
|
||||
@@ -172,10 +222,15 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const latestDbNameRef = useRef<string>('');
|
||||
const latestCountSqlRef = useRef<string>('');
|
||||
const latestCountKeyRef = useRef<string>('');
|
||||
const scrollSnapshotRef = useRef<ViewerScrollSnapshot>({
|
||||
top: initialViewerSnapshot.scrollTop,
|
||||
left: initialViewerSnapshot.scrollLeft,
|
||||
});
|
||||
const initialLoadRef = useRef(false);
|
||||
|
||||
const [pagination, setPagination] = useState<ViewerPaginationState>({
|
||||
current: 1,
|
||||
pageSize: 100,
|
||||
current: initialViewerSnapshot.currentPage,
|
||||
pageSize: initialViewerSnapshot.pageSize,
|
||||
total: 0,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
@@ -183,10 +238,10 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
totalCountCancelled: false,
|
||||
});
|
||||
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(null);
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(initialViewerSnapshot.sortInfo);
|
||||
|
||||
const [showFilter, setShowFilter] = useState(false);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>([]);
|
||||
const [showFilter, setShowFilter] = useState<boolean>(initialViewerSnapshot.showFilter);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>(initialViewerSnapshot.conditions);
|
||||
const duckdbSafeSelectCacheRef = useRef<Record<string, string>>({});
|
||||
const currentConnConfig = connections.find(c => c.id === tab.connectionId)?.config;
|
||||
const currentConnCaps = getDataSourceCapabilities(currentConnConfig);
|
||||
@@ -194,6 +249,28 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const forceReadOnly = currentConnCaps.forceReadOnlyQueryResult;
|
||||
|
||||
useEffect(() => {
|
||||
const snapshot = getViewerFilterSnapshot(tab.id);
|
||||
setShowFilter(snapshot.showFilter);
|
||||
setFilterConditions(snapshot.conditions);
|
||||
setSortInfo(snapshot.sortInfo);
|
||||
scrollSnapshotRef.current = { top: snapshot.scrollTop, left: snapshot.scrollLeft };
|
||||
initialLoadRef.current = false;
|
||||
}, [tab.id]);
|
||||
|
||||
useEffect(() => {
|
||||
viewerFilterSnapshotsByTab.set(tab.id, {
|
||||
showFilter,
|
||||
conditions: normalizeViewerFilterConditions(filterConditions),
|
||||
currentPage: pagination.current,
|
||||
pageSize: pagination.pageSize,
|
||||
sortInfo,
|
||||
scrollTop: scrollSnapshotRef.current.top,
|
||||
scrollLeft: scrollSnapshotRef.current.left,
|
||||
});
|
||||
}, [tab.id, showFilter, filterConditions, pagination.current, pagination.pageSize, sortInfo]);
|
||||
|
||||
useEffect(() => {
|
||||
const snapshot = getViewerFilterSnapshot(tab.id);
|
||||
setPkColumns([]);
|
||||
pkKeyRef.current = '';
|
||||
countKeyRef.current = '';
|
||||
@@ -205,16 +282,29 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
latestDbNameRef.current = '';
|
||||
latestCountSqlRef.current = '';
|
||||
latestCountKeyRef.current = '';
|
||||
scrollSnapshotRef.current = { top: snapshot.scrollTop, left: snapshot.scrollLeft };
|
||||
initialLoadRef.current = false;
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
current: 1,
|
||||
current: snapshot.currentPage,
|
||||
pageSize: snapshot.pageSize,
|
||||
total: 0,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
}, [tab.connectionId, tab.dbName, tab.tableName]);
|
||||
}, [tab.id, tab.connectionId, tab.dbName, tab.tableName]);
|
||||
|
||||
const handleTableScrollSnapshotChange = useCallback((snapshot: ViewerScrollSnapshot) => {
|
||||
scrollSnapshotRef.current = snapshot;
|
||||
const cached = getViewerFilterSnapshot(tab.id);
|
||||
viewerFilterSnapshotsByTab.set(tab.id, {
|
||||
...cached,
|
||||
scrollTop: snapshot.top,
|
||||
scrollLeft: snapshot.left,
|
||||
});
|
||||
}, [tab.id]);
|
||||
|
||||
const handleDuckDBManualCount = useCallback(async () => {
|
||||
if (latestDbTypeRef.current !== 'duckdb') {
|
||||
@@ -315,42 +405,67 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const dbName = tab.dbName || '';
|
||||
const tableName = tab.tableName || '';
|
||||
const isMongoDB = dbTypeLower === 'mongodb';
|
||||
let mongoFilter: Record<string, unknown> | undefined;
|
||||
if (isMongoDB) {
|
||||
try {
|
||||
mongoFilter = buildMongoFilter(filterConditions);
|
||||
} catch (e: any) {
|
||||
message.error(`Mongo 筛选条件无效:${String(e?.message || e || '解析失败')}`);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
|
||||
const baseSql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
const orderBySQL = buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
let sql = `${baseSql}${orderBySQL}`;
|
||||
const whereSQL = isMongoDB
|
||||
? JSON.stringify(mongoFilter || {})
|
||||
: buildWhereSQL(dbType, filterConditions);
|
||||
const countSql = isMongoDB
|
||||
? buildMongoCountCommand(tableName, mongoFilter || {})
|
||||
: `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
const orderBySQL = isMongoDB ? '' : buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const totalRows = Number(pagination.total);
|
||||
const hasFiniteTotal = Number.isFinite(totalRows) && totalRows >= 0;
|
||||
const totalKnown = pagination.totalKnown && hasFiniteTotal;
|
||||
const totalPages = hasFiniteTotal ? Math.max(1, Math.ceil(totalRows / size)) : 0;
|
||||
const currentPage = totalPages > 0 ? Math.min(Math.max(1, page), totalPages) : Math.max(1, page);
|
||||
const offset = (currentPage - 1) * size;
|
||||
const isClickHouse = dbTypeLower === 'clickhouse';
|
||||
const isClickHouse = !isMongoDB && dbTypeLower === 'clickhouse';
|
||||
const reverseOrderSQL = isClickHouse ? reverseOrderBySQL(orderBySQL) : '';
|
||||
let useClickHouseReversePagination = false;
|
||||
let clickHouseReverseLimit = 0;
|
||||
let clickHouseReverseHasMore = false;
|
||||
// ClickHouse 深分页在超大 OFFSET 下容易超时。对于总数已知且存在 ORDER BY 的场景,
|
||||
// 当“尾部偏移”小于“头部偏移”时,改为反向 ORDER BY + 小 OFFSET,并在前端翻转结果。
|
||||
if (isClickHouse && totalKnown && offset > 0 && reverseOrderSQL) {
|
||||
const pageRowCount = Math.max(0, Math.min(size, totalRows - offset));
|
||||
if (pageRowCount > 0) {
|
||||
const tailOffset = Math.max(0, totalRows - (offset + pageRowCount));
|
||||
if (tailOffset < offset) {
|
||||
sql = `${baseSql}${reverseOrderSQL} LIMIT ${pageRowCount} OFFSET ${tailOffset}`;
|
||||
useClickHouseReversePagination = true;
|
||||
clickHouseReverseLimit = pageRowCount;
|
||||
clickHouseReverseHasMore = currentPage < totalPages;
|
||||
let sql = '';
|
||||
if (isMongoDB) {
|
||||
const mongoSort = buildMongoSort(sortInfo, pkColumns);
|
||||
sql = buildMongoFindCommand({
|
||||
collection: tableName,
|
||||
filter: mongoFilter || {},
|
||||
sort: mongoSort,
|
||||
limit: size + 1,
|
||||
skip: offset,
|
||||
});
|
||||
} else {
|
||||
const baseSql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql = `${baseSql}${orderBySQL}`;
|
||||
// ClickHouse 深分页在超大 OFFSET 下容易超时。对于总数已知且存在 ORDER BY 的场景,
|
||||
// 当“尾部偏移”小于“头部偏移”时,改为反向 ORDER BY + 小 OFFSET,并在前端翻转结果。
|
||||
if (isClickHouse && totalKnown && offset > 0 && reverseOrderSQL) {
|
||||
const pageRowCount = Math.max(0, Math.min(size, totalRows - offset));
|
||||
if (pageRowCount > 0) {
|
||||
const tailOffset = Math.max(0, totalRows - (offset + pageRowCount));
|
||||
if (tailOffset < offset) {
|
||||
sql = `${baseSql}${reverseOrderSQL} LIMIT ${pageRowCount} OFFSET ${tailOffset}`;
|
||||
useClickHouseReversePagination = true;
|
||||
clickHouseReverseLimit = pageRowCount;
|
||||
clickHouseReverseHasMore = currentPage < totalPages;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!useClickHouseReversePagination) {
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
if (!useClickHouseReversePagination) {
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
}
|
||||
}
|
||||
|
||||
const requestStartTime = Date.now();
|
||||
@@ -676,9 +791,32 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const handleToggleFilter = useCallback(() => setShowFilter(prev => !prev), []);
|
||||
const handleApplyFilter = useCallback((conditions: FilterCondition[]) => setFilterConditions(conditions), []);
|
||||
|
||||
const exportSqlWithFilter = useMemo(() => {
|
||||
const tableName = String(tab.tableName || '').trim();
|
||||
const dbType = String(currentConnConfig?.type || '').trim();
|
||||
if (!tableName || !dbType) return '';
|
||||
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
if (!whereSQL) return '';
|
||||
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const normalizedType = dbType.toLowerCase();
|
||||
const hasExplicitSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
if (hasExplicitSort && (normalizedType === 'mysql' || normalizedType === 'mariadb')) {
|
||||
sql = withSortBufferTuningSQL(normalizedType, sql, 32 * 1024 * 1024);
|
||||
}
|
||||
return sql;
|
||||
}, [tab.tableName, currentConnConfig?.type, filterConditions, sortInfo, pkColumns]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
if (!initialLoadRef.current) {
|
||||
initialLoadRef.current = true;
|
||||
fetchData(pagination.current, pagination.pageSize);
|
||||
return;
|
||||
}
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab.id, tab.connectionId, tab.dbName, tab.tableName, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
|
||||
return (
|
||||
<div style={{ flex: '1 1 auto', minHeight: 0, minWidth: 0, height: '100%', width: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column' }}>
|
||||
@@ -700,8 +838,12 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
showFilter={showFilter}
|
||||
onToggleFilter={handleToggleFilter}
|
||||
onApplyFilter={handleApplyFilter}
|
||||
appliedFilterConditions={filterConditions}
|
||||
readOnly={forceReadOnly}
|
||||
sortInfoExternal={sortInfo}
|
||||
exportSqlWithFilter={exportSqlWithFilter || undefined}
|
||||
scrollSnapshot={scrollSnapshotRef.current}
|
||||
onScrollSnapshotChange={handleTableScrollSnapshotChange}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Alert, Button, Collapse, Input, Modal, Progress, Select, Space, Switch,
|
||||
import { DeleteOutlined, DownloadOutlined, FileSearchOutlined, FolderOpenOutlined, InfoCircleFilled, ReloadOutlined } from '@ant-design/icons';
|
||||
import { EventsOn } from '../../wailsjs/runtime/runtime';
|
||||
import { useStore } from '../store';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
import {
|
||||
CheckDriverNetworkStatus,
|
||||
DownloadDriverPackage,
|
||||
@@ -166,7 +166,8 @@ const DriverManagerModal: React.FC<{ open: boolean; onClose: () => void; onOpenG
|
||||
const theme = useStore((state) => state.theme);
|
||||
const appearance = useStore((state) => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const opacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const opacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
const modalContentRef = useRef<HTMLDivElement | null>(null);
|
||||
const tableContainerRef = useRef<HTMLDivElement | null>(null);
|
||||
const tableScrollTargetsRef = useRef<HTMLElement[]>([]);
|
||||
@@ -1223,7 +1224,7 @@ const DriverManagerModal: React.FC<{ open: boolean; onClose: () => void; onOpenG
|
||||
paddingRight: 18,
|
||||
},
|
||||
}}
|
||||
destroyOnClose
|
||||
destroyOnHidden
|
||||
footer={(
|
||||
<div className="driver-manager-footer">
|
||||
<div
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import React, { useRef, useEffect } from 'react';
|
||||
import { Table, Tag, Button, Tooltip } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, CaretRightOutlined, BugOutlined } from '@ant-design/icons';
|
||||
import { Table, Tag, Button, Tooltip, Empty } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, BugOutlined, ClockCircleOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
|
||||
interface LogPanelProps {
|
||||
height: number;
|
||||
@@ -16,7 +16,8 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
const theme = useStore(state => state.theme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const opacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const opacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
|
||||
// Background Helper
|
||||
const getBg = (darkHex: string) => {
|
||||
@@ -28,10 +29,25 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
return `rgba(${r}, ${g}, ${b}, ${opacity})`;
|
||||
};
|
||||
const bgMain = getBg('#1d1d1d');
|
||||
const panelDividerColor = darkMode ? 'rgba(255,255,255,0.08)' : 'rgba(0,0,0,0.08)';
|
||||
const shellOpacity = darkMode ? Math.max(0.18, opacity * 0.82) : Math.max(0.28, opacity * 0.92);
|
||||
const shellOpacityStrong = darkMode ? Math.max(0.22, opacity * 0.9) : Math.max(0.34, opacity * 0.96);
|
||||
const panelDividerColor = darkMode
|
||||
? `rgba(255,255,255,${Math.max(0.04, opacity * 0.10)})`
|
||||
: `rgba(0,0,0,${Math.max(0.04, opacity * 0.08)})`;
|
||||
const panelMutedTextColor = darkMode ? 'rgba(255,255,255,0.62)' : 'rgba(0,0,0,0.58)';
|
||||
const logScrollbarThumb = darkMode ? 'rgba(255, 255, 255, 0.34)' : 'rgba(0, 0, 0, 0.26)';
|
||||
const logScrollbarThumbHover = darkMode ? 'rgba(255, 255, 255, 0.5)' : 'rgba(0, 0, 0, 0.36)';
|
||||
const panelShellBg = darkMode
|
||||
? `linear-gradient(180deg, rgba(15,20,30,${shellOpacity}) 0%, rgba(9,13,22,${shellOpacityStrong}) 100%)`
|
||||
: `linear-gradient(180deg, rgba(255,255,255,${shellOpacityStrong}) 0%, rgba(246,248,252,${shellOpacity}) 100%)`;
|
||||
const panelAccentColor = darkMode ? '#ffd666' : '#1677ff';
|
||||
const panelShadow = darkMode
|
||||
? `0 12px 28px rgba(0,0,0,${Math.max(0.05, opacity * 0.18)})`
|
||||
: `0 12px 24px rgba(15,23,42,${Math.max(0.02, opacity * 0.08)})`;
|
||||
const logScrollbarThumb = darkMode
|
||||
? `rgba(255, 255, 255, ${Math.max(0.18, opacity * 0.34)})`
|
||||
: `rgba(0, 0, 0, ${Math.max(0.12, opacity * 0.26)})`;
|
||||
const logScrollbarThumbHover = darkMode
|
||||
? `rgba(255, 255, 255, ${Math.max(0.28, opacity * 0.48)})`
|
||||
: `rgba(0, 0, 0, ${Math.max(0.18, opacity * 0.36)})`;
|
||||
|
||||
const columns = [
|
||||
{
|
||||
@@ -45,7 +61,7 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
dataIndex: 'status',
|
||||
width: 70,
|
||||
render: (status: string) => (
|
||||
<Tag color={status === 'success' ? 'success' : 'error'} style={{ marginRight: 0 }}>
|
||||
<Tag color={status === 'success' ? 'success' : 'error'} style={{ marginRight: 0, borderRadius: 999, paddingInline: 8, fontSize: 11, fontWeight: 700 }}>
|
||||
{status === 'success' ? 'OK' : 'ERR'}
|
||||
</Tag>
|
||||
)
|
||||
@@ -60,7 +76,7 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
title: 'SQL / Message',
|
||||
dataIndex: 'sql',
|
||||
render: (text: string, record: any) => (
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.2' }}>
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.45' }}>
|
||||
<div style={{ color: darkMode ? '#a6e22e' : '#005cc5' }}>{text}</div>
|
||||
{record.message && <div style={{ color: '#ff4d4f', marginTop: 2 }}>{record.message}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: panelMutedTextColor, marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
@@ -72,12 +88,18 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
return (
|
||||
<div style={{
|
||||
height,
|
||||
borderTop: `1px solid ${panelDividerColor}`,
|
||||
background: bgMain,
|
||||
margin: 0,
|
||||
border: `1px solid ${panelDividerColor}`,
|
||||
borderRadius: 14,
|
||||
background: panelShellBg,
|
||||
WebkitBackdropFilter: opacity < 0.999 ? 'blur(14px)' : 'none',
|
||||
boxShadow: panelShadow,
|
||||
backdropFilter: darkMode && opacity < 0.999 ? 'blur(18px)' : 'none',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
position: 'relative',
|
||||
zIndex: 100 // Ensure above other content
|
||||
overflow: 'hidden',
|
||||
zIndex: 100
|
||||
}}>
|
||||
{/* Resize Handle */}
|
||||
<div
|
||||
@@ -95,38 +117,53 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
|
||||
{/* Toolbar */}
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
padding: '10px 14px',
|
||||
borderBottom: `1px solid ${panelDividerColor}`,
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
height: 32
|
||||
gap: 12,
|
||||
minHeight: 48
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8, fontWeight: 'bold', fontSize: '12px' }}>
|
||||
<BugOutlined /> SQL 执行日志
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 10, minWidth: 0 }}>
|
||||
<div style={{ width: 30, height: 30, borderRadius: 10, display: 'grid', placeItems: 'center', background: darkMode ? `rgba(255,214,102,${Math.max(0.10, Math.min(0.18, opacity * 0.18))})` : `rgba(24,144,255,${Math.max(0.08, Math.min(0.16, opacity * 0.16))})`, color: panelAccentColor, flexShrink: 0 }}>
|
||||
<BugOutlined />
|
||||
</div>
|
||||
<div style={{ minWidth: 0 }}>
|
||||
<div style={{ fontWeight: 700, fontSize: 13, color: darkMode ? '#f5f7ff' : '#162033' }}>SQL 执行日志</div>
|
||||
<div style={{ fontSize: 12, color: panelMutedTextColor }}>记录执行状态、耗时与错误信息,便于快速回溯。</div>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
|
||||
<Tooltip title="清空日志">
|
||||
<Button type="text" size="small" icon={<ClearOutlined />} onClick={clearSqlLogs} />
|
||||
<Button type="text" size="small" icon={<ClearOutlined />} onClick={clearSqlLogs} style={{ color: panelMutedTextColor }} />
|
||||
</Tooltip>
|
||||
<Tooltip title="关闭面板">
|
||||
<Button type="text" size="small" icon={<CloseOutlined />} onClick={onClose} />
|
||||
<Button type="text" size="small" icon={<CloseOutlined />} onClick={onClose} style={{ color: panelMutedTextColor }} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* List */}
|
||||
<div className="log-panel-scroll" style={{ flex: 1, overflow: 'auto' }}>
|
||||
<Table
|
||||
className="log-panel-table"
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
showHeader={false}
|
||||
// scroll={{ y: height - 32 }} // Let flex handle it
|
||||
/>
|
||||
<div className="log-panel-scroll" style={{ flex: 1, overflow: 'auto', padding: '8px 10px 10px' }}>
|
||||
{sqlLogs.length === 0 ? (
|
||||
<div style={{ height: '100%', minHeight: 160, display: 'grid', placeItems: 'center' }}>
|
||||
<Empty
|
||||
image={Empty.PRESENTED_IMAGE_SIMPLE}
|
||||
description={<span style={{ color: panelMutedTextColor }}>暂无 SQL 执行日志</span>}
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
<Table
|
||||
className="log-panel-table"
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey="id"
|
||||
showHeader={false}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<style>{`
|
||||
.log-panel-scroll {
|
||||
@@ -156,6 +193,16 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
background: transparent !important;
|
||||
}
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
padding: 8px 10px !important;
|
||||
border-bottom: 1px solid ${panelDividerColor} !important;
|
||||
}
|
||||
.log-panel-table .ant-table-tbody > tr:last-child > td {
|
||||
border-bottom: none !important;
|
||||
}
|
||||
.log-panel-table .ant-table-row:hover > td {
|
||||
background: ${darkMode ? 'rgba(255,255,255,0.03)' : 'rgba(16,24,40,0.03)'} !important;
|
||||
}
|
||||
`}</style>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
import React, { useState, useEffect, useRef, useMemo } from 'react';
|
||||
import Editor, { OnMount } from '@monaco-editor/react';
|
||||
import { Button, message, Modal, Input, Form, Dropdown, MenuProps, Tooltip, Select, Tabs } from 'antd';
|
||||
import { PlayCircleOutlined, SaveOutlined, FormatPainterOutlined, SettingOutlined, CloseOutlined } from '@ant-design/icons';
|
||||
import { PlayCircleOutlined, SaveOutlined, FormatPainterOutlined, SettingOutlined, CloseOutlined, StopOutlined } from '@ant-design/icons';
|
||||
import { format } from 'sql-formatter';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetTables, DBGetAllColumns, DBGetDatabases, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import { DBQuery, DBQueryWithCancel, DBGetTables, DBGetAllColumns, DBGetDatabases, DBGetColumns, CancelQuery, GenerateQueryID } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
import { convertMongoShellToJsonCommand } from '../utils/mongodb';
|
||||
import { getShortcutDisplay, isEditableElement, isShortcutMatch } from '../utils/shortcuts';
|
||||
|
||||
const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [query, setQuery] = useState(tab.query || 'SELECT * FROM ');
|
||||
@@ -30,7 +33,9 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [activeResultKey, setActiveResultKey] = useState<string>('');
|
||||
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [currentQueryId, setCurrentQueryId] = useState<string>('');
|
||||
const runSeqRef = useRef(0);
|
||||
const currentQueryIdRef = useRef('');
|
||||
const [isSaveModalOpen, setIsSaveModalOpen] = useState(false);
|
||||
const [saveForm] = Form.useForm();
|
||||
|
||||
@@ -43,6 +48,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [editorHeight, setEditorHeight] = useState(300);
|
||||
const editorRef = useRef<any>(null);
|
||||
const monacoRef = useRef<any>(null);
|
||||
const lastExternalQueryRef = useRef<string>(tab.query || '');
|
||||
const dragRef = useRef<{ startY: number, startHeight: number } | null>(null);
|
||||
const tablesRef = useRef<{dbName: string, tableName: string}[]>([]); // Store tables for autocomplete (cross-db)
|
||||
const allColumnsRef = useRef<{dbName: string, tableName: string, name: string, type: string}[]>([]); // Store all columns (cross-db)
|
||||
@@ -65,6 +71,8 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const setSqlFormatOptions = useStore(state => state.setSqlFormatOptions);
|
||||
const queryOptions = useStore(state => state.queryOptions);
|
||||
const setQueryOptions = useStore(state => state.setQueryOptions);
|
||||
const shortcutOptions = useStore(state => state.shortcutOptions);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
|
||||
useEffect(() => {
|
||||
currentConnectionIdRef.current = currentConnectionId;
|
||||
@@ -88,10 +96,30 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
connectionsRef.current = connections;
|
||||
}, [connections]);
|
||||
|
||||
const getCurrentQuery = () => {
|
||||
const val = editorRef.current?.getValue?.();
|
||||
if (typeof val === 'string') return val;
|
||||
return query || '';
|
||||
};
|
||||
|
||||
const syncQueryToEditor = (sql: string) => {
|
||||
const next = sql || '';
|
||||
setQuery(next);
|
||||
const editor = editorRef.current;
|
||||
if (editor && editor.getValue?.() !== next) {
|
||||
editor.setValue(next);
|
||||
}
|
||||
};
|
||||
|
||||
// If opening a saved query, load its SQL
|
||||
useEffect(() => {
|
||||
if (tab.query) setQuery(tab.query);
|
||||
}, [tab.query]);
|
||||
const incoming = tab.query || '';
|
||||
if (incoming === lastExternalQueryRef.current) {
|
||||
return;
|
||||
}
|
||||
lastExternalQueryRef.current = incoming;
|
||||
syncQueryToEditor(incoming || 'SELECT * FROM ');
|
||||
}, [tab.id, tab.query]);
|
||||
|
||||
// Fetch Database List
|
||||
useEffect(() => {
|
||||
@@ -186,6 +214,17 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
fetchMetadata();
|
||||
}, [currentConnectionId, connections, dbList]); // dbList 变化时触发重新加载
|
||||
|
||||
// Query ID management helpers
|
||||
const setQueryId = (id: string) => {
|
||||
currentQueryIdRef.current = id;
|
||||
setCurrentQueryId(id);
|
||||
};
|
||||
|
||||
const clearQueryId = () => {
|
||||
currentQueryIdRef.current = '';
|
||||
setCurrentQueryId('');
|
||||
};
|
||||
|
||||
// Handle Resizing
|
||||
const handleMouseDown = (e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
@@ -254,6 +293,19 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
return parts[parts.length - 1] || raw;
|
||||
};
|
||||
|
||||
const splitSchemaAndTable = (qualified: string): { schema: string; table: string } => {
|
||||
const raw = normalizeQualifiedName(qualified);
|
||||
if (!raw) return { schema: '', table: '' };
|
||||
const parts = raw.split('.').filter(Boolean);
|
||||
if (parts.length >= 2) {
|
||||
return {
|
||||
schema: parts[parts.length - 2] || '',
|
||||
table: parts[parts.length - 1] || '',
|
||||
};
|
||||
}
|
||||
return { schema: '', table: parts[0] || '' };
|
||||
};
|
||||
|
||||
const buildConnConfig = () => {
|
||||
const connId = currentConnectionIdRef.current;
|
||||
const conn = connectionsRef.current.find(c => c.id === connId);
|
||||
@@ -326,13 +378,14 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (qualifierMatch) {
|
||||
const qualifier = stripQuotes(qualifierMatch[1]);
|
||||
const prefix = (qualifierMatch[2] || '').toLowerCase();
|
||||
const qualifierLower = qualifier.toLowerCase();
|
||||
|
||||
// 首先检查 qualifier 是否是数据库名(跨库表提示)
|
||||
const visibleDbs = visibleDbsRef.current;
|
||||
if (visibleDbs.some(db => db.toLowerCase() === qualifier.toLowerCase())) {
|
||||
if (visibleDbs.some(db => db.toLowerCase() === qualifierLower)) {
|
||||
// qualifier 是数据库名,提示该库的表
|
||||
const tables = tablesRef.current.filter(t =>
|
||||
(t.dbName || '').toLowerCase() === qualifier.toLowerCase()
|
||||
(t.dbName || '').toLowerCase() === qualifierLower
|
||||
);
|
||||
const filtered = prefix
|
||||
? tables.filter(t => (t.tableName || '').toLowerCase().startsWith(prefix))
|
||||
@@ -349,6 +402,34 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
return { suggestions };
|
||||
}
|
||||
|
||||
// qualifier 是 schema(如 dbo/public)时,仅补全表名,避免输入 dbo. 后再补成 dbo.dbo.table
|
||||
const schemaTables = tablesRef.current
|
||||
.map(t => {
|
||||
const parsed = splitSchemaAndTable(t.tableName || '');
|
||||
return {
|
||||
dbName: t.dbName || '',
|
||||
schema: parsed.schema,
|
||||
table: parsed.table,
|
||||
};
|
||||
})
|
||||
.filter(t => t.schema.toLowerCase() === qualifierLower && !!t.table);
|
||||
|
||||
if (schemaTables.length > 0) {
|
||||
const filtered = prefix
|
||||
? schemaTables.filter(t => t.table.toLowerCase().startsWith(prefix))
|
||||
: schemaTables;
|
||||
|
||||
const suggestions = filtered.map(t => ({
|
||||
label: t.table,
|
||||
kind: monaco.languages.CompletionItemKind.Class,
|
||||
insertText: t.table,
|
||||
detail: `Table (${t.dbName}${t.schema ? '.' + t.schema : ''})`,
|
||||
range,
|
||||
sortText: '0' + t.table
|
||||
}));
|
||||
return { suggestions };
|
||||
}
|
||||
|
||||
// 否则检查是否是表别名或表名,提示列
|
||||
const reserved = new Set([
|
||||
'where', 'on', 'group', 'order', 'limit', 'having',
|
||||
@@ -497,8 +578,8 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const handleFormat = () => {
|
||||
try {
|
||||
const formatted = format(query, { language: 'mysql', keywordCase: sqlFormatOptions.keywordCase });
|
||||
setQuery(formatted);
|
||||
const formatted = format(getCurrentQuery(), { language: 'mysql', keywordCase: sqlFormatOptions.keywordCase });
|
||||
syncQueryToEditor(formatted);
|
||||
} catch (e) {
|
||||
message.error("格式化失败: SQL 语法可能有误");
|
||||
}
|
||||
@@ -517,6 +598,12 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
icon: sqlFormatOptions.keywordCase === 'lower' ? '✓' : undefined,
|
||||
onClick: () => setSqlFormatOptions({ keywordCase: 'lower' })
|
||||
},
|
||||
{ type: 'divider' },
|
||||
{
|
||||
key: 'shortcut-settings',
|
||||
label: '快捷键管理...',
|
||||
onClick: () => window.dispatchEvent(new CustomEvent('gonavi:open-shortcut-settings')),
|
||||
},
|
||||
];
|
||||
|
||||
const splitSQLStatements = (sql: string): string[] => {
|
||||
@@ -979,11 +1066,22 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
};
|
||||
|
||||
const handleRun = async () => {
|
||||
if (!query.trim()) return;
|
||||
const currentQuery = getCurrentQuery();
|
||||
if (!currentQuery.trim()) return;
|
||||
if (!currentDb) {
|
||||
message.error("请先选择数据库");
|
||||
return;
|
||||
}
|
||||
// 如果已有查询在运行,先取消它
|
||||
if (currentQueryIdRef.current) {
|
||||
try {
|
||||
await CancelQuery(currentQueryIdRef.current);
|
||||
} catch (error) {
|
||||
// 忽略取消错误,可能查询已完成
|
||||
}
|
||||
// 清除旧查询ID
|
||||
clearQueryId();
|
||||
}
|
||||
const runSeq = ++runSeqRef.current;
|
||||
setLoading(true);
|
||||
const runStartTime = Date.now();
|
||||
@@ -1010,8 +1108,16 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
};
|
||||
|
||||
try {
|
||||
const rawSQL = getSelectedSQL() || query;
|
||||
const statements = splitSQLStatements(rawSQL);
|
||||
const rawSQL = getSelectedSQL() || currentQuery;
|
||||
const dbType = String((config as any).type || 'mysql');
|
||||
const normalizedDbType = dbType.trim().toLowerCase();
|
||||
const normalizedRawSQL = String(rawSQL || '').replace(/;/g, ';');
|
||||
const splitInput = normalizedDbType === 'mongodb'
|
||||
? normalizedRawSQL
|
||||
.replace(/^\s*\/\/.*$/gm, '')
|
||||
.replace(/^\s*#.*$/gm, '')
|
||||
: normalizedRawSQL;
|
||||
const statements = splitSQLStatements(splitInput);
|
||||
if (statements.length === 0) {
|
||||
message.info('没有可执行的 SQL。');
|
||||
setResultSets([]);
|
||||
@@ -1021,7 +1127,6 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const nextResultSets: ResultSet[] = [];
|
||||
const maxRows = Number(queryOptions?.maxRows) || 0;
|
||||
const dbType = String((config as any).type || 'mysql');
|
||||
const forceReadOnlyResult = connCaps.forceReadOnlyQueryResult;
|
||||
const wantsLimitProbe = Number.isFinite(maxRows) && maxRows > 0;
|
||||
const probeLimit = wantsLimitProbe ? (maxRows + 1) : 0;
|
||||
@@ -1035,9 +1140,35 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const limitApplied = shouldAutoLimit && wantsLimitProbe;
|
||||
const limited = limitApplied ? applyAutoLimit(rawStatement, dbType, probeLimit) : { sql: rawStatement, applied: false, maxRows: probeLimit };
|
||||
const executedSql = limited.sql;
|
||||
let executedSql = limited.sql;
|
||||
if (String(dbType || '').trim().toLowerCase() === 'mongodb') {
|
||||
const shellConvert = convertMongoShellToJsonCommand(executedSql);
|
||||
if (shellConvert.recognized) {
|
||||
if (shellConvert.error) {
|
||||
const prefix = statements.length > 1 ? `第 ${idx + 1} 条语句执行失败:` : '';
|
||||
message.error(prefix + shellConvert.error);
|
||||
setResultSets([]);
|
||||
setActiveResultKey('');
|
||||
return;
|
||||
}
|
||||
if (shellConvert.command) {
|
||||
executedSql = shellConvert.command;
|
||||
}
|
||||
}
|
||||
}
|
||||
const startTime = Date.now();
|
||||
const res = await DBQuery(config as any, currentDb, executedSql);
|
||||
|
||||
// Generate query ID for cancellation using backend UUID with fallback
|
||||
let queryId: string;
|
||||
try {
|
||||
queryId = await GenerateQueryID();
|
||||
} catch (error) {
|
||||
console.warn('GenerateQueryID failed, using local UUID fallback:', error);
|
||||
queryId = 'query-' + uuidv4();
|
||||
}
|
||||
setQueryId(queryId);
|
||||
|
||||
const res = await DBQueryWithCancel(config as any, currentDb, executedSql, queryId);
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
addSqlLog({
|
||||
@@ -1052,6 +1183,32 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
|
||||
if (!res.success) {
|
||||
// 检查是否为查询取消错误
|
||||
const errorMsg = res.message.toLowerCase();
|
||||
const isCancelledError = errorMsg.includes('context canceled') ||
|
||||
errorMsg.includes('查询已取消') ||
|
||||
errorMsg.includes('canceled') ||
|
||||
errorMsg.includes('cancelled') ||
|
||||
errorMsg.includes('statement canceled') ||
|
||||
errorMsg.includes('sql: statement canceled');
|
||||
|
||||
// 确保不是超时错误
|
||||
const isTimeoutError = errorMsg.includes('context deadline exceeded') ||
|
||||
errorMsg.includes('timeout') ||
|
||||
errorMsg.includes('超时') ||
|
||||
errorMsg.includes('deadline exceeded');
|
||||
|
||||
if (isCancelledError && !isTimeoutError) {
|
||||
// 查询已被用户取消,不显示错误消息,清理状态
|
||||
setResultSets([]);
|
||||
setActiveResultKey('');
|
||||
// 清除查询ID,与handleCancel保持一致
|
||||
if (currentQueryIdRef.current) {
|
||||
clearQueryId();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const prefix = statements.length > 1 ? `第 ${idx + 1} 条语句执行失败:` : '';
|
||||
message.error(prefix + res.message);
|
||||
setResultSets([]);
|
||||
@@ -1157,16 +1314,82 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setActiveResultKey('');
|
||||
} finally {
|
||||
if (runSeqRef.current === runSeq) setLoading(false);
|
||||
// Clear query ID after execution completes
|
||||
clearQueryId();
|
||||
}
|
||||
};
|
||||
|
||||
const handleCancel = async () => {
|
||||
if (!currentQueryIdRef.current) {
|
||||
message.warning('没有正在运行的查询可取消');
|
||||
return;
|
||||
}
|
||||
const queryIdToCancel = currentQueryIdRef.current;
|
||||
try {
|
||||
const res = await CancelQuery(queryIdToCancel);
|
||||
if (res.success) {
|
||||
message.success('查询已取消');
|
||||
// Clear query ID after successful cancellation
|
||||
if (currentQueryIdRef.current === queryIdToCancel) {
|
||||
clearQueryId()
|
||||
}
|
||||
} else {
|
||||
message.warning(res.message);
|
||||
}
|
||||
} catch (error: any) {
|
||||
message.error('取消查询失败: ' + error.message);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const binding = shortcutOptions.runQuery;
|
||||
if (!binding?.enabled || !binding.combo) {
|
||||
return;
|
||||
}
|
||||
|
||||
const handleRunShortcut = (event: KeyboardEvent) => {
|
||||
if (activeTabId !== tab.id) {
|
||||
return;
|
||||
}
|
||||
if (!isShortcutMatch(event, binding.combo)) {
|
||||
return;
|
||||
}
|
||||
const editorHasFocus = !!editorRef.current?.hasTextFocus?.();
|
||||
if (!editorHasFocus && !isEditableElement(event.target)) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
void handleRun();
|
||||
};
|
||||
|
||||
window.addEventListener('keydown', handleRunShortcut);
|
||||
return () => {
|
||||
window.removeEventListener('keydown', handleRunShortcut);
|
||||
};
|
||||
}, [activeTabId, tab.id, shortcutOptions.runQuery, handleRun]);
|
||||
|
||||
useEffect(() => {
|
||||
const handleRunActiveQuery = () => {
|
||||
if (activeTabId !== tab.id) {
|
||||
return;
|
||||
}
|
||||
void handleRun();
|
||||
};
|
||||
|
||||
window.addEventListener('gonavi:run-active-query', handleRunActiveQuery as EventListener);
|
||||
return () => {
|
||||
window.removeEventListener('gonavi:run-active-query', handleRunActiveQuery as EventListener);
|
||||
};
|
||||
}, [activeTabId, tab.id, handleRun]);
|
||||
|
||||
const handleSave = async () => {
|
||||
try {
|
||||
const values = await saveForm.validateFields();
|
||||
saveQuery({
|
||||
id: tab.id.startsWith('saved-') ? tab.id : `saved-${Date.now()}`,
|
||||
name: values.name,
|
||||
sql: query,
|
||||
sql: getCurrentQuery(),
|
||||
connectionId: currentConnectionId,
|
||||
dbName: currentDb || tab.dbName || '',
|
||||
createdAt: Date.now()
|
||||
@@ -1271,9 +1494,24 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
]}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Button type="primary" icon={<PlayCircleOutlined />} onClick={handleRun} loading={loading}>
|
||||
运行
|
||||
</Button>
|
||||
<Button.Group>
|
||||
<Tooltip
|
||||
title={
|
||||
shortcutOptions.runQuery?.enabled && shortcutOptions.runQuery?.combo
|
||||
? `运行(${getShortcutDisplay(shortcutOptions.runQuery.combo)})`
|
||||
: '运行'
|
||||
}
|
||||
>
|
||||
<Button type="primary" icon={<PlayCircleOutlined />} onClick={handleRun} loading={loading}>
|
||||
运行
|
||||
</Button>
|
||||
</Tooltip>
|
||||
{loading && (
|
||||
<Button type="primary" danger icon={<StopOutlined />} onClick={handleCancel}>
|
||||
停止
|
||||
</Button>
|
||||
)}
|
||||
</Button.Group>
|
||||
<Button icon={<SaveOutlined />} onClick={() => {
|
||||
saveForm.setFieldsValue({ name: tab.title.replace('Query (', '').replace(')', '') });
|
||||
setIsSaveModalOpen(true);
|
||||
@@ -1296,7 +1534,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
height="100%"
|
||||
defaultLanguage="sql"
|
||||
theme={darkMode ? "transparent-dark" : "transparent-light"}
|
||||
value={query}
|
||||
defaultValue={query}
|
||||
onChange={(val) => setQuery(val || '')}
|
||||
onMount={handleEditorDidMount}
|
||||
options={{
|
||||
|
||||
@@ -5,7 +5,7 @@ import { useStore } from '../store';
|
||||
import { RedisKeyInfo, RedisValue, StreamEntry } from '../types';
|
||||
import Editor from '@monaco-editor/react';
|
||||
import type { DataNode } from 'antd/es/tree';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
|
||||
|
||||
const { Search } = Input;
|
||||
|
||||
@@ -399,7 +399,8 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
const theme = useStore(state => state.theme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const opacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const resolvedAppearance = resolveAppearanceValues(appearance);
|
||||
const opacity = normalizeOpacityForPlatform(resolvedAppearance.opacity);
|
||||
const connection = connections.find(c => c.id === connectionId);
|
||||
const keyAccentColor = darkMode ? '#ffd666' : '#1677ff';
|
||||
const jsonAccentColor = darkMode ? '#f6c453' : '#1890ff';
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -261,9 +261,18 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const darkMode = theme === 'dark';
|
||||
const resizeGuideColor = darkMode ? '#f6c453' : '#1890ff';
|
||||
const readOnly = !!tab.readOnly;
|
||||
const panelRadius = 10;
|
||||
const panelFrameColor = darkMode ? 'rgba(0, 0, 0, 0.18)' : 'rgba(0, 0, 0, 0.12)';
|
||||
const panelToolbarBorder = darkMode ? 'rgba(255, 255, 255, 0.12)' : 'rgba(0, 0, 0, 0.10)';
|
||||
const panelToolbarBg = darkMode ? 'rgba(20, 20, 20, 0.35)' : 'rgba(255, 255, 255, 0.72)';
|
||||
const panelBodyBg = darkMode ? 'rgba(0, 0, 0, 0.24)' : 'rgba(255, 255, 255, 0.82)';
|
||||
const focusRowBg = darkMode ? 'rgba(246, 196, 83, 0.22)' : 'rgba(24, 144, 255, 0.12)';
|
||||
|
||||
const [tableHeight, setTableHeight] = useState(500);
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const pendingFocusColumnKeyRef = useRef<string | null>(null);
|
||||
const focusHighlightTimerRef = useRef<number | null>(null);
|
||||
const [focusColumnKey, setFocusColumnKey] = useState('');
|
||||
|
||||
const openCommentEditor = useCallback((record: EditableColumn) => {
|
||||
if (!record?._key) return;
|
||||
@@ -346,6 +355,61 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setSelectedColumnRowKeys(prev => prev.filter(key => columns.some(c => c._key === key)));
|
||||
}, [columns]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (focusHighlightTimerRef.current !== null) {
|
||||
window.clearTimeout(focusHighlightTimerRef.current);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
const focusColumnRow = useCallback((targetKey: string): boolean => {
|
||||
if (activeKey !== 'columns') return false;
|
||||
const tableBody = containerRef.current?.querySelector('.ant-table-body') as HTMLElement | null;
|
||||
if (!tableBody) return false;
|
||||
const row = tableBody.querySelector(`tr[data-row-key="${targetKey}"]`) as HTMLTableRowElement | null;
|
||||
if (!row) return false;
|
||||
|
||||
row.scrollIntoView({ behavior: 'smooth', block: 'nearest' });
|
||||
setFocusColumnKey(targetKey);
|
||||
if (focusHighlightTimerRef.current !== null) {
|
||||
window.clearTimeout(focusHighlightTimerRef.current);
|
||||
}
|
||||
focusHighlightTimerRef.current = window.setTimeout(() => {
|
||||
setFocusColumnKey(prev => (prev === targetKey ? '' : prev));
|
||||
}, 1600);
|
||||
|
||||
if (!readOnly) {
|
||||
const firstInput = row.querySelector('input') as HTMLInputElement | null;
|
||||
if (firstInput) {
|
||||
firstInput.focus();
|
||||
firstInput.select();
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}, [activeKey, readOnly]);
|
||||
|
||||
useEffect(() => {
|
||||
const pendingKey = pendingFocusColumnKeyRef.current;
|
||||
if (!pendingKey || activeKey !== 'columns') return;
|
||||
|
||||
let cancelled = false;
|
||||
const tryFocus = () => {
|
||||
if (cancelled) return;
|
||||
if (focusColumnRow(pendingKey)) {
|
||||
pendingFocusColumnKeyRef.current = null;
|
||||
}
|
||||
};
|
||||
|
||||
const timerA = window.setTimeout(tryFocus, 0);
|
||||
const timerB = window.setTimeout(tryFocus, 96);
|
||||
return () => {
|
||||
cancelled = true;
|
||||
window.clearTimeout(timerA);
|
||||
window.clearTimeout(timerB);
|
||||
};
|
||||
}, [activeKey, columns, focusColumnRow]);
|
||||
|
||||
// Initial Columns Definition
|
||||
useEffect(() => {
|
||||
const initialCols = [
|
||||
@@ -886,21 +950,46 @@ ${selectedTrigger.statement}`;
|
||||
}));
|
||||
};
|
||||
|
||||
const handleAddColumn = () => {
|
||||
const newCol: EditableColumn = {
|
||||
name: isNewTable ? 'new_column' : `new_col_${columns.length + 1}`,
|
||||
type: 'varchar(255)',
|
||||
nullable: 'YES',
|
||||
key: '',
|
||||
extra: '',
|
||||
comment: '',
|
||||
default: '',
|
||||
_key: `new-${Date.now()}`,
|
||||
isNew: true,
|
||||
isAutoIncrement: false
|
||||
};
|
||||
setColumns([...columns, newCol]);
|
||||
};
|
||||
const createNewColumn = useCallback((indexHint: number): EditableColumn => ({
|
||||
name: isNewTable ? 'new_column' : `new_col_${indexHint}`,
|
||||
type: 'varchar(255)',
|
||||
nullable: 'YES',
|
||||
key: '',
|
||||
extra: '',
|
||||
comment: '',
|
||||
default: '',
|
||||
_key: `new-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
|
||||
isNew: true,
|
||||
isAutoIncrement: false
|
||||
}), [isNewTable]);
|
||||
|
||||
const handleAddColumn = useCallback((insertAfterKey?: string) => {
|
||||
const newCol = createNewColumn(columns.length + 1);
|
||||
setColumns(prev => {
|
||||
const next = [...prev];
|
||||
if (insertAfterKey) {
|
||||
const insertIndex = next.findIndex(col => col._key === insertAfterKey);
|
||||
if (insertIndex >= 0) {
|
||||
next.splice(insertIndex + 1, 0, newCol);
|
||||
return next;
|
||||
}
|
||||
}
|
||||
next.push(newCol);
|
||||
return next;
|
||||
});
|
||||
setSelectedColumnRowKeys([newCol._key]);
|
||||
pendingFocusColumnKeyRef.current = newCol._key;
|
||||
}, [columns.length, createNewColumn]);
|
||||
|
||||
const handleAddColumnAfterSelected = useCallback(() => {
|
||||
const selectedSet = new Set(selectedColumnRowKeys);
|
||||
const anchor = columns.find(col => selectedSet.has(col._key));
|
||||
if (!anchor) {
|
||||
message.warning('请先选择一个字段,再执行插入。');
|
||||
return;
|
||||
}
|
||||
handleAddColumn(anchor._key);
|
||||
}, [columns, handleAddColumn, selectedColumnRowKeys]);
|
||||
|
||||
const handleDeleteColumn = (key: string) => {
|
||||
setColumns(prev => prev.filter(c => c._key !== key));
|
||||
@@ -1920,22 +2009,35 @@ END;`;
|
||||
}));
|
||||
|
||||
const columnsTabContent = (
|
||||
<div ref={containerRef} className="table-designer-wrapper" style={{ height: '100%', overflow: 'hidden', position: 'relative' }}>
|
||||
<div
|
||||
ref={containerRef}
|
||||
className="table-designer-wrapper"
|
||||
style={{
|
||||
height: '100%',
|
||||
overflow: 'hidden',
|
||||
position: 'relative',
|
||||
background: panelBodyBg
|
||||
}}
|
||||
>
|
||||
<style>{`
|
||||
.table-designer-wrapper .ant-table-body {
|
||||
max-height: ${tableHeight}px !important;
|
||||
}
|
||||
}
|
||||
.table-designer-wrapper .table-designer-focus-row > .ant-table-cell {
|
||||
background: ${focusRowBg} !important;
|
||||
}
|
||||
`}</style>
|
||||
{readOnly ? (
|
||||
<Table
|
||||
dataSource={columns}
|
||||
columns={resizableColumns}
|
||||
rowKey="_key"
|
||||
rowClassName={(record: EditableColumn) => record._key === focusColumnKey ? 'table-designer-focus-row' : ''}
|
||||
size="small"
|
||||
pagination={false}
|
||||
loading={loading}
|
||||
scroll={{ y: tableHeight }}
|
||||
bordered
|
||||
bordered={false}
|
||||
components={{
|
||||
header: {
|
||||
cell: ResizableTitle,
|
||||
@@ -1953,11 +2055,12 @@ END;`;
|
||||
onChange: (nextSelectedRowKeys) => setSelectedColumnRowKeys(nextSelectedRowKeys as string[]),
|
||||
}}
|
||||
rowKey="_key"
|
||||
rowClassName={(record: EditableColumn) => record._key === focusColumnKey ? 'table-designer-focus-row' : ''}
|
||||
size="small"
|
||||
pagination={false}
|
||||
loading={loading}
|
||||
scroll={{ y: tableHeight }}
|
||||
bordered
|
||||
bordered={false}
|
||||
components={{
|
||||
body: { row: SortableRow },
|
||||
header: { cell: ResizableTitle }
|
||||
@@ -1985,8 +2088,63 @@ END;`;
|
||||
);
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{ padding: '8px', borderBottom: '1px solid #eee', display: 'flex', gap: '8px', alignItems: 'center' }}>
|
||||
<div className="table-designer-shell" style={{ display: 'flex', flexDirection: 'column', height: '100%', minHeight: 0, padding: '6px 0' }}>
|
||||
<style>{`
|
||||
.table-designer-shell .ant-table,
|
||||
.table-designer-shell .ant-table-wrapper,
|
||||
.table-designer-shell .ant-table-container {
|
||||
background: transparent !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-wrapper,
|
||||
.table-designer-shell .ant-table-container {
|
||||
border: none !important;
|
||||
overflow: hidden !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-thead > tr > th {
|
||||
background: transparent !important;
|
||||
border-bottom: 1px solid ${darkMode ? 'rgba(255,255,255,0.06)' : 'rgba(0,0,0,0.06)'} !important;
|
||||
border-inline-end: 1px solid transparent !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-tbody > tr > td,
|
||||
.table-designer-shell .ant-table-tbody .ant-table-row > .ant-table-cell {
|
||||
background: transparent !important;
|
||||
border-bottom: 1px solid ${darkMode ? 'rgba(255,255,255,0.05)' : 'rgba(0,0,0,0.05)'} !important;
|
||||
border-inline-end: 1px solid transparent !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-thead > tr > th::before {
|
||||
display: none !important;
|
||||
}
|
||||
.table-designer-shell .ant-table-tbody > tr:hover > td,
|
||||
.table-designer-shell .ant-table-tbody .ant-table-row:hover > .ant-table-cell {
|
||||
background: ${darkMode ? 'rgba(255,255,255,0.06)' : 'rgba(0,0,0,0.02)'} !important;
|
||||
}
|
||||
.table-designer-shell .ant-tabs-nav {
|
||||
margin-bottom: 8px !important;
|
||||
}
|
||||
.table-designer-shell .ant-tabs-nav::before {
|
||||
border-bottom-color: ${darkMode ? 'rgba(255,255,255,0.08)' : 'rgba(0,0,0,0.08)'} !important;
|
||||
}
|
||||
.table-designer-shell .ant-tabs-content-holder,
|
||||
.table-designer-shell .ant-tabs-content,
|
||||
.table-designer-shell .ant-tabs-tabpane {
|
||||
height: 100%;
|
||||
}
|
||||
`}</style>
|
||||
<div
|
||||
style={{
|
||||
padding: '10px 12px 8px 12px',
|
||||
borderBottom: `1px solid ${panelToolbarBorder}`,
|
||||
borderTopLeftRadius: panelRadius,
|
||||
borderTopRightRadius: panelRadius,
|
||||
borderLeft: `1px solid ${panelFrameColor}`,
|
||||
borderRight: `1px solid ${panelFrameColor}`,
|
||||
borderTop: `1px solid ${panelFrameColor}`,
|
||||
background: panelToolbarBg,
|
||||
display: 'flex',
|
||||
gap: '8px',
|
||||
alignItems: 'center'
|
||||
}}
|
||||
>
|
||||
{isNewTable && (
|
||||
<>
|
||||
<Input
|
||||
@@ -2014,14 +2172,25 @@ END;`;
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{!readOnly && <Button icon={<SaveOutlined />} type="primary" onClick={generateDDL}>保存</Button>}
|
||||
{!isNewTable && <Button icon={<ReloadOutlined />} onClick={fetchData}>刷新</Button>}
|
||||
{!readOnly && <Button size="small" icon={<SaveOutlined />} type="primary" onClick={generateDDL}>保存</Button>}
|
||||
{!isNewTable && <Button size="small" icon={<ReloadOutlined />} onClick={fetchData}>刷新</Button>}
|
||||
{!isNewTable && !readOnly && supportsTableCommentOps() && (
|
||||
<Button icon={<EditOutlined />} onClick={openTableCommentModal}>表备注</Button>
|
||||
<Button size="small" icon={<EditOutlined />} onClick={openTableCommentModal}>表备注</Button>
|
||||
)}
|
||||
{!readOnly && <Button icon={<PlusOutlined />} onClick={handleAddColumn}>添加字段</Button>}
|
||||
{!readOnly && <Button size="small" icon={<PlusOutlined />} onClick={() => handleAddColumn()}>添加字段</Button>}
|
||||
{!readOnly && (
|
||||
<Button
|
||||
size="small"
|
||||
icon={<PlusOutlined />}
|
||||
onClick={handleAddColumnAfterSelected}
|
||||
disabled={selectedColumnRowKeys.length === 0}
|
||||
>
|
||||
在选中字段后添加
|
||||
</Button>
|
||||
)}
|
||||
{!readOnly && (
|
||||
<Button
|
||||
size="small"
|
||||
icon={<CopyOutlined />}
|
||||
onClick={openCopySelectedColumnsModal}
|
||||
disabled={selectedColumns.length === 0}
|
||||
@@ -2034,7 +2203,17 @@ END;`;
|
||||
<Tabs
|
||||
activeKey={activeKey}
|
||||
onChange={setActiveKey}
|
||||
style={{ flex: 1, padding: '0 10px' }}
|
||||
style={{
|
||||
flex: 1,
|
||||
minHeight: 0,
|
||||
padding: '8px 10px 10px 10px',
|
||||
borderBottomLeftRadius: panelRadius,
|
||||
borderBottomRightRadius: panelRadius,
|
||||
borderLeft: `1px solid ${panelFrameColor}`,
|
||||
borderRight: `1px solid ${panelFrameColor}`,
|
||||
borderBottom: `1px solid ${panelFrameColor}`,
|
||||
background: panelBodyBg
|
||||
}}
|
||||
items={[
|
||||
{
|
||||
key: 'columns',
|
||||
@@ -2276,7 +2455,7 @@ END;`;
|
||||
label: 'DDL',
|
||||
icon: <FileTextOutlined />,
|
||||
children: (
|
||||
<div style={{ height: 'calc(100vh - 200px)', border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<div style={{ height: 'calc(100vh - 200px)', border: `1px solid ${panelFrameColor}`, borderRadius: panelRadius, background: panelBodyBg }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
language="sql"
|
||||
@@ -2312,7 +2491,7 @@ END;`;
|
||||
okText="应用"
|
||||
cancelText="取消"
|
||||
width={640}
|
||||
destroyOnClose
|
||||
destroyOnHidden
|
||||
>
|
||||
<Input.TextArea
|
||||
value={commentEditorValue}
|
||||
@@ -2517,7 +2696,7 @@ END;`;
|
||||
<span><strong>时机:</strong> {selectedTrigger.timing}</span>
|
||||
<span><strong>事件:</strong> {selectedTrigger.event}</span>
|
||||
</div>
|
||||
<div style={{ border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<div style={{ border: `1px solid ${panelFrameColor}`, borderRadius: panelRadius, background: panelBodyBg }}>
|
||||
<Editor
|
||||
height="350px"
|
||||
language="sql"
|
||||
@@ -2553,7 +2732,7 @@ END;`;
|
||||
<span>修改触发器时会先删除原触发器,再创建新触发器。</span>
|
||||
)}
|
||||
</div>
|
||||
<div style={{ border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<div style={{ border: `1px solid ${panelFrameColor}`, borderRadius: panelRadius, background: panelBodyBg }}>
|
||||
<Editor
|
||||
height="350px"
|
||||
language="sql"
|
||||
|
||||
@@ -1,8 +1,16 @@
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import { ConnectionConfig, ProxyConfig, SavedConnection, TabData, SavedQuery } from './types';
|
||||
import { ConnectionConfig, ProxyConfig, SavedConnection, TabData, SavedQuery, ConnectionTag } from './types';
|
||||
import {
|
||||
ShortcutAction,
|
||||
ShortcutBinding,
|
||||
ShortcutOptions,
|
||||
DEFAULT_SHORTCUT_OPTIONS,
|
||||
cloneShortcutOptions,
|
||||
sanitizeShortcutOptions,
|
||||
} from './utils/shortcuts';
|
||||
|
||||
const DEFAULT_APPEARANCE = { opacity: 1.0, blur: 0 };
|
||||
const DEFAULT_APPEARANCE = { enabled: true, opacity: 1.0, blur: 0 };
|
||||
const DEFAULT_UI_SCALE = 1.0;
|
||||
const MIN_UI_SCALE = 0.8;
|
||||
const MAX_UI_SCALE = 1.25;
|
||||
@@ -17,7 +25,7 @@ const MAX_HOST_ENTRY_LENGTH = 512;
|
||||
const MAX_HOST_ENTRIES = 64;
|
||||
const DEFAULT_TIMEOUT_SECONDS = 30;
|
||||
const MAX_TIMEOUT_SECONDS = 3600;
|
||||
const PERSIST_VERSION = 4;
|
||||
const PERSIST_VERSION = 6;
|
||||
const DEFAULT_CONNECTION_TYPE = 'mysql';
|
||||
const DEFAULT_GLOBAL_PROXY: GlobalProxyConfig = {
|
||||
enabled: false,
|
||||
@@ -48,6 +56,23 @@ const SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'duckdb',
|
||||
'custom',
|
||||
]);
|
||||
const SSL_SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'dameng',
|
||||
'clickhouse',
|
||||
'postgres',
|
||||
'sqlserver',
|
||||
'oracle',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'mongodb',
|
||||
'redis',
|
||||
'tdengine',
|
||||
]);
|
||||
|
||||
const getDefaultPortByType = (type: string): number => {
|
||||
switch (type) {
|
||||
@@ -177,6 +202,16 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
const defaultPort = getDefaultPortByType(type);
|
||||
const savePassword = typeof raw.savePassword === 'boolean' ? raw.savePassword : true;
|
||||
const mongoSrv = !!raw.mongoSrv;
|
||||
const sslCapable = SSL_SUPPORTED_CONNECTION_TYPES.has(type);
|
||||
const sslModeRaw = toTrimmedString(raw.sslMode, 'preferred').toLowerCase();
|
||||
const sslMode: 'preferred' | 'required' | 'skip-verify' | 'disable' =
|
||||
sslModeRaw === 'required'
|
||||
? 'required'
|
||||
: sslModeRaw === 'skip-verify'
|
||||
? 'skip-verify'
|
||||
: sslModeRaw === 'disable'
|
||||
? 'disable'
|
||||
: 'preferred';
|
||||
|
||||
const sshRaw = (raw.ssh && typeof raw.ssh === 'object') ? raw.ssh as Record<string, unknown> : {};
|
||||
const ssh = {
|
||||
@@ -196,6 +231,18 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
user: toTrimmedString(proxyRaw.user),
|
||||
password: toTrimmedString(proxyRaw.password),
|
||||
};
|
||||
const httpTunnelRaw = (raw.httpTunnel && typeof raw.httpTunnel === 'object')
|
||||
? raw.httpTunnel as Record<string, unknown>
|
||||
: ((raw.HTTPTunnel && typeof raw.HTTPTunnel === 'object') ? raw.HTTPTunnel as Record<string, unknown> : {});
|
||||
const httpTunnel = {
|
||||
host: toTrimmedString(httpTunnelRaw.host ?? raw.httpTunnelHost),
|
||||
port: normalizePort(httpTunnelRaw.port ?? raw.httpTunnelPort, 8080),
|
||||
user: toTrimmedString(httpTunnelRaw.user ?? raw.httpTunnelUser),
|
||||
password: toTrimmedString(httpTunnelRaw.password ?? raw.httpTunnelPassword),
|
||||
};
|
||||
const supportsNetworkTunnel = type !== 'sqlite' && type !== 'duckdb';
|
||||
const useHttpTunnel = supportsNetworkTunnel && (raw.useHttpTunnel === true || raw.UseHTTPTunnel === true);
|
||||
const useProxy = supportsNetworkTunnel && !!raw.useProxy && !useHttpTunnel;
|
||||
|
||||
const safeConfig: ConnectionConfig & Record<string, unknown> = {
|
||||
...raw,
|
||||
@@ -206,10 +253,16 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
password: savePassword ? toTrimmedString(raw.password) : '',
|
||||
savePassword,
|
||||
database: toTrimmedString(raw.database),
|
||||
useSSL: sslCapable ? !!raw.useSSL : false,
|
||||
sslMode: sslCapable ? sslMode : 'disable',
|
||||
sslCertPath: sslCapable ? toTrimmedString(raw.sslCertPath) : '',
|
||||
sslKeyPath: sslCapable ? toTrimmedString(raw.sslKeyPath) : '',
|
||||
useSSH: !!raw.useSSH,
|
||||
ssh,
|
||||
useProxy: !!raw.useProxy,
|
||||
useProxy,
|
||||
proxy,
|
||||
useHttpTunnel,
|
||||
httpTunnel,
|
||||
uri: toTrimmedString(raw.uri).slice(0, MAX_URI_LENGTH),
|
||||
hosts: sanitizeAddressList(raw.hosts),
|
||||
topology: raw.topology === 'replica' ? 'replica' : (raw.topology === 'cluster' ? 'cluster' : 'single'),
|
||||
@@ -293,6 +346,27 @@ const sanitizeConnections = (value: unknown): SavedConnection[] => {
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeConnectionTags = (value: unknown): ConnectionTag[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: ConnectionTag[] = [];
|
||||
const idSet = new Set<string>();
|
||||
|
||||
value.forEach((entry, index) => {
|
||||
if (!entry || typeof entry !== 'object') return;
|
||||
const raw = entry as Record<string, unknown>;
|
||||
const id = toTrimmedString(raw.id, `tag-${index + 1}`) || `tag-${index + 1}`;
|
||||
if (idSet.has(id)) return;
|
||||
idSet.add(id);
|
||||
|
||||
const name = toTrimmedString(raw.name, `标签-${index + 1}`) || `标签-${index + 1}`;
|
||||
const connectionIds = sanitizeStringArray(raw.connectionIds, 256);
|
||||
|
||||
result.push({ id, name, connectionIds });
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const isLegacyDefaultAppearance = (appearance: Partial<{ opacity: number; blur: number }> | undefined): boolean => {
|
||||
if (!appearance) {
|
||||
return true;
|
||||
@@ -325,18 +399,20 @@ export interface GlobalProxyConfig extends ProxyConfig {
|
||||
|
||||
interface AppState {
|
||||
connections: SavedConnection[];
|
||||
connectionTags: ConnectionTag[];
|
||||
tabs: TabData[];
|
||||
activeTabId: string | null;
|
||||
activeContext: { connectionId: string; dbName: string } | null;
|
||||
savedQueries: SavedQuery[];
|
||||
theme: 'light' | 'dark';
|
||||
appearance: { opacity: number; blur: number };
|
||||
appearance: { enabled: boolean; opacity: number; blur: number };
|
||||
uiScale: number;
|
||||
fontSize: number;
|
||||
startupFullscreen: boolean;
|
||||
globalProxy: GlobalProxyConfig;
|
||||
sqlFormatOptions: { keywordCase: 'upper' | 'lower' };
|
||||
queryOptions: QueryOptions;
|
||||
shortcutOptions: ShortcutOptions;
|
||||
sqlLogs: SqlLog[];
|
||||
tableAccessCount: Record<string, number>;
|
||||
tableSortPreference: Record<string, 'name' | 'frequency'>;
|
||||
@@ -345,6 +421,12 @@ interface AppState {
|
||||
updateConnection: (conn: SavedConnection) => void;
|
||||
removeConnection: (id: string) => void;
|
||||
|
||||
addConnectionTag: (tag: ConnectionTag) => void;
|
||||
updateConnectionTag: (tag: ConnectionTag) => void;
|
||||
removeConnectionTag: (id: string) => void;
|
||||
moveConnectionToTag: (connectionId: string, targetTagId: string | null) => void;
|
||||
reorderTags: (tagIds: string[]) => void;
|
||||
|
||||
addTab: (tab: TabData) => void;
|
||||
closeTab: (id: string) => void;
|
||||
closeOtherTabs: (id: string) => void;
|
||||
@@ -361,13 +443,15 @@ interface AppState {
|
||||
deleteQuery: (id: string) => void;
|
||||
|
||||
setTheme: (theme: 'light' | 'dark') => void;
|
||||
setAppearance: (appearance: Partial<{ opacity: number; blur: number }>) => void;
|
||||
setAppearance: (appearance: Partial<{ enabled: boolean; opacity: number; blur: number }>) => void;
|
||||
setUiScale: (scale: number) => void;
|
||||
setFontSize: (size: number) => void;
|
||||
setStartupFullscreen: (enabled: boolean) => void;
|
||||
setGlobalProxy: (proxy: Partial<GlobalProxyConfig>) => void;
|
||||
setSqlFormatOptions: (options: { keywordCase: 'upper' | 'lower' }) => void;
|
||||
setQueryOptions: (options: Partial<QueryOptions>) => void;
|
||||
updateShortcut: (action: ShortcutAction, binding: Partial<ShortcutBinding>) => void;
|
||||
resetShortcutOptions: () => void;
|
||||
|
||||
addSqlLog: (log: SqlLog) => void;
|
||||
clearSqlLogs: () => void;
|
||||
@@ -438,13 +522,14 @@ const sanitizeTableSortPreference = (value: unknown): Record<string, 'name' | 'f
|
||||
};
|
||||
|
||||
const sanitizeAppearance = (
|
||||
appearance: Partial<{ opacity: number; blur: number }> | undefined,
|
||||
appearance: Partial<{ enabled: boolean; opacity: number; blur: number }> | undefined,
|
||||
version: number
|
||||
): { opacity: number; blur: number } => {
|
||||
): { enabled: boolean; opacity: number; blur: number } => {
|
||||
if (!appearance || typeof appearance !== 'object') {
|
||||
return { ...DEFAULT_APPEARANCE };
|
||||
}
|
||||
const nextAppearance = {
|
||||
enabled: typeof appearance.enabled === 'boolean' ? appearance.enabled : DEFAULT_APPEARANCE.enabled,
|
||||
opacity: typeof appearance.opacity === 'number' ? appearance.opacity : DEFAULT_APPEARANCE.opacity,
|
||||
blur: typeof appearance.blur === 'number' ? appearance.blur : DEFAULT_APPEARANCE.blur,
|
||||
};
|
||||
@@ -496,6 +581,7 @@ export const useStore = create<AppState>()(
|
||||
persist(
|
||||
(set) => ({
|
||||
connections: [],
|
||||
connectionTags: [],
|
||||
tabs: [],
|
||||
activeTabId: null,
|
||||
activeContext: null,
|
||||
@@ -508,6 +594,7 @@ export const useStore = create<AppState>()(
|
||||
globalProxy: { ...DEFAULT_GLOBAL_PROXY },
|
||||
sqlFormatOptions: { keywordCase: 'upper' },
|
||||
queryOptions: { maxRows: 5000, showColumnComment: true, showColumnType: true },
|
||||
shortcutOptions: cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS),
|
||||
sqlLogs: [],
|
||||
tableAccessCount: {},
|
||||
tableSortPreference: {},
|
||||
@@ -516,7 +603,46 @@ export const useStore = create<AppState>()(
|
||||
updateConnection: (conn) => set((state) => ({
|
||||
connections: state.connections.map(c => c.id === conn.id ? conn : c)
|
||||
})),
|
||||
removeConnection: (id) => set((state) => ({ connections: state.connections.filter(c => c.id !== id) })),
|
||||
removeConnection: (id) => set((state) => ({
|
||||
connections: state.connections.filter(c => c.id !== id),
|
||||
connectionTags: state.connectionTags.map(tag => ({
|
||||
...tag,
|
||||
connectionIds: tag.connectionIds.filter(cid => cid !== id)
|
||||
}))
|
||||
})),
|
||||
|
||||
addConnectionTag: (tag) => set((state) => ({ connectionTags: [...state.connectionTags, tag] })),
|
||||
updateConnectionTag: (tag) => set((state) => ({
|
||||
connectionTags: state.connectionTags.map(t => t.id === tag.id ? tag : t)
|
||||
})),
|
||||
removeConnectionTag: (id) => set((state) => ({
|
||||
connectionTags: state.connectionTags.filter(t => t.id !== id)
|
||||
})),
|
||||
moveConnectionToTag: (connectionId, targetTagId) => set((state) => {
|
||||
const newTags = state.connectionTags.map(tag => {
|
||||
//先从所有tag中移除该connection
|
||||
const filteredIds = tag.connectionIds.filter(id => id !== connectionId);
|
||||
if (tag.id === targetTagId) {
|
||||
return { ...tag, connectionIds: [...filteredIds, connectionId] };
|
||||
}
|
||||
return { ...tag, connectionIds: filteredIds };
|
||||
});
|
||||
return { connectionTags: newTags };
|
||||
}),
|
||||
reorderTags: (tagIds) => set((state) => {
|
||||
const tagMap = new Map(state.connectionTags.map(t => [t.id, t]));
|
||||
const newTags: ConnectionTag[] = [];
|
||||
tagIds.forEach(id => {
|
||||
const tag = tagMap.get(id);
|
||||
if (tag) {
|
||||
newTags.push(tag);
|
||||
tagMap.delete(id);
|
||||
}
|
||||
});
|
||||
// 追加未指定的tag(如果有的话)
|
||||
newTags.push(...Array.from(tagMap.values()));
|
||||
return { connectionTags: newTags };
|
||||
}),
|
||||
|
||||
addTab: (tab) => set((state) => {
|
||||
const index = state.tabs.findIndex(t => t.id === tab.id);
|
||||
@@ -640,6 +766,16 @@ export const useStore = create<AppState>()(
|
||||
setGlobalProxy: (proxy) => set((state) => ({ globalProxy: sanitizeGlobalProxy({ ...state.globalProxy, ...proxy }) })),
|
||||
setSqlFormatOptions: (options) => set({ sqlFormatOptions: options }),
|
||||
setQueryOptions: (options) => set((state) => ({ queryOptions: { ...state.queryOptions, ...options } })),
|
||||
updateShortcut: (action, binding) => set((state) => ({
|
||||
shortcutOptions: {
|
||||
...state.shortcutOptions,
|
||||
[action]: {
|
||||
...state.shortcutOptions[action],
|
||||
...binding,
|
||||
},
|
||||
},
|
||||
})),
|
||||
resetShortcutOptions: () => set({ shortcutOptions: cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS) }),
|
||||
|
||||
addSqlLog: (log) => set((state) => ({ sqlLogs: [log, ...state.sqlLogs].slice(0, 1000) })), // Keep last 1000 logs
|
||||
clearSqlLogs: () => set({ sqlLogs: [] }),
|
||||
@@ -672,6 +808,11 @@ export const useStore = create<AppState>()(
|
||||
const state = unwrapPersistedAppState(persistedState) as Partial<AppState>;
|
||||
const nextState: Partial<AppState> = { ...state };
|
||||
nextState.connections = sanitizeConnections(state.connections);
|
||||
if (version < 5) {
|
||||
nextState.connectionTags = sanitizeConnectionTags(state.connectionTags);
|
||||
} else {
|
||||
nextState.connectionTags = sanitizeConnectionTags(state.connectionTags);
|
||||
}
|
||||
nextState.savedQueries = sanitizeSavedQueries(state.savedQueries);
|
||||
nextState.theme = sanitizeTheme(state.theme);
|
||||
nextState.appearance = sanitizeAppearance(state.appearance, version);
|
||||
@@ -681,6 +822,7 @@ export const useStore = create<AppState>()(
|
||||
nextState.globalProxy = sanitizeGlobalProxy(state.globalProxy);
|
||||
nextState.sqlFormatOptions = sanitizeSqlFormatOptions(state.sqlFormatOptions);
|
||||
nextState.queryOptions = sanitizeQueryOptions(state.queryOptions);
|
||||
nextState.shortcutOptions = sanitizeShortcutOptions(state.shortcutOptions);
|
||||
nextState.tableAccessCount = sanitizeTableAccessCount(state.tableAccessCount);
|
||||
nextState.tableSortPreference = sanitizeTableSortPreference(state.tableSortPreference);
|
||||
return nextState as AppState;
|
||||
@@ -691,6 +833,7 @@ export const useStore = create<AppState>()(
|
||||
...currentState,
|
||||
...state,
|
||||
connections: sanitizeConnections(state.connections),
|
||||
connectionTags: sanitizeConnectionTags(state.connectionTags),
|
||||
savedQueries: sanitizeSavedQueries(state.savedQueries),
|
||||
theme: sanitizeTheme(state.theme),
|
||||
appearance: sanitizeAppearance(state.appearance, PERSIST_VERSION),
|
||||
@@ -700,12 +843,14 @@ export const useStore = create<AppState>()(
|
||||
globalProxy: sanitizeGlobalProxy(state.globalProxy),
|
||||
sqlFormatOptions: sanitizeSqlFormatOptions(state.sqlFormatOptions),
|
||||
queryOptions: sanitizeQueryOptions(state.queryOptions),
|
||||
shortcutOptions: sanitizeShortcutOptions(state.shortcutOptions),
|
||||
tableAccessCount: sanitizeTableAccessCount(state.tableAccessCount),
|
||||
tableSortPreference: sanitizeTableSortPreference(state.tableSortPreference),
|
||||
};
|
||||
},
|
||||
partialize: (state) => ({
|
||||
connections: state.connections,
|
||||
connectionTags: state.connectionTags,
|
||||
savedQueries: state.savedQueries,
|
||||
theme: state.theme,
|
||||
appearance: state.appearance,
|
||||
@@ -715,6 +860,7 @@ export const useStore = create<AppState>()(
|
||||
globalProxy: state.globalProxy,
|
||||
sqlFormatOptions: state.sqlFormatOptions,
|
||||
queryOptions: state.queryOptions,
|
||||
shortcutOptions: state.shortcutOptions,
|
||||
tableAccessCount: state.tableAccessCount,
|
||||
tableSortPreference: state.tableSortPreference
|
||||
}), // Don't persist logs
|
||||
|
||||
@@ -14,6 +14,13 @@ export interface ProxyConfig {
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface HTTPTunnelConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface ConnectionConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
@@ -22,10 +29,16 @@ export interface ConnectionConfig {
|
||||
password?: string;
|
||||
savePassword?: boolean;
|
||||
database?: string;
|
||||
useSSL?: boolean;
|
||||
sslMode?: 'preferred' | 'required' | 'skip-verify' | 'disable';
|
||||
sslCertPath?: string;
|
||||
sslKeyPath?: string;
|
||||
useSSH?: boolean;
|
||||
ssh?: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
useHttpTunnel?: boolean;
|
||||
httpTunnel?: HTTPTunnelConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
@@ -61,6 +74,12 @@ export interface SavedConnection {
|
||||
includeRedisDatabases?: number[]; // Redis databases to show (0-15)
|
||||
}
|
||||
|
||||
export interface ConnectionTag {
|
||||
id: string;
|
||||
name: string;
|
||||
connectionIds: string[];
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
name: string;
|
||||
type: string;
|
||||
|
||||
@@ -10,6 +10,22 @@ const WINDOWS_BLUR_FACTOR = 1.00;
|
||||
|
||||
const clamp = (value: number, min: number, max: number) => Math.min(max, Math.max(min, value));
|
||||
|
||||
export interface AppearanceSettingsLike {
|
||||
enabled?: boolean;
|
||||
opacity?: number;
|
||||
blur?: number;
|
||||
}
|
||||
|
||||
export const resolveAppearanceValues = (appearance: AppearanceSettingsLike | undefined): { opacity: number; blur: number } => {
|
||||
if (!appearance || appearance.enabled !== false) {
|
||||
return {
|
||||
opacity: appearance?.opacity ?? DEFAULT_OPACITY,
|
||||
blur: appearance?.blur ?? 0,
|
||||
};
|
||||
}
|
||||
return { opacity: DEFAULT_OPACITY, blur: 0 };
|
||||
};
|
||||
|
||||
export const isMacLikePlatform = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
|
||||
1014
frontend/src/utils/mongodb.ts
Normal file
1014
frontend/src/utils/mongodb.ts
Normal file
File diff suppressed because it is too large
Load Diff
258
frontend/src/utils/shortcuts.ts
Normal file
258
frontend/src/utils/shortcuts.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import type { KeyboardEvent as ReactKeyboardEvent } from 'react';
|
||||
|
||||
export type ShortcutAction =
|
||||
| 'runQuery'
|
||||
| 'focusSidebarSearch'
|
||||
| 'newQueryTab'
|
||||
| 'toggleLogPanel'
|
||||
| 'toggleTheme'
|
||||
| 'openShortcutManager';
|
||||
|
||||
export interface ShortcutBinding {
|
||||
combo: string;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
export type ShortcutOptions = Record<ShortcutAction, ShortcutBinding>;
|
||||
|
||||
export interface ShortcutActionMeta {
|
||||
label: string;
|
||||
description: string;
|
||||
allowInEditable?: boolean;
|
||||
}
|
||||
|
||||
const MODIFIER_ORDER = ['Ctrl', 'Meta', 'Alt', 'Shift'] as const;
|
||||
const MODIFIER_SET = new Set(MODIFIER_ORDER);
|
||||
|
||||
const KEY_ALIASES: Record<string, string> = {
|
||||
control: 'Ctrl',
|
||||
ctrl: 'Ctrl',
|
||||
command: 'Meta',
|
||||
cmd: 'Meta',
|
||||
meta: 'Meta',
|
||||
option: 'Alt',
|
||||
alt: 'Alt',
|
||||
shift: 'Shift',
|
||||
escape: 'Esc',
|
||||
esc: 'Esc',
|
||||
return: 'Enter',
|
||||
enter: 'Enter',
|
||||
tab: 'Tab',
|
||||
space: 'Space',
|
||||
' ': 'Space',
|
||||
backspace: 'Backspace',
|
||||
delete: 'Delete',
|
||||
del: 'Delete',
|
||||
arrowup: 'Up',
|
||||
up: 'Up',
|
||||
arrowdown: 'Down',
|
||||
down: 'Down',
|
||||
arrowleft: 'Left',
|
||||
left: 'Left',
|
||||
arrowright: 'Right',
|
||||
right: 'Right',
|
||||
pagedown: 'PageDown',
|
||||
pageup: 'PageUp',
|
||||
home: 'Home',
|
||||
end: 'End',
|
||||
insert: 'Insert',
|
||||
',': ',',
|
||||
'.': '.',
|
||||
'/': '/',
|
||||
';': ';',
|
||||
"'": "'",
|
||||
'[': '[',
|
||||
']': ']',
|
||||
'\\': '\\',
|
||||
'-': '-',
|
||||
'=': '=',
|
||||
'`': '`',
|
||||
};
|
||||
|
||||
export const SHORTCUT_ACTION_ORDER: ShortcutAction[] = [
|
||||
'runQuery',
|
||||
'focusSidebarSearch',
|
||||
'newQueryTab',
|
||||
'toggleLogPanel',
|
||||
'toggleTheme',
|
||||
'openShortcutManager',
|
||||
];
|
||||
|
||||
export const SHORTCUT_ACTION_META: Record<ShortcutAction, ShortcutActionMeta> = {
|
||||
runQuery: {
|
||||
label: '执行 SQL',
|
||||
description: '在当前查询页执行 SQL',
|
||||
},
|
||||
focusSidebarSearch: {
|
||||
label: '聚焦侧边栏搜索',
|
||||
description: '定位到左侧连接树搜索框',
|
||||
allowInEditable: true,
|
||||
},
|
||||
newQueryTab: {
|
||||
label: '新建查询页',
|
||||
description: '创建一个新的 SQL 查询标签页',
|
||||
},
|
||||
toggleLogPanel: {
|
||||
label: '切换日志面板',
|
||||
description: '打开或关闭 SQL 执行日志面板',
|
||||
},
|
||||
toggleTheme: {
|
||||
label: '切换主题',
|
||||
description: '在亮色和暗色主题之间切换',
|
||||
},
|
||||
openShortcutManager: {
|
||||
label: '打开快捷键管理',
|
||||
description: '打开快捷键设置面板',
|
||||
allowInEditable: true,
|
||||
},
|
||||
};
|
||||
|
||||
export const DEFAULT_SHORTCUT_OPTIONS: ShortcutOptions = {
|
||||
runQuery: { combo: 'Ctrl+Shift+R', enabled: true },
|
||||
focusSidebarSearch: { combo: 'Ctrl+F', enabled: true },
|
||||
newQueryTab: { combo: 'Ctrl+Shift+N', enabled: true },
|
||||
toggleLogPanel: { combo: 'Ctrl+Shift+L', enabled: true },
|
||||
toggleTheme: { combo: 'Ctrl+Shift+D', enabled: true },
|
||||
openShortcutManager: { combo: 'Ctrl+,', enabled: true },
|
||||
};
|
||||
|
||||
const normalizeKeyToken = (value: string): string => {
|
||||
const token = String(value || '').trim();
|
||||
if (!token) return '';
|
||||
const alias = KEY_ALIASES[token.toLowerCase()];
|
||||
if (alias) return alias;
|
||||
if (/^f([1-9]|1[0-2])$/i.test(token)) {
|
||||
return token.toUpperCase();
|
||||
}
|
||||
if (token.length === 1) {
|
||||
return token === '+' ? '+' : token.toUpperCase();
|
||||
}
|
||||
return token.length > 1 ? token[0].toUpperCase() + token.slice(1).toLowerCase() : token;
|
||||
};
|
||||
|
||||
export const normalizeShortcutCombo = (combo: string): string => {
|
||||
const raw = String(combo || '').trim();
|
||||
if (!raw) return '';
|
||||
|
||||
const pieces = raw
|
||||
.split('+')
|
||||
.map(part => part.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
const modifiers: string[] = [];
|
||||
let key = '';
|
||||
|
||||
pieces.forEach((part) => {
|
||||
const normalized = normalizeKeyToken(part);
|
||||
if (!normalized) return;
|
||||
if (MODIFIER_SET.has(normalized as typeof MODIFIER_ORDER[number])) {
|
||||
if (!modifiers.includes(normalized)) {
|
||||
modifiers.push(normalized);
|
||||
}
|
||||
return;
|
||||
}
|
||||
key = normalized;
|
||||
});
|
||||
|
||||
modifiers.sort((a, b) => MODIFIER_ORDER.indexOf(a as typeof MODIFIER_ORDER[number]) - MODIFIER_ORDER.indexOf(b as typeof MODIFIER_ORDER[number]));
|
||||
if (!key) {
|
||||
return modifiers.join('+');
|
||||
}
|
||||
return [...modifiers, key].join('+');
|
||||
};
|
||||
|
||||
const normalizeKeyboardKey = (key: string): string => {
|
||||
const token = String(key || '').trim();
|
||||
if (!token) return '';
|
||||
const alias = KEY_ALIASES[token.toLowerCase()];
|
||||
if (alias) return alias;
|
||||
if (token.length === 1) {
|
||||
if (token === ' ') return 'Space';
|
||||
return token.toUpperCase();
|
||||
}
|
||||
if (/^f([1-9]|1[0-2])$/i.test(token)) {
|
||||
return token.toUpperCase();
|
||||
}
|
||||
return token.length > 1 ? token[0].toUpperCase() + token.slice(1) : token;
|
||||
};
|
||||
|
||||
export const eventToShortcut = (event: KeyboardEvent | ReactKeyboardEvent): string => {
|
||||
const key = normalizeKeyboardKey(event.key);
|
||||
if (!key || MODIFIER_SET.has(key as typeof MODIFIER_ORDER[number])) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const modifiers: string[] = [];
|
||||
if (event.ctrlKey) modifiers.push('Ctrl');
|
||||
if (event.metaKey) modifiers.push('Meta');
|
||||
if (event.altKey) modifiers.push('Alt');
|
||||
if (event.shiftKey) modifiers.push('Shift');
|
||||
|
||||
return normalizeShortcutCombo([...modifiers, key].join('+'));
|
||||
};
|
||||
|
||||
export const isShortcutMatch = (event: KeyboardEvent | ReactKeyboardEvent, combo: string): boolean => {
|
||||
const expected = normalizeShortcutCombo(combo);
|
||||
if (!expected) return false;
|
||||
const actual = eventToShortcut(event);
|
||||
return actual === expected;
|
||||
};
|
||||
|
||||
export const hasModifierKey = (combo: string): boolean => {
|
||||
const normalized = normalizeShortcutCombo(combo);
|
||||
if (!normalized) return false;
|
||||
return normalized.split('+').some(part => MODIFIER_SET.has(part as typeof MODIFIER_ORDER[number]));
|
||||
};
|
||||
|
||||
export const cloneShortcutOptions = (value: ShortcutOptions): ShortcutOptions => {
|
||||
return SHORTCUT_ACTION_ORDER.reduce((acc, action) => {
|
||||
acc[action] = {
|
||||
combo: normalizeShortcutCombo(value[action]?.combo || DEFAULT_SHORTCUT_OPTIONS[action].combo),
|
||||
enabled: value[action]?.enabled !== false,
|
||||
};
|
||||
return acc;
|
||||
}, {} as ShortcutOptions);
|
||||
};
|
||||
|
||||
export const sanitizeShortcutOptions = (value: unknown): ShortcutOptions => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const defaults = cloneShortcutOptions(DEFAULT_SHORTCUT_OPTIONS);
|
||||
|
||||
SHORTCUT_ACTION_ORDER.forEach((action) => {
|
||||
const actionRaw = raw[action];
|
||||
if (!actionRaw || typeof actionRaw !== 'object') {
|
||||
return;
|
||||
}
|
||||
const binding = actionRaw as Record<string, unknown>;
|
||||
const combo = normalizeShortcutCombo(String(binding.combo || defaults[action].combo));
|
||||
defaults[action] = {
|
||||
combo: combo || defaults[action].combo,
|
||||
enabled: binding.enabled === false ? false : true,
|
||||
};
|
||||
});
|
||||
|
||||
return defaults;
|
||||
};
|
||||
|
||||
export const isEditableElement = (target: EventTarget | null): boolean => {
|
||||
if (!(target instanceof HTMLElement)) {
|
||||
return false;
|
||||
}
|
||||
const tag = target.tagName.toLowerCase();
|
||||
if (target.isContentEditable) {
|
||||
return true;
|
||||
}
|
||||
if (tag === 'input' || tag === 'textarea' || tag === 'select') {
|
||||
return true;
|
||||
}
|
||||
if (target.closest('.monaco-editor, .monaco-inputbox, .ant-select, .ant-picker, .ant-input')) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export const getShortcutDisplay = (combo: string): string => {
|
||||
const normalized = normalizeShortcutCombo(combo);
|
||||
return normalized || '-';
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
export type FilterCondition = {
|
||||
id?: number;
|
||||
enabled?: boolean;
|
||||
logic?: 'AND' | 'OR';
|
||||
column?: string;
|
||||
op?: string;
|
||||
value?: string;
|
||||
@@ -142,8 +143,12 @@ export const parseListValues = (val: string) => {
|
||||
.filter(Boolean);
|
||||
};
|
||||
|
||||
const normalizeConditionLogic = (logic: unknown): 'AND' | 'OR' => {
|
||||
return String(logic || '').trim().toUpperCase() === 'OR' ? 'OR' : 'AND';
|
||||
};
|
||||
|
||||
export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) => {
|
||||
const whereParts: string[] = [];
|
||||
const whereParts: Array<{ expr: string; logic: 'AND' | 'OR' }> = [];
|
||||
|
||||
(conditions || []).forEach((cond) => {
|
||||
if (cond?.enabled === false) return;
|
||||
@@ -152,10 +157,17 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
const column = (cond?.column || '').trim();
|
||||
const value = (cond?.value ?? '').toString();
|
||||
const value2 = (cond?.value2 ?? '').toString();
|
||||
const logic = normalizeConditionLogic(cond?.logic);
|
||||
|
||||
const appendWherePart = (expr: string) => {
|
||||
const normalizedExpr = String(expr || '').trim();
|
||||
if (!normalizedExpr) return;
|
||||
whereParts.push({ expr: normalizedExpr, logic });
|
||||
};
|
||||
|
||||
if (op === 'CUSTOM') {
|
||||
const expr = value.trim();
|
||||
if (expr) whereParts.push(`(${expr})`);
|
||||
if (expr) appendWherePart(`(${expr})`);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -165,80 +177,80 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
|
||||
switch (op) {
|
||||
case 'IS_NULL':
|
||||
whereParts.push(`${col} IS NULL`);
|
||||
appendWherePart(`${col} IS NULL`);
|
||||
return;
|
||||
case 'IS_NOT_NULL':
|
||||
whereParts.push(`${col} IS NOT NULL`);
|
||||
appendWherePart(`${col} IS NOT NULL`);
|
||||
return;
|
||||
case 'IS_EMPTY':
|
||||
// 兼容:空值通常理解为 NULL 或空字符串
|
||||
whereParts.push(`(${col} IS NULL OR ${col} = '')`);
|
||||
appendWherePart(`(${col} IS NULL OR ${col} = '')`);
|
||||
return;
|
||||
case 'IS_NOT_EMPTY':
|
||||
whereParts.push(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
appendWherePart(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
return;
|
||||
case 'BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
appendWherePart(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
appendWherePart(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} IN (${list})`);
|
||||
appendWherePart(`${col} IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} NOT IN (${list})`);
|
||||
appendWherePart(`${col} NOT IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case '=':
|
||||
@@ -249,7 +261,7 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
case '>=': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
@@ -257,16 +269,23 @@ export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) =>
|
||||
if (op.toUpperCase() === 'LIKE') {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
appendWherePart(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
appendWherePart(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : '';
|
||||
if (whereParts.length === 0) return '';
|
||||
|
||||
let whereExpr = `(${whereParts[0].expr})`;
|
||||
for (let i = 1; i < whereParts.length; i++) {
|
||||
const part = whereParts[i];
|
||||
whereExpr = `(${whereExpr} ${part.logic} (${part.expr}))`;
|
||||
}
|
||||
return `WHERE ${whereExpr}`;
|
||||
};
|
||||
|
||||
9
frontend/wailsjs/go/app/App.d.ts
vendored
9
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -1,15 +1,20 @@
|
||||
// Cynhyrchwyd y ffeil hon yn awtomatig. PEIDIWCH Â MODIWL
|
||||
// This file is automatically generated. DO NOT EDIT
|
||||
import {connection} from '../models';
|
||||
import {time} from '../models';
|
||||
import {sync} from '../models';
|
||||
import {redis} from '../models';
|
||||
|
||||
export function ApplyChanges(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:connection.ChangeSet):Promise<connection.QueryResult>;
|
||||
|
||||
export function CancelQuery(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckDriverNetworkStatus():Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckForUpdates():Promise<connection.QueryResult>;
|
||||
|
||||
export function CleanupStaleQueries(arg1:time.Duration):Promise<void>;
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ConfigureGlobalProxy(arg1:boolean,arg2:connection.ProxyConfig):Promise<connection.QueryResult>;
|
||||
@@ -36,6 +41,8 @@ export function DBQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:string
|
||||
|
||||
export function DBQueryIsolated(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryWithCancel(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
@@ -68,6 +75,8 @@ export function ExportTablesDataSQL(arg1:connection.ConnectionConfig,arg2:string
|
||||
|
||||
export function ExportTablesSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>,arg4:boolean):Promise<connection.QueryResult>;
|
||||
|
||||
export function GenerateQueryID():Promise<string>;
|
||||
|
||||
export function GetAppInfo():Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverStatusList(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -6,6 +6,10 @@ export function ApplyChanges(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ApplyChanges'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function CancelQuery(arg1) {
|
||||
return window['go']['app']['App']['CancelQuery'](arg1);
|
||||
}
|
||||
|
||||
export function CheckDriverNetworkStatus() {
|
||||
return window['go']['app']['App']['CheckDriverNetworkStatus']();
|
||||
}
|
||||
@@ -14,6 +18,10 @@ export function CheckForUpdates() {
|
||||
return window['go']['app']['App']['CheckForUpdates']();
|
||||
}
|
||||
|
||||
export function CleanupStaleQueries(arg1) {
|
||||
return window['go']['app']['App']['CleanupStaleQueries'](arg1);
|
||||
}
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1) {
|
||||
return window['go']['app']['App']['ConfigureDriverRuntimeDirectory'](arg1);
|
||||
}
|
||||
@@ -66,6 +74,10 @@ export function DBQueryIsolated(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQueryIsolated'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBQueryWithCancel(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DBQueryWithCancel'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DBShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
@@ -130,6 +142,10 @@ export function ExportTablesSQL(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportTablesSQL'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function GenerateQueryID() {
|
||||
return window['go']['app']['App']['GenerateQueryID']();
|
||||
}
|
||||
|
||||
export function GetAppInfo() {
|
||||
return window['go']['app']['App']['GetAppInfo']();
|
||||
}
|
||||
|
||||
@@ -48,6 +48,24 @@ export namespace connection {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
export class HTTPTunnelConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new HTTPTunnelConfig(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.host = source["host"];
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
}
|
||||
}
|
||||
export class ProxyConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
@@ -96,10 +114,16 @@ export namespace connection {
|
||||
password: string;
|
||||
savePassword?: boolean;
|
||||
database: string;
|
||||
useSSL?: boolean;
|
||||
sslMode?: string;
|
||||
sslCertPath?: string;
|
||||
sslKeyPath?: string;
|
||||
useSSH: boolean;
|
||||
ssh: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
useHttpTunnel?: boolean;
|
||||
httpTunnel?: HTTPTunnelConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
@@ -130,10 +154,16 @@ export namespace connection {
|
||||
this.password = source["password"];
|
||||
this.savePassword = source["savePassword"];
|
||||
this.database = source["database"];
|
||||
this.useSSL = source["useSSL"];
|
||||
this.sslMode = source["sslMode"];
|
||||
this.sslCertPath = source["sslCertPath"];
|
||||
this.sslKeyPath = source["sslKeyPath"];
|
||||
this.useSSH = source["useSSH"];
|
||||
this.ssh = this.convertValues(source["ssh"], SSHConfig);
|
||||
this.useProxy = source["useProxy"];
|
||||
this.proxy = this.convertValues(source["proxy"], ProxyConfig);
|
||||
this.useHttpTunnel = source["useHttpTunnel"];
|
||||
this.httpTunnel = this.convertValues(source["httpTunnel"], HTTPTunnelConfig);
|
||||
this.driver = source["driver"];
|
||||
this.dsn = source["dsn"];
|
||||
this.timeout = source["timeout"];
|
||||
@@ -171,11 +201,13 @@ export namespace connection {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class QueryResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
data: any;
|
||||
fields?: string[];
|
||||
queryId?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new QueryResult(source);
|
||||
@@ -187,6 +219,7 @@ export namespace connection {
|
||||
this.message = source["message"];
|
||||
this.data = source["data"];
|
||||
this.fields = source["fields"];
|
||||
this.queryId = source["queryId"];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
4
go.mod
4
go.mod
@@ -8,6 +8,7 @@ require (
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5
|
||||
github.com/go-sql-driver/mysql v1.9.3
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/highgo/pq-sm3 v0.0.0
|
||||
github.com/lib/pq v1.11.1
|
||||
github.com/microsoft/go-mssqldb v1.9.6
|
||||
@@ -16,6 +17,7 @@ require (
|
||||
github.com/taosdata/driver-go/v3 v3.7.8
|
||||
github.com/wailsapp/wails/v2 v2.11.0
|
||||
github.com/xuri/excelize/v2 v2.10.0
|
||||
go.mongodb.org/mongo-driver v1.17.9
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
golang.org/x/mod v0.32.0
|
||||
@@ -49,7 +51,6 @@ require (
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/flatbuffers v25.12.19+incompatible // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
|
||||
@@ -66,6 +67,7 @@ require (
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/montanaflynn/stats v0.7.1 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/paulmach/orb v0.12.0 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.25 // indirect
|
||||
|
||||
4
go.sum
4
go.sum
@@ -156,6 +156,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
@@ -248,6 +250,8 @@ github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN
|
||||
github.com/zeebo/xxh3 v1.1.0 h1:s7DLGDK45Dyfg7++yxI0khrfwq9661w9EN78eP/UZVs=
|
||||
github.com/zeebo/xxh3 v1.1.0/go.mod h1:IisAie1LELR4xhVinxWS5+zf1lA4p0MW4T+w+W07F5s=
|
||||
go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
|
||||
go.mongodb.org/mongo-driver v1.17.9 h1:IexDdCuuNJ3BHrELgBlyaH9p60JXAvdzWR128q+U5tU=
|
||||
go.mongodb.org/mongo-driver v1.17.9/go.mod h1:LlOhpH5NUEfhxcAwG0UEkMqwYcc4JU18gtCdGudk/tQ=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const dbCachePingInterval = 30 * time.Second
|
||||
@@ -25,19 +26,27 @@ type cachedDatabase struct {
|
||||
lastPing time.Time
|
||||
}
|
||||
|
||||
type queryContext struct {
|
||||
cancel context.CancelFunc
|
||||
started time.Time
|
||||
}
|
||||
|
||||
// App struct
|
||||
type App struct {
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
queryMu sync.RWMutex
|
||||
runningQueries map[string]queryContext // queryID -> cancelFunc and start time
|
||||
}
|
||||
|
||||
// NewApp creates a new App application struct
|
||||
func NewApp() *App {
|
||||
return &App{
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
runningQueries: make(map[string]queryContext),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -87,6 +96,9 @@ func normalizeCacheKeyConfig(config connection.ConnectionConfig) connection.Conn
|
||||
if !normalized.UseProxy {
|
||||
normalized.Proxy = connection.ProxyConfig{}
|
||||
}
|
||||
if !normalized.UseHTTPTunnel {
|
||||
normalized.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
}
|
||||
|
||||
if isFileDatabaseType(normalized.Type) {
|
||||
dsn := strings.TrimSpace(normalized.Host)
|
||||
@@ -115,6 +127,8 @@ func normalizeCacheKeyConfig(config connection.ConnectionConfig) connection.Conn
|
||||
normalized.MongoAuthMechanism = ""
|
||||
normalized.MongoReplicaUser = ""
|
||||
normalized.MongoReplicaPassword = ""
|
||||
normalized.UseHTTPTunnel = false
|
||||
normalized.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
}
|
||||
|
||||
return normalized
|
||||
@@ -139,6 +153,67 @@ func getCacheKey(config connection.ConnectionConfig) string {
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func shortCacheKey(cacheKey string) string {
|
||||
shortKey := cacheKey
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
return shortKey
|
||||
}
|
||||
|
||||
func shouldRefreshCachedConnection(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
normalized := strings.ToLower(normalizeErrorMessage(err))
|
||||
if normalized == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
patterns := []string{
|
||||
"invalid connection",
|
||||
"bad connection",
|
||||
"database is closed",
|
||||
"connection is already closed",
|
||||
"use of closed network connection",
|
||||
"broken pipe",
|
||||
"connection reset by peer",
|
||||
"server has gone away",
|
||||
"eof",
|
||||
}
|
||||
for _, pattern := range patterns {
|
||||
if strings.Contains(normalized, pattern) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *App) invalidateCachedDatabase(config connection.ConnectionConfig, reason error) bool {
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
key := getCacheKey(effectiveConfig)
|
||||
shortKey := shortCacheKey(key)
|
||||
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
|
||||
entry, exists := a.dbCache[key]
|
||||
if !exists || entry.inst == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if closeErr := entry.inst.Close(); closeErr != nil {
|
||||
logger.Error(closeErr, "关闭失效缓存连接失败:缓存Key=%s", shortKey)
|
||||
}
|
||||
delete(a.dbCache, key)
|
||||
if reason != nil {
|
||||
logger.Errorf("检测到连接失效,已清理缓存连接:%s 缓存Key=%s 原因=%s", formatConnSummary(effectiveConfig), shortKey, normalizeErrorMessage(reason))
|
||||
} else {
|
||||
logger.Infof("已清理缓存连接:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func wrapConnectError(config connection.ConnectionConfig, err error) error {
|
||||
if err == nil {
|
||||
return nil
|
||||
@@ -233,6 +308,12 @@ func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
if config.UseHTTPTunnel {
|
||||
b.WriteString(fmt.Sprintf(" HTTP隧道=%s:%d", strings.TrimSpace(config.HTTPTunnel.Host), config.HTTPTunnel.Port))
|
||||
if strings.TrimSpace(config.HTTPTunnel.User) != "" {
|
||||
b.WriteString(" HTTP隧道认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
if config.Type == "custom" {
|
||||
driver := strings.TrimSpace(config.Driver)
|
||||
@@ -408,3 +489,43 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
// generateQueryID generates a unique ID for a query using UUID v4
|
||||
func generateQueryID() string {
|
||||
return "query-" + uuid.New().String()
|
||||
}
|
||||
|
||||
// CancelQuery cancels a running query by its ID
|
||||
func (a *App) CancelQuery(queryID string) connection.QueryResult {
|
||||
a.queryMu.Lock()
|
||||
defer a.queryMu.Unlock()
|
||||
|
||||
if ctx, exists := a.runningQueries[queryID]; exists {
|
||||
ctx.cancel()
|
||||
delete(a.runningQueries, queryID)
|
||||
logger.Infof("查询已取消:queryID=%s", queryID)
|
||||
return connection.QueryResult{Success: true, Message: "查询已取消"}
|
||||
}
|
||||
logger.Warnf("取消查询失败:queryID=%s 不存在或已完成", queryID)
|
||||
return connection.QueryResult{Success: false, Message: "查询不存在或已完成"}
|
||||
}
|
||||
|
||||
// CleanupStaleQueries removes queries older than maxAge
|
||||
func (a *App) CleanupStaleQueries(maxAge time.Duration) {
|
||||
a.queryMu.Lock()
|
||||
defer a.queryMu.Unlock()
|
||||
|
||||
now := time.Now()
|
||||
for id, ctx := range a.runningQueries {
|
||||
if now.Sub(ctx.started) > maxAge {
|
||||
// Query likely finished or stuck, remove from tracking
|
||||
delete(a.runningQueries, id)
|
||||
// Query expired, silently remove
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GenerateQueryID generates a unique query ID for cancellation tracking
|
||||
func (a *App) GenerateQueryID() string {
|
||||
return generateQueryID()
|
||||
}
|
||||
|
||||
@@ -36,6 +36,17 @@ func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string,
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
dbType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if dbType == "sqlserver" {
|
||||
// SQL Server 的 DB 接口约定:第一个参数是数据库名,schema 由 tableName(如 dbo.users) 自行解析。
|
||||
// 不能把 schema(dbo) 传到第一个参数,否则会拼出 dbo.sys.columns 等无效对象名。
|
||||
targetDB := rawDB
|
||||
if targetDB == "" {
|
||||
targetDB = strings.TrimSpace(config.Database)
|
||||
}
|
||||
return targetDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
@@ -44,13 +55,10 @@ func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string,
|
||||
}
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
// PG/金仓/瀚高/海量:dbName 在 UI 里是"数据库",schema 需从 tableName 或使用默认 public。
|
||||
return "public", rawTable
|
||||
case "sqlserver":
|
||||
// SQL Server:dbName 表示数据库,schema 默认 dbo
|
||||
return "dbo", rawTable
|
||||
default:
|
||||
// MySQL:dbName 表示数据库;Oracle/达梦:dbName 表示 schema/owner。
|
||||
return rawDB, rawTable
|
||||
|
||||
51
internal/app/db_context_test.go
Normal file
51
internal/app/db_context_test.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeSchemaAndTable_SQLServerKeepsDatabaseAndQualifiedTable(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schemaOrDb, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Database: "master",
|
||||
}, "biz_db", "dbo.users")
|
||||
|
||||
if schemaOrDb != "biz_db" {
|
||||
t.Fatalf("expected sqlserver first return value as database name, got %q", schemaOrDb)
|
||||
}
|
||||
if table != "dbo.users" {
|
||||
t.Fatalf("expected sqlserver table name keep qualified form, got %q", table)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeSchemaAndTable_SQLServerFallbackToConfigDatabase(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schemaOrDb, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Database: "biz_db",
|
||||
}, "", "dbo.users")
|
||||
|
||||
if schemaOrDb != "biz_db" {
|
||||
t.Fatalf("expected sqlserver fallback database from config, got %q", schemaOrDb)
|
||||
}
|
||||
if table != "dbo.users" {
|
||||
t.Fatalf("expected sqlserver table name keep qualified form, got %q", table)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeSchemaAndTable_PostgresStillSplitsQualifiedName(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
schema, table := normalizeSchemaAndTable(connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
}, "demo_db", "public.orders")
|
||||
|
||||
if schema != "public" || table != "orders" {
|
||||
t.Fatalf("expected postgres qualified split to public.orders, got %q.%q", schema, table)
|
||||
}
|
||||
}
|
||||
@@ -12,8 +12,35 @@ import (
|
||||
|
||||
func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.ConnectionConfig, error) {
|
||||
config := raw
|
||||
if config.UseHTTPTunnel {
|
||||
if config.UseProxy {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道与普通代理不能同时启用")
|
||||
}
|
||||
tunnelHost := strings.TrimSpace(config.HTTPTunnel.Host)
|
||||
if tunnelHost == "" {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道主机不能为空")
|
||||
}
|
||||
tunnelPort := config.HTTPTunnel.Port
|
||||
if tunnelPort <= 0 {
|
||||
tunnelPort = 8080
|
||||
}
|
||||
if tunnelPort > 65535 {
|
||||
return connection.ConnectionConfig{}, fmt.Errorf("HTTP 隧道端口无效:%d", config.HTTPTunnel.Port)
|
||||
}
|
||||
|
||||
config.UseProxy = true
|
||||
config.Proxy = connection.ProxyConfig{
|
||||
Type: "http",
|
||||
Host: tunnelHost,
|
||||
Port: tunnelPort,
|
||||
User: strings.TrimSpace(config.HTTPTunnel.User),
|
||||
Password: config.HTTPTunnel.Password,
|
||||
}
|
||||
}
|
||||
if !config.UseProxy {
|
||||
config.Proxy = connection.ProxyConfig{}
|
||||
config.UseHTTPTunnel = false
|
||||
config.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
return config, nil
|
||||
}
|
||||
|
||||
@@ -22,6 +49,8 @@ func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.Con
|
||||
return connection.ConnectionConfig{}, err
|
||||
}
|
||||
config.Proxy = normalizedProxy
|
||||
config.UseHTTPTunnel = false
|
||||
config.HTTPTunnel = connection.HTTPTunnelConfig{}
|
||||
|
||||
if config.UseSSH {
|
||||
sshPort := config.SSH.Port
|
||||
|
||||
@@ -110,7 +110,7 @@ func (a *App) GetGlobalProxyConfig() connection.QueryResult {
|
||||
|
||||
func applyGlobalProxyToConnection(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
effective := config
|
||||
if effective.UseProxy {
|
||||
if effective.UseProxy || effective.UseHTTPTunnel {
|
||||
return effective
|
||||
}
|
||||
if isFileDatabaseType(effective.Type) {
|
||||
|
||||
@@ -376,12 +376,21 @@ func (a *App) MySQLShowCreateTable(config connection.ConnectionConfig, dbName st
|
||||
}
|
||||
|
||||
func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
return a.DBQueryWithCancel(config, dbName, query, "")
|
||||
}
|
||||
|
||||
func (a *App) DBQueryWithCancel(config connection.ConnectionConfig, dbName string, query string, queryID string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
// Generate query ID if not provided
|
||||
if queryID == "" {
|
||||
queryID = generateQueryID()
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
@@ -392,41 +401,75 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
lowerQuery := strings.TrimSpace(strings.ToLower(query))
|
||||
isReadQuery := strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain")
|
||||
// MongoDB JSON 命令中的 find/count/aggregate 也属于读查询
|
||||
if !isReadQuery && strings.ToLower(strings.TrimSpace(runConfig.Type)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
|
||||
isReadQuery = true
|
||||
// Store cancel function for potential manual cancellation
|
||||
a.queryMu.Lock()
|
||||
a.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
if isReadQuery {
|
||||
var data []map[string]interface{}
|
||||
var columns []string
|
||||
if q, ok := dbInst.(interface {
|
||||
a.queryMu.Unlock()
|
||||
|
||||
// Ensure query is removed from tracking when done
|
||||
defer func() {
|
||||
a.queryMu.Lock()
|
||||
delete(a.runningQueries, queryID)
|
||||
a.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
isReadQuery := isReadOnlySQLQuery(runConfig.Type, query)
|
||||
|
||||
runReadQuery := func(inst db.Database) ([]map[string]interface{}, []string, error) {
|
||||
if q, ok := inst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
data, columns, err = q.QueryContext(ctx, query)
|
||||
} else {
|
||||
data, columns, err = dbInst.Query(query)
|
||||
return q.QueryContext(ctx, query)
|
||||
}
|
||||
return inst.Query(query)
|
||||
}
|
||||
|
||||
runExecQuery := func(inst db.Database) (int64, error) {
|
||||
if e, ok := inst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
return e.ExecContext(ctx, query)
|
||||
}
|
||||
return inst.Exec(query)
|
||||
}
|
||||
|
||||
if isReadQuery {
|
||||
data, columns, err := runReadQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQuery 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
data, columns, err = runReadQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns, QueryID: queryID}
|
||||
} else {
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, query)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(query)
|
||||
affected, err := runExecQuery(dbInst)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBQuery 重建连接失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
affected, err = runExecQuery(retryInst)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
return connection.QueryResult{Success: false, Message: err.Error(), QueryID: queryID}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}, QueryID: queryID}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -452,11 +495,7 @@ func (a *App) DBQueryIsolated(config connection.ConnectionConfig, dbName string,
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
lowerQuery := strings.TrimSpace(strings.ToLower(query))
|
||||
isReadQuery := strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain")
|
||||
if !isReadQuery && strings.ToLower(strings.TrimSpace(runConfig.Type)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
|
||||
isReadQuery = true
|
||||
}
|
||||
isReadQuery := isReadOnlySQLQuery(runConfig.Type, query)
|
||||
|
||||
if isReadQuery {
|
||||
var data []map[string]interface{}
|
||||
@@ -499,20 +538,38 @@ func sqlSnippet(query string) string {
|
||||
return q[:max] + "..."
|
||||
}
|
||||
|
||||
func ensureNonNilSlice[T any](items []T) []T {
|
||||
if items == nil {
|
||||
return make([]T, 0)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.QueryResult {
|
||||
dbInst, err := a.getDatabase(config)
|
||||
runConfig := normalizeRunConfig(config, "")
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
dbs, err := dbInst.GetDatabases()
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBGetDatabases 重建连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
dbs, err = retryInst.GetDatabases()
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(config))
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
var resData []map[string]string
|
||||
resData := make([]map[string]string, 0, len(dbs))
|
||||
for _, name := range dbs {
|
||||
resData = append(resData, map[string]string{"Database": name})
|
||||
}
|
||||
@@ -530,12 +587,22 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
tables, err := dbInst.GetTables(dbName)
|
||||
if err != nil && shouldRefreshCachedConnection(err) {
|
||||
if a.invalidateCachedDatabase(runConfig, err) {
|
||||
retryInst, retryErr := a.getDatabaseForcePing(runConfig)
|
||||
if retryErr != nil {
|
||||
logger.Error(retryErr, "DBGetTables 重建连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: retryErr.Error()}
|
||||
}
|
||||
tables, err = retryInst.GetTables(dbName)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取表列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
var resData []map[string]string
|
||||
resData := make([]map[string]string, 0, len(tables))
|
||||
for _, name := range tables {
|
||||
resData = append(resData, map[string]string{"Table": name})
|
||||
}
|
||||
@@ -717,7 +784,7 @@ func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, ta
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: columns}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(columns)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -734,7 +801,7 @@ func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, ta
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: indexes}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(indexes)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -751,7 +818,7 @@ func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: fks}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(fks)}
|
||||
}
|
||||
|
||||
func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
@@ -768,7 +835,7 @@ func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, t
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: triggers}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(triggers)}
|
||||
}
|
||||
|
||||
func (a *App) DropView(config connection.ConnectionConfig, dbName string, viewName string) connection.QueryResult {
|
||||
@@ -906,5 +973,5 @@ func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: cols}
|
||||
return connection.QueryResult{Success: true, Data: ensureNonNilSlice(cols)}
|
||||
}
|
||||
|
||||
149
internal/app/methods_db_cancel_test.go
Normal file
149
internal/app/methods_db_cancel_test.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestGenerateQueryID(t *testing.T) {
|
||||
app := NewApp()
|
||||
id := app.GenerateQueryID()
|
||||
if id == "" {
|
||||
t.Fatal("GenerateQueryID returned empty string")
|
||||
}
|
||||
// Should start with "query-"
|
||||
if !strings.HasPrefix(id, "query-") {
|
||||
t.Fatalf("Expected query ID to start with 'query-', got: %s", id)
|
||||
}
|
||||
// Should be reasonably unique (not equal to another generated ID)
|
||||
id2 := app.GenerateQueryID()
|
||||
if id == id2 {
|
||||
t.Fatal("Two consecutive GenerateQueryID calls returned identical IDs")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCancelQuery_NonExistent(t *testing.T) {
|
||||
app := NewApp()
|
||||
res := app.CancelQuery("non-existent-query-id")
|
||||
if res.Success {
|
||||
t.Fatal("CancelQuery should fail for non-existent query ID")
|
||||
}
|
||||
if !strings.Contains(res.Message, "不存在") && !strings.Contains(res.Message, "not exist") {
|
||||
t.Fatalf("Expected error message about query not existing, got: %s", res.Message)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCancelQuery_ValidQuery(t *testing.T) {
|
||||
app := NewApp()
|
||||
|
||||
// First, generate a query ID and simulate a running query
|
||||
queryID := app.GenerateQueryID()
|
||||
|
||||
// Store a cancel function in runningQueries map
|
||||
_, cancel := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now(),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
|
||||
// Ensure cleanup after test
|
||||
defer func() {
|
||||
app.queryMu.Lock()
|
||||
delete(app.runningQueries, queryID)
|
||||
app.queryMu.Unlock()
|
||||
}()
|
||||
|
||||
// Cancel the query
|
||||
res := app.CancelQuery(queryID)
|
||||
if !res.Success {
|
||||
t.Fatalf("CancelQuery should succeed for valid query ID, got: %s", res.Message)
|
||||
}
|
||||
|
||||
// Verify query removed from map
|
||||
app.queryMu.Lock()
|
||||
_, exists := app.runningQueries[queryID]
|
||||
app.queryMu.Unlock()
|
||||
if exists {
|
||||
t.Fatal("Query should be removed from runningQueries after cancellation")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCleanupStaleQueries(t *testing.T) {
|
||||
app := NewApp()
|
||||
|
||||
// Add a stale query (started 2 hours ago)
|
||||
queryID := app.GenerateQueryID()
|
||||
_, cancel := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[queryID] = queryContext{
|
||||
cancel: cancel,
|
||||
started: time.Now().Add(-2 * time.Hour),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
|
||||
// Cleanup queries older than 1 hour
|
||||
app.CleanupStaleQueries(1 * time.Hour)
|
||||
|
||||
// Verify stale query was removed
|
||||
app.queryMu.Lock()
|
||||
_, exists := app.runningQueries[queryID]
|
||||
app.queryMu.Unlock()
|
||||
if exists {
|
||||
t.Fatal("Stale query should be removed by CleanupStaleQueries")
|
||||
}
|
||||
|
||||
// Add a fresh query (started 30 minutes ago)
|
||||
freshID := app.GenerateQueryID()
|
||||
_, cancel2 := context.WithCancel(context.Background())
|
||||
app.queryMu.Lock()
|
||||
app.runningQueries[freshID] = queryContext{
|
||||
cancel: cancel2,
|
||||
started: time.Now().Add(-30 * time.Minute),
|
||||
}
|
||||
app.queryMu.Unlock()
|
||||
defer cancel2()
|
||||
|
||||
// Cleanup queries older than 1 hour
|
||||
app.CleanupStaleQueries(1 * time.Hour)
|
||||
|
||||
// Verify fresh query still exists
|
||||
app.queryMu.Lock()
|
||||
_, exists = app.runningQueries[freshID]
|
||||
app.queryMu.Unlock()
|
||||
if !exists {
|
||||
t.Fatal("Fresh query should not be removed by CleanupStaleQueries")
|
||||
}
|
||||
|
||||
// Clean up
|
||||
app.queryMu.Lock()
|
||||
delete(app.runningQueries, freshID)
|
||||
app.queryMu.Unlock()
|
||||
}
|
||||
|
||||
func TestDBQueryWithCancel_QueryIDPropagation(t *testing.T) {
|
||||
// This test verifies that query ID is properly propagated in QueryResult
|
||||
// Since we can't easily mock database connections, we'll test the integration
|
||||
// by checking that DBQueryWithCancel returns a QueryResult with QueryID field
|
||||
|
||||
app := NewApp()
|
||||
|
||||
// Create a minimal config for a database type that doesn't require actual connection
|
||||
config := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: ":memory:", // In-memory duckdb for testing
|
||||
}
|
||||
|
||||
// This will fail because we can't actually connect, but we can test the error path
|
||||
result := app.DBQueryWithCancel(config, "", "SELECT 1", "test-query-id")
|
||||
|
||||
// The query should fail (no actual database), but QueryID should be present
|
||||
if result.QueryID != "test-query-id" {
|
||||
t.Fatalf("Expected QueryID 'test-query-id' in result, got: %s", result.QueryID)
|
||||
}
|
||||
}
|
||||
@@ -304,13 +304,33 @@ var driverGoModulePathMap = map[string]string{
|
||||
"clickhouse": "github.com/ClickHouse/clickhouse-go/v2",
|
||||
}
|
||||
|
||||
var driverGoModuleAliasPathMap = map[string][]string{
|
||||
"mongodb": {
|
||||
"go.mongodb.org/mongo-driver",
|
||||
},
|
||||
}
|
||||
|
||||
var driverExtraHistoryLimitMap = map[string]int{
|
||||
"mongodb": 10,
|
||||
}
|
||||
|
||||
var fallbackRecentDriverVersionsMap = map[string][]goModuleVersionMeta{
|
||||
"mongodb": {
|
||||
{Version: "2.5.0"},
|
||||
{Version: "2.4.2"},
|
||||
{Version: "2.4.1"},
|
||||
{Version: "2.4.0"},
|
||||
{Version: "2.3.1"},
|
||||
{Version: "2.3.0"},
|
||||
{Version: "2.2.3"},
|
||||
{Version: "1.17.9"},
|
||||
{Version: "1.17.8"},
|
||||
{Version: "1.17.7"},
|
||||
{Version: "1.17.6"},
|
||||
{Version: "1.17.4"},
|
||||
{Version: "1.17.3"},
|
||||
{Version: "1.17.2"},
|
||||
{Version: "1.17.1"},
|
||||
{Version: "1.17.0"},
|
||||
{Version: "1.16.1"},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -696,11 +716,11 @@ func (a *App) CheckDriverNetworkStatus() connection.QueryResult {
|
||||
}
|
||||
|
||||
data := map[string]interface{}{
|
||||
"reachable": allReachable,
|
||||
"summary": summary,
|
||||
"recommendedProxy": !allReachable,
|
||||
"proxyConfigured": proxyConfigured,
|
||||
"proxyEnv": proxyEnv,
|
||||
"reachable": allReachable,
|
||||
"summary": summary,
|
||||
"recommendedProxy": !allReachable,
|
||||
"proxyConfigured": proxyConfigured,
|
||||
"proxyEnv": proxyEnv,
|
||||
"downloadChainReachable": downloadChainReachable,
|
||||
"downloadRequiredHosts": []string{
|
||||
"github.com",
|
||||
@@ -709,8 +729,8 @@ func (a *App) CheckDriverNetworkStatus() connection.QueryResult {
|
||||
"objects.githubusercontent.com",
|
||||
"raw.githubusercontent.com",
|
||||
},
|
||||
"checkedAt": time.Now().Format(time.RFC3339),
|
||||
"checks": checks,
|
||||
"checkedAt": time.Now().Format(time.RFC3339),
|
||||
"checks": checks,
|
||||
}
|
||||
if logPath := strings.TrimSpace(logger.Path()); logPath != "" {
|
||||
data["logPath"] = logPath
|
||||
@@ -1600,17 +1620,57 @@ func resolveRecentDriverVersionMetas(driverType string, limit int) []goModuleVer
|
||||
if normalized == "" {
|
||||
return nil
|
||||
}
|
||||
if modulePath := strings.TrimSpace(driverGoModulePathMap[normalized]); modulePath != "" {
|
||||
if metas := fetchGoModuleVersionMetasCached(modulePath); len(metas) > 0 {
|
||||
if len(metas) > limit {
|
||||
return append([]goModuleVersionMeta(nil), metas[:limit]...)
|
||||
modulePaths := resolveDriverGoModulePaths(normalized)
|
||||
if len(modulePaths) > 0 {
|
||||
result := make([]goModuleVersionMeta, 0, limit)
|
||||
seen := make(map[string]struct{}, limit)
|
||||
appendUnique := func(values []goModuleVersionMeta, maxAppend int) {
|
||||
if maxAppend <= 0 {
|
||||
return
|
||||
}
|
||||
return append([]goModuleVersionMeta(nil), metas...)
|
||||
appended := 0
|
||||
for _, meta := range values {
|
||||
version := normalizeVersion(strings.TrimSpace(meta.Version))
|
||||
if version == "" {
|
||||
continue
|
||||
}
|
||||
key := strings.ToLower(version)
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
meta.Version = version
|
||||
result = append(result, meta)
|
||||
seen[key] = struct{}{}
|
||||
appended++
|
||||
if appended >= maxAppend {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
appendUnique(fetchGoModuleVersionMetasCached(modulePaths[0]), limit)
|
||||
|
||||
extraLimit := resolveDriverExtraHistoryLimit(normalized)
|
||||
for _, modulePath := range modulePaths[1:] {
|
||||
if extraLimit <= 0 {
|
||||
break
|
||||
}
|
||||
before := len(result)
|
||||
appendUnique(fetchGoModuleVersionMetasCached(modulePath), extraLimit)
|
||||
extraLimit -= len(result) - before
|
||||
}
|
||||
if len(result) > 0 {
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
fallbackLimit := limit + resolveDriverExtraHistoryLimit(normalized)
|
||||
if fallbackLimit <= 0 {
|
||||
fallbackLimit = limit
|
||||
}
|
||||
if fallback := fallbackRecentDriverVersionsMap[normalized]; len(fallback) > 0 {
|
||||
if len(fallback) > limit {
|
||||
return append([]goModuleVersionMeta(nil), fallback[:limit]...)
|
||||
if len(fallback) > fallbackLimit {
|
||||
return append([]goModuleVersionMeta(nil), fallback[:fallbackLimit]...)
|
||||
}
|
||||
return append([]goModuleVersionMeta(nil), fallback...)
|
||||
}
|
||||
@@ -1635,15 +1695,13 @@ func triggerDriverVersionMetadataWarmup(definitions []driverDefinition) {
|
||||
if driverType == "" || !db.IsOptionalGoDriver(driverType) {
|
||||
continue
|
||||
}
|
||||
modulePath := strings.TrimSpace(driverGoModulePathMap[driverType])
|
||||
if modulePath == "" {
|
||||
continue
|
||||
for _, modulePath := range resolveDriverGoModulePaths(driverType) {
|
||||
if _, ok := seenModule[modulePath]; ok {
|
||||
continue
|
||||
}
|
||||
seenModule[modulePath] = struct{}{}
|
||||
modulePaths = append(modulePaths, modulePath)
|
||||
}
|
||||
if _, ok := seenModule[modulePath]; ok {
|
||||
continue
|
||||
}
|
||||
seenModule[modulePath] = struct{}{}
|
||||
modulePaths = append(modulePaths, modulePath)
|
||||
}
|
||||
|
||||
if len(modulePaths) == 0 {
|
||||
@@ -1663,6 +1721,40 @@ func triggerDriverVersionMetadataWarmup(definitions []driverDefinition) {
|
||||
}(append([]string(nil), modulePaths...))
|
||||
}
|
||||
|
||||
func resolveDriverGoModulePaths(driverType string) []string {
|
||||
normalized := normalizeDriverType(driverType)
|
||||
if normalized == "" {
|
||||
return nil
|
||||
}
|
||||
paths := make([]string, 0, 3)
|
||||
seen := make(map[string]struct{}, 3)
|
||||
appendPath := func(path string) {
|
||||
trimmed := strings.TrimSpace(path)
|
||||
if trimmed == "" {
|
||||
return
|
||||
}
|
||||
if _, ok := seen[trimmed]; ok {
|
||||
return
|
||||
}
|
||||
seen[trimmed] = struct{}{}
|
||||
paths = append(paths, trimmed)
|
||||
}
|
||||
|
||||
appendPath(driverGoModulePathMap[normalized])
|
||||
for _, alias := range driverGoModuleAliasPathMap[normalized] {
|
||||
appendPath(alias)
|
||||
}
|
||||
return paths
|
||||
}
|
||||
|
||||
func resolveDriverExtraHistoryLimit(driverType string) int {
|
||||
limit := driverExtraHistoryLimitMap[normalizeDriverType(driverType)]
|
||||
if limit < 0 {
|
||||
return 0
|
||||
}
|
||||
return limit
|
||||
}
|
||||
|
||||
func tryStartDriverVersionMetadataWarmup(now time.Time) bool {
|
||||
driverVersionWarmupMu.Lock()
|
||||
defer driverVersionWarmupMu.Unlock()
|
||||
@@ -2356,16 +2448,23 @@ func hashFileSHA256(filePath string) (string, error) {
|
||||
|
||||
func installOptionalDriverAgentPackage(a *App, definition driverDefinition, selectedVersion string, resolvedDir string, downloadURL string) (installedDriverPackage, error) {
|
||||
driverType := normalizeDriverType(definition.Type)
|
||||
executablePath, err := db.ResolveOptionalDriverAgentExecutablePath(resolvedDir, driverType)
|
||||
installPath, err := db.ResolveOptionalDriverAgentExecutablePathForVersion(resolvedDir, driverType, selectedVersion)
|
||||
if err != nil {
|
||||
return installedDriverPackage{}, err
|
||||
}
|
||||
downloadSource, hash, err := ensureOptionalDriverAgentBinary(a, definition, executablePath, downloadURL)
|
||||
runtimePath, err := db.ResolveOptionalDriverAgentExecutablePath(resolvedDir, driverType)
|
||||
if err != nil {
|
||||
return installedDriverPackage{}, err
|
||||
}
|
||||
downloadSource, hash, err := ensureOptionalDriverAgentBinary(a, definition, installPath, downloadURL, selectedVersion)
|
||||
if err != nil {
|
||||
return installedDriverPackage{}, err
|
||||
}
|
||||
if activateErr := activateOptionalDriverAgentBinary(installPath, runtimePath); activateErr != nil {
|
||||
return installedDriverPackage{}, fmt.Errorf("activate %s driver agent failed: %w", resolveDriverDisplayName(definition), activateErr)
|
||||
}
|
||||
if strings.TrimSpace(hash) == "" {
|
||||
hash, err = hashFileSHA256(executablePath)
|
||||
hash, err = hashFileSHA256(installPath)
|
||||
if err != nil {
|
||||
return installedDriverPackage{}, fmt.Errorf("计算 %s 驱动代理摘要失败:%w", resolveDriverDisplayName(definition), err)
|
||||
}
|
||||
@@ -2376,9 +2475,9 @@ func installOptionalDriverAgentPackage(a *App, definition driverDefinition, sele
|
||||
return installedDriverPackage{
|
||||
DriverType: driverType,
|
||||
Version: strings.TrimSpace(selectedVersion),
|
||||
FilePath: executablePath,
|
||||
FileName: filepath.Base(executablePath),
|
||||
ExecutablePath: executablePath,
|
||||
FilePath: installPath,
|
||||
FileName: filepath.Base(installPath),
|
||||
ExecutablePath: runtimePath,
|
||||
DownloadURL: strings.TrimSpace(downloadSource),
|
||||
SHA256: hash,
|
||||
DownloadedAt: time.Now().Format(time.RFC3339),
|
||||
@@ -2437,6 +2536,9 @@ func installOptionalDriverAgentFromLocalPath(definition driverDefinition, filePa
|
||||
return installedDriverPackage{}, fmt.Errorf("导入本地驱动代理失败:%w", copyErr)
|
||||
}
|
||||
}
|
||||
if validateErr := db.ValidateOptionalDriverAgentExecutable(driverType, executablePath); validateErr != nil {
|
||||
return installedDriverPackage{}, validateErr
|
||||
}
|
||||
|
||||
hash, hashErr := hashFileSHA256(executablePath)
|
||||
if hashErr != nil {
|
||||
@@ -2686,17 +2788,23 @@ func installOptionalDriverAgentFromLocalZip(zipPath string, definition driverDef
|
||||
return filepath.ToSlash(strings.TrimPrefix(strings.TrimSpace(entry.Name), "./")), nil
|
||||
}
|
||||
|
||||
func ensureOptionalDriverAgentBinary(a *App, definition driverDefinition, executablePath string, downloadURL string) (string, string, error) {
|
||||
func ensureOptionalDriverAgentBinary(a *App, definition driverDefinition, executablePath string, downloadURL string, selectedVersion string) (string, string, error) {
|
||||
driverType := normalizeDriverType(definition.Type)
|
||||
displayName := resolveDriverDisplayName(definition)
|
||||
forceSourceBuild := shouldForceSourceBuildForVersion(driverType, selectedVersion)
|
||||
skipReuseCandidate := shouldSkipReusableAgentCandidate(driverType, selectedVersion)
|
||||
|
||||
info, err := os.Stat(executablePath)
|
||||
if err == nil && !info.IsDir() {
|
||||
hash, hashErr := hashFileSHA256(executablePath)
|
||||
if hashErr != nil {
|
||||
return "", "", fmt.Errorf("读取已安装 %s 驱动代理摘要失败:%w", displayName, hashErr)
|
||||
if validateErr := db.ValidateOptionalDriverAgentExecutable(driverType, executablePath); validateErr != nil {
|
||||
_ = os.Remove(executablePath)
|
||||
} else {
|
||||
hash, hashErr := hashFileSHA256(executablePath)
|
||||
if hashErr != nil {
|
||||
return "", "", fmt.Errorf("读取已安装 %s 驱动代理摘要失败:%w", displayName, hashErr)
|
||||
}
|
||||
return fmt.Sprintf("local://existing/%s-driver-agent", driverType), hash, nil
|
||||
}
|
||||
return fmt.Sprintf("local://existing/%s-driver-agent", driverType), hash, nil
|
||||
}
|
||||
if err == nil && info.IsDir() {
|
||||
return "", "", fmt.Errorf("%s 驱动代理路径被目录占用:%s", displayName, executablePath)
|
||||
@@ -2708,49 +2816,57 @@ func ensureOptionalDriverAgentBinary(a *App, definition driverDefinition, execut
|
||||
if a != nil {
|
||||
a.emitDriverDownloadProgress(driverType, "downloading", 10, 100, "检查本地驱动代理缓存")
|
||||
}
|
||||
if sourcePath, ok := findExistingOptionalDriverAgentCandidate(definition, executablePath); ok {
|
||||
if copyErr := copyAgentBinary(sourcePath, executablePath); copyErr != nil {
|
||||
return "", "", fmt.Errorf("复制预置 %s 驱动代理失败:%w", displayName, copyErr)
|
||||
if !skipReuseCandidate {
|
||||
if sourcePath, ok := findExistingOptionalDriverAgentCandidate(definition, executablePath); ok {
|
||||
if copyErr := copyAgentBinary(sourcePath, executablePath); copyErr != nil {
|
||||
return "", "", fmt.Errorf("复制预置 %s 驱动代理失败:%w", displayName, copyErr)
|
||||
}
|
||||
if validateErr := db.ValidateOptionalDriverAgentExecutable(driverType, executablePath); validateErr != nil {
|
||||
_ = os.Remove(executablePath)
|
||||
return "", "", validateErr
|
||||
}
|
||||
hash, hashErr := hashFileSHA256(executablePath)
|
||||
if hashErr != nil {
|
||||
return "", "", fmt.Errorf("计算预置 %s 驱动代理摘要失败:%w", displayName, hashErr)
|
||||
}
|
||||
return "file://" + sourcePath, hash, nil
|
||||
}
|
||||
hash, hashErr := hashFileSHA256(executablePath)
|
||||
if hashErr != nil {
|
||||
return "", "", fmt.Errorf("计算预置 %s 驱动代理摘要失败:%w", displayName, hashErr)
|
||||
}
|
||||
return "file://" + sourcePath, hash, nil
|
||||
}
|
||||
|
||||
downloadURLs := resolveOptionalDriverAgentDownloadURLs(definition, downloadURL)
|
||||
var downloadErrs []string
|
||||
if len(downloadURLs) > 0 {
|
||||
for _, candidateURL := range downloadURLs {
|
||||
if a != nil {
|
||||
a.emitDriverDownloadProgress(driverType, "downloading", 20, 100, fmt.Sprintf("下载预编译 %s 驱动代理", displayName))
|
||||
if !forceSourceBuild {
|
||||
downloadURLs := resolveOptionalDriverAgentDownloadURLs(definition, downloadURL)
|
||||
if len(downloadURLs) > 0 {
|
||||
for _, candidateURL := range downloadURLs {
|
||||
if a != nil {
|
||||
a.emitDriverDownloadProgress(driverType, "downloading", 20, 100, fmt.Sprintf("下载预编译 %s 驱动代理", displayName))
|
||||
}
|
||||
hash, dlErr := downloadOptionalDriverAgentBinary(a, definition, candidateURL, executablePath)
|
||||
if dlErr == nil {
|
||||
return candidateURL, hash, nil
|
||||
}
|
||||
downloadErrs = append(downloadErrs, fmt.Sprintf("%s: %s", candidateURL, strings.TrimSpace(dlErr.Error())))
|
||||
}
|
||||
hash, dlErr := downloadOptionalDriverAgentBinary(a, definition, candidateURL, executablePath)
|
||||
if dlErr == nil {
|
||||
return candidateURL, hash, nil
|
||||
}
|
||||
downloadErrs = append(downloadErrs, fmt.Sprintf("%s: %s", candidateURL, strings.TrimSpace(dlErr.Error())))
|
||||
}
|
||||
}
|
||||
bundleURLs := resolveOptionalDriverBundleDownloadURLs()
|
||||
if len(bundleURLs) > 0 {
|
||||
for _, bundleURL := range bundleURLs {
|
||||
if a != nil {
|
||||
a.emitDriverDownloadProgress(driverType, "downloading", 20, 100, fmt.Sprintf("从驱动总包提取 %s 代理", displayName))
|
||||
bundleURLs := resolveOptionalDriverBundleDownloadURLs()
|
||||
if len(bundleURLs) > 0 {
|
||||
for _, bundleURL := range bundleURLs {
|
||||
if a != nil {
|
||||
a.emitDriverDownloadProgress(driverType, "downloading", 20, 100, fmt.Sprintf("从驱动总包提取 %s 代理", displayName))
|
||||
}
|
||||
source, hash, bundleErr := downloadOptionalDriverAgentFromBundle(a, definition, bundleURL, executablePath)
|
||||
if bundleErr == nil {
|
||||
return source, hash, nil
|
||||
}
|
||||
downloadErrs = append(downloadErrs, fmt.Sprintf("%s: %s", bundleURL, strings.TrimSpace(bundleErr.Error())))
|
||||
}
|
||||
source, hash, bundleErr := downloadOptionalDriverAgentFromBundle(a, definition, bundleURL, executablePath)
|
||||
if bundleErr == nil {
|
||||
return source, hash, nil
|
||||
}
|
||||
downloadErrs = append(downloadErrs, fmt.Sprintf("%s: %s", bundleURL, strings.TrimSpace(bundleErr.Error())))
|
||||
}
|
||||
}
|
||||
if a != nil {
|
||||
a.emitDriverDownloadProgress(driverType, "downloading", 92, 100, "未命中预编译包,尝试开发态本地构建")
|
||||
}
|
||||
|
||||
hash, buildErr := buildOptionalDriverAgentFromSource(definition, executablePath)
|
||||
hash, buildErr := buildOptionalDriverAgentFromSource(definition, executablePath, selectedVersion)
|
||||
if buildErr == nil {
|
||||
return fmt.Sprintf("local://go-build/%s-driver-agent", driverType), hash, nil
|
||||
}
|
||||
@@ -2796,6 +2912,10 @@ func downloadOptionalDriverAgentBinary(a *App, definition driverDefinition, urlT
|
||||
if chmodErr := os.Chmod(executablePath, 0o755); chmodErr != nil && stdRuntime.GOOS != "windows" {
|
||||
return "", fmt.Errorf("设置代理权限失败:%w", chmodErr)
|
||||
}
|
||||
if validateErr := db.ValidateOptionalDriverAgentExecutable(driverType, executablePath); validateErr != nil {
|
||||
_ = os.Remove(executablePath)
|
||||
return "", validateErr
|
||||
}
|
||||
return hash, nil
|
||||
}
|
||||
|
||||
@@ -2904,6 +3024,10 @@ func downloadOptionalDriverAgentFromBundle(a *App, definition driverDefinition,
|
||||
if chmodErr := os.Chmod(executablePath, 0o755); chmodErr != nil && stdRuntime.GOOS != "windows" {
|
||||
return "", "", fmt.Errorf("设置驱动代理权限失败:%w", chmodErr)
|
||||
}
|
||||
if validateErr := db.ValidateOptionalDriverAgentExecutable(driverType, executablePath); validateErr != nil {
|
||||
_ = os.Remove(executablePath)
|
||||
return "", "", validateErr
|
||||
}
|
||||
hash, err := hashFileSHA256(executablePath)
|
||||
if err != nil {
|
||||
return "", "", fmt.Errorf("计算驱动代理摘要失败:%w", err)
|
||||
@@ -2912,7 +3036,7 @@ func downloadOptionalDriverAgentFromBundle(a *App, definition driverDefinition,
|
||||
return source, hash, nil
|
||||
}
|
||||
|
||||
func buildOptionalDriverAgentFromSource(definition driverDefinition, executablePath string) (string, error) {
|
||||
func buildOptionalDriverAgentFromSource(definition driverDefinition, executablePath string, selectedVersion string) (string, error) {
|
||||
driverType := normalizeDriverType(definition.Type)
|
||||
displayName := resolveDriverDisplayName(definition)
|
||||
goPath, lookErr := exec.LookPath("go")
|
||||
@@ -2920,7 +3044,7 @@ func buildOptionalDriverAgentFromSource(definition driverDefinition, executableP
|
||||
return "", fmt.Errorf("当前环境未安装 Go,且未找到可用的 %s 预编译代理包", displayName)
|
||||
}
|
||||
|
||||
tagName, tagErr := optionalDriverBuildTag(driverType)
|
||||
tagName, tagErr := optionalDriverBuildTag(driverType, selectedVersion)
|
||||
if tagErr != nil {
|
||||
return "", tagErr
|
||||
}
|
||||
@@ -2931,6 +3055,7 @@ func buildOptionalDriverAgentFromSource(definition driverDefinition, executableP
|
||||
}
|
||||
cmd := exec.Command(goPath, "build", "-tags", tagName, "-trimpath", "-ldflags", "-s -w", "-o", executablePath, "./cmd/optional-driver-agent")
|
||||
cmd.Dir = projectRoot
|
||||
cmd.Env = append(os.Environ(), "GOTOOLCHAIN=auto")
|
||||
output, buildErr := cmd.CombinedOutput()
|
||||
if buildErr != nil {
|
||||
return "", fmt.Errorf("构建 %s 驱动代理失败:%v,输出:%s", displayName, buildErr, strings.TrimSpace(string(output)))
|
||||
@@ -2945,7 +3070,31 @@ func buildOptionalDriverAgentFromSource(definition driverDefinition, executableP
|
||||
return hash, nil
|
||||
}
|
||||
|
||||
func optionalDriverBuildTag(driverType string) (string, error) {
|
||||
func resolveMongoDriverMajorFromVersion(version string) int {
|
||||
trimmed := strings.TrimSpace(version)
|
||||
trimmed = strings.TrimPrefix(trimmed, "v")
|
||||
if strings.HasPrefix(trimmed, "1.") || trimmed == "1" {
|
||||
return 1
|
||||
}
|
||||
return 2
|
||||
}
|
||||
|
||||
func shouldForceSourceBuildForVersion(driverType string, selectedVersion string) bool {
|
||||
if normalizeDriverType(driverType) != "mongodb" {
|
||||
return false
|
||||
}
|
||||
return resolveMongoDriverMajorFromVersion(selectedVersion) == 1
|
||||
}
|
||||
|
||||
func shouldSkipReusableAgentCandidate(driverType string, selectedVersion string) bool {
|
||||
if normalizeDriverType(driverType) != "mongodb" {
|
||||
return false
|
||||
}
|
||||
_ = selectedVersion
|
||||
return true
|
||||
}
|
||||
|
||||
func optionalDriverBuildTag(driverType string, selectedVersion string) (string, error) {
|
||||
switch normalizeDriverType(driverType) {
|
||||
case "mysql":
|
||||
return "gonavi_mysql_driver", nil
|
||||
@@ -2970,6 +3119,9 @@ func optionalDriverBuildTag(driverType string) (string, error) {
|
||||
case "vastbase":
|
||||
return "gonavi_vastbase_driver", nil
|
||||
case "mongodb":
|
||||
if resolveMongoDriverMajorFromVersion(selectedVersion) == 1 {
|
||||
return "gonavi_mongodb_driver_v1", nil
|
||||
}
|
||||
return "gonavi_mongodb_driver", nil
|
||||
case "tdengine":
|
||||
return "gonavi_tdengine_driver", nil
|
||||
@@ -3201,6 +3353,7 @@ func resolveOptionalDriverAgentDownloadURLs(definition driverDefinition, rawURL
|
||||
}
|
||||
|
||||
func findExistingOptionalDriverAgentCandidate(definition driverDefinition, targetPath string) (string, bool) {
|
||||
driverType := normalizeDriverType(definition.Type)
|
||||
targetAbs, _ := filepath.Abs(targetPath)
|
||||
candidates := resolveOptionalDriverAgentCandidatePaths(definition)
|
||||
for _, candidate := range candidates {
|
||||
@@ -3216,9 +3369,13 @@ func findExistingOptionalDriverAgentCandidate(definition driverDefinition, targe
|
||||
continue
|
||||
}
|
||||
info, statErr := os.Stat(absPath)
|
||||
if statErr == nil && !info.IsDir() {
|
||||
return absPath, true
|
||||
if statErr != nil || info.IsDir() {
|
||||
continue
|
||||
}
|
||||
if validateErr := db.ValidateOptionalDriverAgentExecutable(driverType, absPath); validateErr != nil {
|
||||
continue
|
||||
}
|
||||
return absPath, true
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
@@ -3310,6 +3467,30 @@ func resolveDriverDisplayName(definition driverDefinition) string {
|
||||
return "未知"
|
||||
}
|
||||
|
||||
func activateOptionalDriverAgentBinary(installPath string, runtimePath string) error {
|
||||
source := strings.TrimSpace(installPath)
|
||||
target := strings.TrimSpace(runtimePath)
|
||||
if source == "" || target == "" {
|
||||
return fmt.Errorf("agent path is empty")
|
||||
}
|
||||
if source == target {
|
||||
return nil
|
||||
}
|
||||
|
||||
absSource := source
|
||||
absTarget := target
|
||||
if value, err := filepath.Abs(source); err == nil && strings.TrimSpace(value) != "" {
|
||||
absSource = value
|
||||
}
|
||||
if value, err := filepath.Abs(target); err == nil && strings.TrimSpace(value) != "" {
|
||||
absTarget = value
|
||||
}
|
||||
if strings.EqualFold(absSource, absTarget) {
|
||||
return nil
|
||||
}
|
||||
return copyAgentBinary(source, target)
|
||||
}
|
||||
|
||||
func copyAgentBinary(sourcePath, targetPath string) error {
|
||||
src, err := os.Open(sourcePath)
|
||||
if err != nil {
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"html"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
@@ -1595,6 +1596,26 @@ func writeRowsToFile(f *os.File, data []map[string]interface{}, columns []string
|
||||
return writeRowsToXlsx(f.Name(), data, columns)
|
||||
}
|
||||
|
||||
// html 使用内嵌 CSS 输出可直接浏览器预览的独立页面
|
||||
if format == "html" {
|
||||
return writeRowsToHTML(f, data, columns)
|
||||
}
|
||||
|
||||
// 如果列名为空但数据不为空,从所有数据行提取所有键
|
||||
if len(columns) == 0 && len(data) > 0 {
|
||||
keySet := make(map[string]bool)
|
||||
for _, row := range data {
|
||||
for key := range row {
|
||||
keySet[key] = true
|
||||
}
|
||||
}
|
||||
// 排序以确保输出一致
|
||||
for key := range keySet {
|
||||
columns = append(columns, key)
|
||||
}
|
||||
sort.Strings(columns)
|
||||
}
|
||||
|
||||
var csvWriter *csv.Writer
|
||||
var jsonEncoder *json.Encoder
|
||||
isJsonFirstRow := true
|
||||
@@ -1688,6 +1709,188 @@ func writeRowsToFile(f *os.File, data []map[string]interface{}, columns []string
|
||||
return nil
|
||||
}
|
||||
|
||||
func formatExportHTMLCell(val interface{}) string {
|
||||
text := formatExportCellText(val)
|
||||
escaped := html.EscapeString(text)
|
||||
escaped = strings.ReplaceAll(escaped, "\r\n", "\n")
|
||||
escaped = strings.ReplaceAll(escaped, "\r", "\n")
|
||||
return strings.ReplaceAll(escaped, "\n", "<br>")
|
||||
}
|
||||
|
||||
func writeRowsToHTML(f *os.File, data []map[string]interface{}, columns []string) error {
|
||||
w := bufio.NewWriterSize(f, 1024*256)
|
||||
|
||||
if _, err := w.WriteString(`<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>GoNavi Export</title>
|
||||
<style>
|
||||
:root {
|
||||
color-scheme: light;
|
||||
--bg: #f8f9fa;
|
||||
--card: #ffffff;
|
||||
--line: #dee2e6;
|
||||
--text: #212529;
|
||||
--muted: #6c757d;
|
||||
--hover: #f1f3f5;
|
||||
--zebra: #f8f9fa;
|
||||
--head: #ffffff;
|
||||
}
|
||||
* { box-sizing: border-box; }
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 24px;
|
||||
background: var(--bg);
|
||||
color: var(--text);
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", "PingFang SC", "Microsoft YaHei", sans-serif;
|
||||
line-height: 1.6;
|
||||
}
|
||||
.export-wrap {
|
||||
max-width: 100%;
|
||||
margin: 0 auto;
|
||||
background: var(--card);
|
||||
border: 1px solid var(--line);
|
||||
border-radius: 8px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.export-head {
|
||||
padding: 16px 20px;
|
||||
background: var(--head);
|
||||
border-bottom: 2px solid var(--line);
|
||||
}
|
||||
.export-head h1 {
|
||||
margin: 0;
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
}
|
||||
.export-meta {
|
||||
margin-top: 6px;
|
||||
color: var(--muted);
|
||||
font-size: 13px;
|
||||
}
|
||||
.table-wrap {
|
||||
width: 100%;
|
||||
overflow: auto;
|
||||
padding: 16px;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
width: auto;
|
||||
font-size: 13px;
|
||||
}
|
||||
thead th {
|
||||
position: sticky;
|
||||
top: 0;
|
||||
z-index: 2;
|
||||
background: var(--head);
|
||||
text-align: left;
|
||||
font-weight: 600;
|
||||
white-space: nowrap;
|
||||
border-bottom: 2px solid var(--line);
|
||||
color: var(--text);
|
||||
padding: 12px 16px;
|
||||
}
|
||||
td {
|
||||
padding: 10px 16px;
|
||||
border-bottom: 1px solid var(--line);
|
||||
vertical-align: top;
|
||||
white-space: pre-wrap;
|
||||
word-wrap: break-word;
|
||||
overflow-wrap: anywhere;
|
||||
max-width: 500px;
|
||||
color: var(--text);
|
||||
}
|
||||
tbody tr:nth-child(even) {
|
||||
background: var(--zebra);
|
||||
}
|
||||
tbody tr:hover {
|
||||
background: var(--hover);
|
||||
}
|
||||
td.empty {
|
||||
text-align: center;
|
||||
color: var(--muted);
|
||||
font-style: italic;
|
||||
}
|
||||
@media (max-width: 768px) {
|
||||
body { padding: 16px; }
|
||||
.export-head { padding: 12px 16px; }
|
||||
.table-wrap { padding: 12px; }
|
||||
th, td { padding: 8px 12px; font-size: 12px; }
|
||||
}
|
||||
@media print {
|
||||
body { background: white; padding: 0; }
|
||||
.export-wrap { border: none; }
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="export-wrap">
|
||||
<div class="export-head">
|
||||
<h1>GoNavi Data Export</h1>
|
||||
<div class="export-meta">`); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := fmt.Fprintf(w, "Rows: %d · Columns: %d · Generated: %s", len(data), len(columns), time.Now().Format("2006-01-02 15:04:05")); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := w.WriteString(`</div>
|
||||
</div>
|
||||
<div class="table-wrap">
|
||||
<table>
|
||||
<thead><tr>`); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, col := range columns {
|
||||
if _, err := fmt.Fprintf(w, "<th>%s</th>", html.EscapeString(col)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.WriteString(`</tr></thead><tbody>`); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(data) == 0 {
|
||||
colspan := len(columns)
|
||||
if colspan <= 0 {
|
||||
colspan = 1
|
||||
}
|
||||
if _, err := fmt.Fprintf(w, `<tr><td class="empty" colspan="%d">(0 rows)</td></tr>`, colspan); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
for _, rowMap := range data {
|
||||
if _, err := w.WriteString("<tr>"); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, col := range columns {
|
||||
if _, err := fmt.Fprintf(w, "<td>%s</td>", formatExportHTMLCell(rowMap[col])); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if _, err := w.WriteString("</tr>"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.WriteString(`</tbody></table>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>`); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return w.Flush()
|
||||
}
|
||||
|
||||
func formatExportCellText(val interface{}) string {
|
||||
if val == nil {
|
||||
return "NULL"
|
||||
|
||||
@@ -203,3 +203,73 @@ func TestGetExportQueryTimeout_CustomClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
t.Fatalf("custom clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_HTML_EscapeAndStyle(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.html")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{
|
||||
"name": "<script>alert(1)</script>",
|
||||
"note": "line1\nline2",
|
||||
"nullable": nil,
|
||||
},
|
||||
}
|
||||
columns := []string{"name", "note", "nullable"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "html"); err != nil {
|
||||
t.Fatalf("写入 html 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 html 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
|
||||
if !strings.Contains(content, "<!DOCTYPE html>") {
|
||||
t.Fatalf("html 导出缺少 doctype: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "position: sticky") {
|
||||
t.Fatalf("html 导出缺少表头吸顶样式: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "tbody tr:nth-child(even)") {
|
||||
t.Fatalf("html 导出缺少斑马纹样式: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "<script>alert(1)</script>") {
|
||||
t.Fatalf("html 导出未进行 XSS 转义: %s", content)
|
||||
}
|
||||
if strings.Contains(content, "<script>alert(1)</script>") {
|
||||
t.Fatalf("html 导出包含未转义脚本: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "line1<br>line2") {
|
||||
t.Fatalf("html 导出换行未转为 <br>: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "<td>NULL</td>") {
|
||||
t.Fatalf("html 导出空值显示异常: %s", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_HTML_EscapeHeader(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.html")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
columnName := "<b>name</b>"
|
||||
data := []map[string]interface{}{{columnName: "ok"}}
|
||||
if err := writeRowsToFile(f, data, []string{columnName}, "html"); err != nil {
|
||||
t.Fatalf("写入 html 失败: %v", err)
|
||||
}
|
||||
contentBytes, _ := os.ReadFile(f.Name())
|
||||
content := string(contentBytes)
|
||||
if !strings.Contains(content, "<th><b>name</b></th>") || strings.Contains(content, "<th><b>name</b></th>") {
|
||||
t.Fatalf("html 表头未正确转义: %s", content)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,12 +23,20 @@ var (
|
||||
|
||||
// getRedisClient gets or creates a Redis client from cache
|
||||
func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisClient, error) {
|
||||
key := getRedisClientCacheKey(config)
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, proxyErr)
|
||||
logger.Error(wrapped, "Redis 代理准备失败:%s", formatRedisConnSummary(effectiveConfig))
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
key := getRedisClientCacheKey(connectConfig)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
|
||||
redisCacheMu.Lock()
|
||||
defer redisCacheMu.Unlock()
|
||||
@@ -47,21 +55,20 @@ func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisCli
|
||||
|
||||
logger.Infof("创建 Redis 客户端实例:缓存Key=%s", shortKey)
|
||||
client := redis.NewRedisClient()
|
||||
if err := client.Connect(config); err != nil {
|
||||
logger.Error(err, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
return nil, err
|
||||
if err := client.Connect(connectConfig); err != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, err)
|
||||
logger.Error(wrapped, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
redisCache[key] = client
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(effectiveConfig), shortKey)
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func getRedisClientCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
}
|
||||
b, _ := json.Marshal(config)
|
||||
normalized := normalizeCacheKeyConfig(config)
|
||||
b, _ := json.Marshal(normalized)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
@@ -91,6 +98,26 @@ func formatRedisConnSummary(config connection.ConnectionConfig) string {
|
||||
b.WriteString(" 用户=")
|
||||
b.WriteString(config.SSH.User)
|
||||
}
|
||||
if config.UseProxy {
|
||||
b.WriteString(" 代理=")
|
||||
b.WriteString(strings.ToLower(strings.TrimSpace(config.Proxy.Type)))
|
||||
b.WriteString("://")
|
||||
b.WriteString(config.Proxy.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(strconv.Itoa(config.Proxy.Port))
|
||||
if strings.TrimSpace(config.Proxy.User) != "" {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
if config.UseHTTPTunnel {
|
||||
b.WriteString(" HTTP隧道=")
|
||||
b.WriteString(strings.TrimSpace(config.HTTPTunnel.Host))
|
||||
b.WriteString(":")
|
||||
b.WriteString(strconv.Itoa(config.HTTPTunnel.Port))
|
||||
if strings.TrimSpace(config.HTTPTunnel.User) != "" {
|
||||
b.WriteString(" HTTP隧道认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
@@ -51,12 +51,13 @@ type UpdateInfo struct {
|
||||
}
|
||||
|
||||
type AppInfo struct {
|
||||
Version string `json:"version"`
|
||||
Author string `json:"author"`
|
||||
RepoURL string `json:"repoUrl,omitempty"`
|
||||
IssueURL string `json:"issueUrl,omitempty"`
|
||||
ReleaseURL string `json:"releaseUrl,omitempty"`
|
||||
BuildTime string `json:"buildTime,omitempty"`
|
||||
Version string `json:"version"`
|
||||
Author string `json:"author"`
|
||||
RepoURL string `json:"repoUrl,omitempty"`
|
||||
IssueURL string `json:"issueUrl,omitempty"`
|
||||
ReleaseURL string `json:"releaseUrl,omitempty"`
|
||||
CommunityURL string `json:"communityUrl,omitempty"`
|
||||
BuildTime string `json:"buildTime,omitempty"`
|
||||
}
|
||||
|
||||
type updateDownloadResult struct {
|
||||
@@ -137,12 +138,13 @@ func (a *App) CheckForUpdates() connection.QueryResult {
|
||||
|
||||
func (a *App) GetAppInfo() connection.QueryResult {
|
||||
info := AppInfo{
|
||||
Version: getCurrentVersion(),
|
||||
Author: getCurrentAuthor(),
|
||||
RepoURL: "https://github.com/" + updateRepo,
|
||||
IssueURL: "https://github.com/" + updateRepo + "/issues",
|
||||
ReleaseURL: "https://github.com/" + updateRepo + "/releases",
|
||||
BuildTime: strings.TrimSpace(AppBuildTime),
|
||||
Version: getCurrentVersion(),
|
||||
Author: getCurrentAuthor(),
|
||||
RepoURL: "https://github.com/" + updateRepo,
|
||||
IssueURL: "https://github.com/" + updateRepo + "/issues",
|
||||
ReleaseURL: "https://github.com/" + updateRepo + "/releases",
|
||||
CommunityURL: "https://aibook.ren",
|
||||
BuildTime: strings.TrimSpace(AppBuildTime),
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "OK", Data: info}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,66 @@ import (
|
||||
"unicode"
|
||||
)
|
||||
|
||||
func leadingSQLKeyword(query string) string {
|
||||
text := strings.TrimSpace(query)
|
||||
for len(text) > 0 {
|
||||
trimmed := strings.TrimLeft(text, " \t\r\n")
|
||||
if trimmed == "" {
|
||||
return ""
|
||||
}
|
||||
text = trimmed
|
||||
|
||||
switch {
|
||||
case strings.HasPrefix(text, "--"):
|
||||
if idx := strings.IndexByte(text, '\n'); idx >= 0 {
|
||||
text = text[idx+1:]
|
||||
continue
|
||||
}
|
||||
return ""
|
||||
case strings.HasPrefix(text, "#"):
|
||||
if idx := strings.IndexByte(text, '\n'); idx >= 0 {
|
||||
text = text[idx+1:]
|
||||
continue
|
||||
}
|
||||
return ""
|
||||
case strings.HasPrefix(text, "/*"):
|
||||
if idx := strings.Index(text, "*/"); idx >= 0 {
|
||||
text = text[idx+2:]
|
||||
continue
|
||||
}
|
||||
return ""
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if text == "" {
|
||||
return ""
|
||||
}
|
||||
for i, r := range text {
|
||||
if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' {
|
||||
continue
|
||||
}
|
||||
if i == 0 {
|
||||
return ""
|
||||
}
|
||||
return strings.ToLower(text[:i])
|
||||
}
|
||||
return strings.ToLower(text)
|
||||
}
|
||||
|
||||
func isReadOnlySQLQuery(dbType string, query string) bool {
|
||||
if strings.ToLower(strings.TrimSpace(dbType)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
|
||||
return true
|
||||
}
|
||||
|
||||
switch leadingSQLKeyword(query) {
|
||||
case "select", "with", "show", "describe", "desc", "explain", "pragma", "values":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func sanitizeSQLForPgLike(dbType string, query string) string {
|
||||
switch strings.ToLower(strings.TrimSpace(dbType)) {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
|
||||
@@ -18,35 +18,49 @@ type ProxyConfig struct {
|
||||
Password string `json:"password,omitempty"`
|
||||
}
|
||||
|
||||
// HTTPTunnelConfig holds independent HTTP CONNECT tunnel details
|
||||
type HTTPTunnelConfig struct {
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user,omitempty"`
|
||||
Password string `json:"password,omitempty"`
|
||||
}
|
||||
|
||||
// ConnectionConfig holds database connection details including SSH
|
||||
type ConnectionConfig struct {
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
UseProxy bool `json:"useProxy,omitempty"`
|
||||
Proxy ProxyConfig `json:"proxy,omitempty"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica | cluster
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSL bool `json:"useSSL,omitempty"` // MySQL-like SSL/TLS switch
|
||||
SSLMode string `json:"sslMode,omitempty"` // preferred | required | skip-verify | disable
|
||||
SSLCertPath string `json:"sslCertPath,omitempty"` // TLS client certificate path (e.g., Dameng)
|
||||
SSLKeyPath string `json:"sslKeyPath,omitempty"` // TLS client private key path (e.g., Dameng)
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
UseProxy bool `json:"useProxy,omitempty"`
|
||||
Proxy ProxyConfig `json:"proxy,omitempty"`
|
||||
UseHTTPTunnel bool `json:"useHttpTunnel,omitempty"`
|
||||
HTTPTunnel HTTPTunnelConfig `json:"httpTunnel,omitempty"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica | cluster
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
}
|
||||
|
||||
// QueryResult is the standard response format for Wails methods
|
||||
@@ -55,6 +69,7 @@ type QueryResult struct {
|
||||
Message string `json:"message"`
|
||||
Data interface{} `json:"data"`
|
||||
Fields []string `json:"fields,omitempty"`
|
||||
QueryID string `json:"queryId,omitempty"` // Unique ID for query cancellation
|
||||
}
|
||||
|
||||
// ColumnDefinition represents a table column
|
||||
|
||||
@@ -107,7 +107,9 @@ func (c *ClickHouseDB) buildClickHouseOptions(config connection.ConnectionConfig
|
||||
if readTimeout < minClickHouseReadTimeout {
|
||||
readTimeout = minClickHouseReadTimeout
|
||||
}
|
||||
return &clickhouse.Options{
|
||||
protocol := detectClickHouseProtocol(config)
|
||||
opts := &clickhouse.Options{
|
||||
Protocol: protocol,
|
||||
Addr: []string{
|
||||
net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
},
|
||||
@@ -119,6 +121,50 @@ func (c *ClickHouseDB) buildClickHouseOptions(config connection.ConnectionConfig
|
||||
DialTimeout: connectTimeout,
|
||||
ReadTimeout: readTimeout,
|
||||
}
|
||||
if tlsConfig := resolveGenericTLSConfig(config); tlsConfig != nil {
|
||||
opts.TLS = tlsConfig
|
||||
}
|
||||
return opts
|
||||
}
|
||||
|
||||
func detectClickHouseProtocol(config connection.ConnectionConfig) clickhouse.Protocol {
|
||||
uriText := strings.ToLower(strings.TrimSpace(config.URI))
|
||||
if strings.HasPrefix(uriText, "http://") || strings.HasPrefix(uriText, "https://") {
|
||||
return clickhouse.HTTP
|
||||
}
|
||||
if config.Port == 8123 || config.Port == 8443 {
|
||||
return clickhouse.HTTP
|
||||
}
|
||||
return clickhouse.Native
|
||||
}
|
||||
|
||||
func isClickHouseProtocolMismatch(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
text := strings.ToLower(strings.TrimSpace(err.Error()))
|
||||
if text == "" {
|
||||
return false
|
||||
}
|
||||
return strings.Contains(text, "unexpected packet [72]") ||
|
||||
(strings.Contains(text, "unexpected packet") && strings.Contains(text, "handshake")) ||
|
||||
strings.Contains(text, "http response to https client") ||
|
||||
strings.Contains(text, "malformed http response")
|
||||
}
|
||||
|
||||
func withClickHouseProtocol(config connection.ConnectionConfig, protocol clickhouse.Protocol) connection.ConnectionConfig {
|
||||
next := config
|
||||
switch protocol {
|
||||
case clickhouse.HTTP:
|
||||
if next.Port == 0 {
|
||||
next.Port = 8123
|
||||
}
|
||||
default:
|
||||
if next.Port == 0 {
|
||||
next.Port = defaultClickHousePort
|
||||
}
|
||||
}
|
||||
return next
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Connect(config connection.ConnectionConfig) error {
|
||||
@@ -165,13 +211,48 @@ func (c *ClickHouseDB) Connect(config connection.ConnectionConfig) error {
|
||||
logger.Infof("ClickHouse 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
}
|
||||
|
||||
c.conn = clickhouse.OpenDB(c.buildClickHouseOptions(runConfig))
|
||||
|
||||
if err := c.Ping(); err != nil {
|
||||
_ = c.Close()
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
return nil
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
primaryProtocol := detectClickHouseProtocol(attempt)
|
||||
protocols := []clickhouse.Protocol{primaryProtocol}
|
||||
if primaryProtocol == clickhouse.Native {
|
||||
protocols = append(protocols, clickhouse.HTTP)
|
||||
} else {
|
||||
protocols = append(protocols, clickhouse.Native)
|
||||
}
|
||||
|
||||
for pIdx, protocol := range protocols {
|
||||
protocolConfig := withClickHouseProtocol(attempt, protocol)
|
||||
c.conn = clickhouse.OpenDB(c.buildClickHouseOptions(protocolConfig))
|
||||
if err := c.Ping(); err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败(protocol=%s): %v", idx+1, protocol.String(), err))
|
||||
if c.conn != nil {
|
||||
_ = c.conn.Close()
|
||||
c.conn = nil
|
||||
}
|
||||
if pIdx == 0 && !isClickHouseProtocolMismatch(err) {
|
||||
// 首次连接不是协议误配特征,避免无谓重试次协议。
|
||||
break
|
||||
}
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("ClickHouse SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
if pIdx > 0 {
|
||||
logger.Warnf("ClickHouse 已自动切换连接协议为 %s(常见于 8123/8443 HTTP 端口)", protocol.String())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
_ = c.Close()
|
||||
return fmt.Errorf("连接建立后验证失败(可检查 ClickHouse 端口与协议是否匹配:Native=9000/9440,HTTP=8123/8443):%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Close() error {
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -36,6 +37,14 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
if config.Database != "" {
|
||||
q.Set("schema", config.Database)
|
||||
}
|
||||
if config.UseSSL {
|
||||
if certPath := strings.TrimSpace(config.SSLCertPath); certPath != "" {
|
||||
q.Set("SSL_CERT_PATH", certPath)
|
||||
}
|
||||
if keyPath := strings.TrimSpace(config.SSLKeyPath); keyPath != "" {
|
||||
q.Set("SSL_KEY_PATH", keyPath)
|
||||
}
|
||||
}
|
||||
if escapedPassword != config.Password {
|
||||
// 达梦驱动要求:密码包含特殊字符时,password 需 PathEscape,并添加 escapeProcess=true 让驱动解码。
|
||||
q.Set("escapeProcess", "true")
|
||||
@@ -50,8 +59,12 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
if runConfig.UseSSL {
|
||||
if strings.TrimSpace(runConfig.SSLCertPath) == "" || strings.TrimSpace(runConfig.SSLKeyPath) == "" {
|
||||
return fmt.Errorf("达梦启用 SSL 需要同时配置证书路径(sslCertPath)与私钥路径(sslKeyPath)")
|
||||
}
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
@@ -80,22 +93,37 @@ func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = d.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("达梦数据库通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = d.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(config)
|
||||
if err := d.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := d.getDSN(attempt)
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := d.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
d.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("达梦 SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (d *DamengDB) Close() error {
|
||||
@@ -177,24 +205,82 @@ func (d *DamengDB) Exec(query string) (int64, error) {
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetDatabases() ([]string, error) {
|
||||
// DM: List Users/Schemas
|
||||
data, _, err := d.Query("SELECT username FROM dba_users")
|
||||
if err != nil {
|
||||
// Fallback if dba_users not accessible
|
||||
data, _, err = d.Query("SELECT username FROM all_users")
|
||||
// 达梦将「用户/模式」作为数据库列表来源,不同权限下可见口径不同。
|
||||
// 这里采用多查询口径聚合,避免仅依赖单一视图导致“少库”。
|
||||
queries := []string{
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM SYS.DBA_USERS ORDER BY USERNAME",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM DBA_USERS ORDER BY USERNAME",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM ALL_USERS ORDER BY USERNAME",
|
||||
"SELECT USERNAME AS DATABASE_NAME FROM USER_USERS",
|
||||
"SELECT DISTINCT OWNER AS DATABASE_NAME FROM ALL_TABLES ORDER BY OWNER",
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{})
|
||||
dbs := make([]string, 0, 64)
|
||||
var lastErr error
|
||||
success := false
|
||||
|
||||
for _, q := range queries {
|
||||
data, _, err := d.Query(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
lastErr = err
|
||||
continue
|
||||
}
|
||||
success = true
|
||||
for _, row := range data {
|
||||
name := getDamengRowString(row, "DATABASE_NAME", "USERNAME", "OWNER", "SCHEMA_NAME")
|
||||
if name == "" {
|
||||
// 回退到第一列,兼容驱动返回列名差异。
|
||||
for _, v := range row {
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", v))
|
||||
if text == "" || strings.EqualFold(text, "<nil>") {
|
||||
continue
|
||||
}
|
||||
name = text
|
||||
break
|
||||
}
|
||||
}
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
key := strings.ToUpper(name)
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
dbs = append(dbs, name)
|
||||
}
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["USERNAME"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
|
||||
if !success && lastErr != nil {
|
||||
return nil, lastErr
|
||||
}
|
||||
|
||||
sort.Slice(dbs, func(i, j int) bool {
|
||||
return strings.ToUpper(dbs[i]) < strings.ToUpper(dbs[j])
|
||||
})
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
func getDamengRowString(row map[string]interface{}, keys ...string) string {
|
||||
if len(row) == 0 {
|
||||
return ""
|
||||
}
|
||||
for _, key := range keys {
|
||||
for k, v := range row {
|
||||
if !strings.EqualFold(strings.TrimSpace(k), strings.TrimSpace(key)) {
|
||||
continue
|
||||
}
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", v))
|
||||
if text == "" || strings.EqualFold(text, "<nil>") {
|
||||
return ""
|
||||
}
|
||||
return text
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetTables(dbName string) ([]string, error) {
|
||||
query := fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
|
||||
if dbName == "" {
|
||||
|
||||
@@ -151,9 +151,10 @@ func (d *DirosDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
timeout := getConnectTimeoutSeconds(config)
|
||||
tlsMode := resolveMySQLTLSMode(config)
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds",
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds&tls=%s",
|
||||
config.User, config.Password, protocol, address, database, timeout, url.QueryEscape(tlsMode))
|
||||
}
|
||||
|
||||
func resolveDirosCredential(config connection.ConnectionConfig, addressIndex int) (string, string) {
|
||||
|
||||
74
internal/db/driver_agent_binary_check.go
Normal file
74
internal/db/driver_agent_binary_check.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"debug/pe"
|
||||
"fmt"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
peMachineI386 uint16 = 0x014c
|
||||
peMachineAmd64 uint16 = 0x8664
|
||||
peMachineArm64 uint16 = 0xaa64
|
||||
)
|
||||
|
||||
func windowsMachineLabel(machine uint16) string {
|
||||
switch machine {
|
||||
case peMachineI386:
|
||||
return "windows-386"
|
||||
case peMachineAmd64:
|
||||
return "windows-amd64"
|
||||
case peMachineArm64:
|
||||
return "windows-arm64"
|
||||
default:
|
||||
return fmt.Sprintf("windows-unknown(0x%04x)", machine)
|
||||
}
|
||||
}
|
||||
|
||||
func expectedWindowsMachineForGoArch(goarch string) (uint16, string, bool) {
|
||||
switch strings.ToLower(strings.TrimSpace(goarch)) {
|
||||
case "386":
|
||||
return peMachineI386, "windows-386", true
|
||||
case "amd64":
|
||||
return peMachineAmd64, "windows-amd64", true
|
||||
case "arm64":
|
||||
return peMachineArm64, "windows-arm64", true
|
||||
default:
|
||||
return 0, "", false
|
||||
}
|
||||
}
|
||||
|
||||
func validateWindowsExecutableMachine(pathText string) error {
|
||||
file, err := pe.Open(pathText)
|
||||
if err != nil {
|
||||
return fmt.Errorf("无法识别为有效的 Windows 可执行文件:%w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
expectedMachine, expectedLabel, ok := expectedWindowsMachineForGoArch(runtime.GOARCH)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
actualMachine := file.FileHeader.Machine
|
||||
if actualMachine != expectedMachine {
|
||||
return fmt.Errorf("可执行文件架构不兼容(文件=%s,当前进程=%s)", windowsMachineLabel(actualMachine), expectedLabel)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateOptionalDriverAgentExecutable 校验可选驱动代理二进制是否可在当前进程中执行。
|
||||
// 当前主要用于 Windows 下的 PE 架构兼容性校验,避免升级后复用到错误架构的旧代理。
|
||||
func ValidateOptionalDriverAgentExecutable(driverType string, executablePath string) error {
|
||||
pathText := strings.TrimSpace(executablePath)
|
||||
if pathText == "" {
|
||||
return fmt.Errorf("%s 驱动代理路径为空", driverDisplayName(driverType))
|
||||
}
|
||||
if runtime.GOOS != "windows" {
|
||||
return nil
|
||||
}
|
||||
if err := validateWindowsExecutableMachine(pathText); err != nil {
|
||||
return fmt.Errorf("%s 驱动代理不可用:%w", driverDisplayName(driverType), err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -194,6 +194,9 @@ func optionalGoDriverRuntimeReady(driverType string) (bool, string) {
|
||||
if statErr != nil || info.IsDir() {
|
||||
return false, fmt.Sprintf("%s 驱动代理缺失,请在驱动管理中重新安装启用", driverDisplayName(normalized))
|
||||
}
|
||||
if validateErr := ValidateOptionalDriverAgentExecutable(normalized, executablePath); validateErr != nil {
|
||||
return false, fmt.Sprintf("%s;请在驱动管理中重新安装启用", validateErr.Error())
|
||||
}
|
||||
return true, ""
|
||||
}
|
||||
|
||||
|
||||
@@ -65,11 +65,22 @@ func TestManagedDriverRequiresInstallMarker(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("解析 mariadb 代理路径失败: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(executablePath, []byte("placeholder"), 0o755); err != nil {
|
||||
t.Fatalf("写入 mariadb 代理占位文件失败: %v", err)
|
||||
}
|
||||
if runtime.GOOS == "windows" {
|
||||
_ = os.Chmod(executablePath, 0o644)
|
||||
selfPath, selfErr := os.Executable()
|
||||
if selfErr != nil {
|
||||
t.Fatalf("获取测试进程路径失败: %v", selfErr)
|
||||
}
|
||||
content, readErr := os.ReadFile(selfPath)
|
||||
if readErr != nil {
|
||||
t.Fatalf("读取测试进程失败: %v", readErr)
|
||||
}
|
||||
if err := os.WriteFile(executablePath, content, 0o755); err != nil {
|
||||
t.Fatalf("写入 mariadb 代理占位可执行文件失败: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err := os.WriteFile(executablePath, []byte("placeholder"), 0o755); err != nil {
|
||||
t.Fatalf("写入 mariadb 代理占位文件失败: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
supported, reason := DriverRuntimeSupportStatus("mariadb")
|
||||
|
||||
@@ -33,6 +33,44 @@ func TestPostgresDSN_EscapesPassword(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestPostgresDSN_SSLModeRequireWhenEnabled(t *testing.T) {
|
||||
p := &PostgresDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
Host: "127.0.0.1",
|
||||
Port: 5432,
|
||||
User: "user",
|
||||
Password: "pass",
|
||||
Database: "db",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := p.getDSN(cfg)
|
||||
if !strings.Contains(dsn, "sslmode=require") {
|
||||
t.Fatalf("dsn 缺少 sslmode=require 参数:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMySQLDSN_UsesTLSParamWhenSSLEnabled(t *testing.T) {
|
||||
m := &MySQLDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "127.0.0.1",
|
||||
Port: 3306,
|
||||
User: "root",
|
||||
Password: "pass",
|
||||
Database: "db",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := m.getDSN(cfg)
|
||||
if !strings.Contains(dsn, "tls=true") {
|
||||
t.Fatalf("dsn 缺少 tls=true 参数:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOracleDSN_EscapesUserAndPassword(t *testing.T) {
|
||||
o := &OracleDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
@@ -82,6 +120,30 @@ func TestDamengDSN_EscapesPasswordAndEnablesEscapeProcess(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestDamengDSN_AppendsSSLCertAndKeyParams(t *testing.T) {
|
||||
d := &DamengDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "dameng",
|
||||
Host: "127.0.0.1",
|
||||
Port: 5236,
|
||||
User: "SYSDBA",
|
||||
Password: "pass",
|
||||
Database: "DBName",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
SSLCertPath: "C:\\certs\\client-cert.pem",
|
||||
SSLKeyPath: "C:\\certs\\client-key.pem",
|
||||
}
|
||||
|
||||
dsn := d.getDSN(cfg)
|
||||
if !strings.Contains(dsn, "SSL_CERT_PATH=") {
|
||||
t.Fatalf("dsn 缺少 SSL_CERT_PATH 参数:%s", dsn)
|
||||
}
|
||||
if !strings.Contains(dsn, "SSL_KEY_PATH=") {
|
||||
t.Fatalf("dsn 缺少 SSL_KEY_PATH 参数:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKingbaseDSN_QuotesPasswordWithSpaces(t *testing.T) {
|
||||
k := &KingbaseDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
@@ -116,6 +178,47 @@ func TestTDengineDSN_UsesWebSocketFormat(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestTDengineDSN_UsesSecureWebSocketWhenSSLEnabled(t *testing.T) {
|
||||
td := &TDengineDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "tdengine",
|
||||
Host: "127.0.0.1",
|
||||
Port: 6041,
|
||||
User: "root",
|
||||
Password: "taosdata",
|
||||
Database: "power",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := td.getDSN(cfg)
|
||||
if !strings.HasPrefix(dsn, "root:taosdata@wss(127.0.0.1:6041)/power") {
|
||||
t.Fatalf("tdengine ssl dsn 格式不正确:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSQLServerDSN_EncryptMapping(t *testing.T) {
|
||||
s := &SqlServerDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "sqlserver",
|
||||
Host: "127.0.0.1",
|
||||
Port: 1433,
|
||||
User: "sa",
|
||||
Password: "pass",
|
||||
Database: "master",
|
||||
UseSSL: true,
|
||||
SSLMode: "required",
|
||||
}
|
||||
|
||||
dsn := s.getDSN(cfg)
|
||||
if !strings.Contains(strings.ToLower(dsn), "encrypt=true") {
|
||||
t.Fatalf("sqlserver dsn 缺少 encrypt=true:%s", dsn)
|
||||
}
|
||||
if !strings.Contains(strings.ToLower(dsn), "trustservercertificate=false") {
|
||||
t.Fatalf("sqlserver dsn 缺少 TrustServerCertificate=false:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClickHouseOptions_UsesStructuredTimeoutAndAuth(t *testing.T) {
|
||||
c := &ClickHouseDB{}
|
||||
cfg := normalizeClickHouseConfig(connection.ConnectionConfig{
|
||||
|
||||
@@ -42,7 +42,7 @@ func (h *HighGoDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("sslmode", resolvePostgresSSLMode(config))
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
@@ -50,7 +50,7 @@ func (h *HighGoDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
runConfig := config
|
||||
|
||||
if config.UseSSH {
|
||||
logger.Infof("HighGo 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
@@ -76,23 +76,37 @@ func (h *HighGoDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = h.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("HighGo 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = h.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("highgo", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
h.conn = db
|
||||
h.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := h.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := h.getDSN(attempt)
|
||||
db, err := sql.Open("highgo", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
h.conn = db
|
||||
h.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := h.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
h.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("HighGo SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Close() error {
|
||||
|
||||
@@ -65,12 +65,13 @@ func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
port := config.Port
|
||||
|
||||
// Construct DSN
|
||||
dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable connect_timeout=%d",
|
||||
dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=%s connect_timeout=%d",
|
||||
quoteConnValue(address),
|
||||
port,
|
||||
quoteConnValue(config.User),
|
||||
quoteConnValue(config.Password),
|
||||
quoteConnValue(config.Database),
|
||||
quoteConnValue(resolvePostgresSSLMode(config)),
|
||||
getConnectTimeoutSeconds(config),
|
||||
)
|
||||
|
||||
@@ -78,8 +79,7 @@ func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
@@ -108,23 +108,37 @@ func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = k.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("人大金仓通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = k.getDSN(config)
|
||||
}
|
||||
|
||||
// Open using "kingbase" driver
|
||||
db, err := sql.Open("kingbase", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
k.conn = db
|
||||
k.pingTimeout = getConnectTimeout(config)
|
||||
if err := k.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := k.getDSN(attempt)
|
||||
db, err := sql.Open("kingbase", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
k.conn = db
|
||||
k.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := k.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
k.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("人大金仓 SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Close() error {
|
||||
@@ -291,10 +305,30 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s' AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, esc(schema), esc(table))
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = '%s'
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(schema), esc(table))
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -307,11 +341,21 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if row["column_default"] != nil {
|
||||
def := fmt.Sprintf("%v", row["column_default"])
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
if v, ok := row["comment"]; ok && v != nil {
|
||||
col.Comment = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
@@ -333,10 +377,30 @@ func (k *KingbaseDB) getColumnsWithCurrentSchema(tableName string) ([]connection
|
||||
}
|
||||
|
||||
// 使用 current_schema() 获取当前schema
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = current_schema() AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, esc(table))
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = current_schema()
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(table))
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -349,11 +413,21 @@ func (k *KingbaseDB) getColumnsWithCurrentSchema(tableName string) ([]connection
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if row["column_default"] != nil {
|
||||
def := fmt.Sprintf("%v", row["column_default"])
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
if v, ok := row["comment"]; ok && v != nil {
|
||||
col.Comment = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
@@ -609,28 +683,16 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
schema, table := splitKingbaseQualifiedTable(tableName)
|
||||
if table == "" {
|
||||
return fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteKingbaseIdent(schema), quoteKingbaseIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
qualifiedTable = quoteKingbaseIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
@@ -640,7 +702,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteKingbaseIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
@@ -648,7 +710,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
return fmt.Errorf("delete error: %v; sql=%s", err, query)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -660,7 +722,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteKingbaseIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
@@ -671,7 +733,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteKingbaseIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
@@ -681,7 +743,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
return fmt.Errorf("update error: %v; sql=%s", err, query)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -694,7 +756,7 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
cols = append(cols, quoteKingbaseIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf("$%d", idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
@@ -705,13 +767,74 @@ func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
return fmt.Errorf("insert error: %v; sql=%s", err, query)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func normalizeKingbaseIdentifier(raw string) string {
|
||||
value := strings.TrimSpace(raw)
|
||||
if value == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// 兼容 JSON/字符串转义后传入的标识符:\"schema\" -> "schema"
|
||||
value = strings.ReplaceAll(value, `\"`, `"`)
|
||||
value = strings.TrimSpace(value)
|
||||
|
||||
// 兼容异常多重包裹引号(例如 ""schema""、""""schema"""")。
|
||||
// strings.Trim 会移除两端连续引号,迭代后可收敛到纯标识符。
|
||||
for i := 0; i < 4; i++ {
|
||||
next := strings.TrimSpace(strings.Trim(value, `"`))
|
||||
if next == value {
|
||||
break
|
||||
}
|
||||
value = next
|
||||
}
|
||||
|
||||
// 兼容其他方言可能残留的引用形式
|
||||
if len(value) >= 2 && strings.HasPrefix(value, "`") && strings.HasSuffix(value, "`") {
|
||||
value = strings.TrimSpace(strings.Trim(value, "`"))
|
||||
}
|
||||
if len(value) >= 2 && strings.HasPrefix(value, "[") && strings.HasSuffix(value, "]") {
|
||||
value = strings.TrimSpace(value[1 : len(value)-1])
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
func quoteKingbaseIdent(name string) string {
|
||||
n := normalizeKingbaseIdentifier(name)
|
||||
n = strings.ReplaceAll(n, `"`, `""`)
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
func splitKingbaseQualifiedTable(tableName string) (schema string, table string) {
|
||||
raw := strings.TrimSpace(tableName)
|
||||
if raw == "" {
|
||||
return "", ""
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(raw, ".", 2); len(parts) == 2 {
|
||||
schema = normalizeKingbaseIdentifier(parts[0])
|
||||
table = normalizeKingbaseIdentifier(parts[1])
|
||||
if table == "" {
|
||||
return "", normalizeKingbaseIdentifier(raw)
|
||||
}
|
||||
if schema == "" {
|
||||
return "", table
|
||||
}
|
||||
return schema, table
|
||||
}
|
||||
|
||||
return "", normalizeKingbaseIdentifier(raw)
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
// dbName 在本项目语义里是“数据库”,schema 由 table_schema 决定;这里返回全部用户 schema 的列用于查询提示。
|
||||
query := `
|
||||
|
||||
74
internal/db/kingbase_impl_test.go
Normal file
74
internal/db/kingbase_impl_test.go
Normal file
@@ -0,0 +1,74 @@
|
||||
//go:build gonavi_full_drivers || gonavi_kingbase_driver
|
||||
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNormalizeKingbaseIdentifier(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
want string
|
||||
}{
|
||||
{name: "plain", in: "ldf_server", want: "ldf_server"},
|
||||
{name: "quoted", in: `"ldf_server"`, want: "ldf_server"},
|
||||
{name: "double quoted", in: `""ldf_server""`, want: "ldf_server"},
|
||||
{name: "quad quoted", in: `""""ldf_server""""`, want: "ldf_server"},
|
||||
{name: "escaped quoted", in: `\"ldf_server\"`, want: "ldf_server"},
|
||||
{name: "backtick quoted", in: "`ldf_server`", want: "ldf_server"},
|
||||
{name: "bracket quoted", in: "[ldf_server]", want: "ldf_server"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := normalizeKingbaseIdentifier(tt.in); got != tt.want {
|
||||
t.Fatalf("normalizeKingbaseIdentifier(%q) = %q, want %q", tt.in, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuoteKingbaseIdent(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
want string
|
||||
}{
|
||||
{name: "plain", in: "ldf_server", want: `"ldf_server"`},
|
||||
{name: "double quoted", in: `""ldf_server""`, want: `"ldf_server"`},
|
||||
{name: "escaped quoted", in: `\"ldf_server\"`, want: `"ldf_server"`},
|
||||
{name: "with embedded quote", in: `ab"cd`, want: `"ab""cd"`},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := quoteKingbaseIdent(tt.in); got != tt.want {
|
||||
t.Fatalf("quoteKingbaseIdent(%q) = %q, want %q", tt.in, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitKingbaseQualifiedTable(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in string
|
||||
wantSchema string
|
||||
wantTable string
|
||||
}{
|
||||
{name: "plain qualified", in: "ldf_server.t_user", wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "double quoted qualified", in: `""ldf_server"".""t_user""`, wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "escaped qualified", in: `\"ldf_server\".\"t_user\"`, wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "bracket qualified", in: "[ldf_server].[t_user]", wantSchema: "ldf_server", wantTable: "t_user"},
|
||||
{name: "table only", in: `""t_user""`, wantSchema: "", wantTable: "t_user"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
gotSchema, gotTable := splitKingbaseQualifiedTable(tt.in)
|
||||
if gotSchema != tt.wantSchema || gotTable != tt.wantTable {
|
||||
t.Fatalf("splitKingbaseQualifiedTable(%q) = (%q, %q), want (%q, %q)", tt.in, gotSchema, gotTable, tt.wantSchema, tt.wantTable)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -40,9 +41,10 @@ func (m *MariaDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
timeout := getConnectTimeoutSeconds(config)
|
||||
tlsMode := resolveMySQLTLSMode(config)
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds",
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds&tls=%s",
|
||||
config.User, config.Password, protocol, address, database, timeout, url.QueryEscape(tlsMode))
|
||||
}
|
||||
|
||||
func (m *MariaDB) Connect(config connection.ConnectionConfig) error {
|
||||
|
||||
@@ -4,6 +4,7 @@ package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
@@ -327,35 +328,60 @@ func (m *MongoDB) Connect(config connection.ConnectionConfig) error {
|
||||
m.database = "admin"
|
||||
}
|
||||
|
||||
attemptConfigs := buildMongoAuthAttempts(connectConfig)
|
||||
sslAttempts := []connection.ConnectionConfig{connectConfig}
|
||||
if shouldTrySSLPreferredFallback(connectConfig) {
|
||||
sslAttempts = append(sslAttempts, withSSLDisabled(connectConfig))
|
||||
}
|
||||
|
||||
var errorDetails []string
|
||||
for index, attemptConfig := range attemptConfigs {
|
||||
authLabel := "主库凭据"
|
||||
if index > 0 {
|
||||
authLabel = "从库凭据"
|
||||
for sslIndex, sslConfig := range sslAttempts {
|
||||
sslLabel := "SSL"
|
||||
if sslIndex > 0 {
|
||||
sslLabel = "明文回退"
|
||||
}
|
||||
|
||||
uri := m.getURI(attemptConfig)
|
||||
clientOpts := options.Client().ApplyURI(uri)
|
||||
if attemptConfig.UseProxy {
|
||||
clientOpts.SetDialer(&mongoProxyDialer{proxyConfig: attemptConfig.Proxy})
|
||||
}
|
||||
client, err := mongo.Connect(clientOpts)
|
||||
if err != nil {
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s连接失败: %v", authLabel, err))
|
||||
continue
|
||||
}
|
||||
attemptConfigs := buildMongoAuthAttempts(sslConfig)
|
||||
for index, attemptConfig := range attemptConfigs {
|
||||
authLabel := "主库凭据"
|
||||
if index > 0 {
|
||||
authLabel = "从库凭据"
|
||||
}
|
||||
|
||||
m.client = client
|
||||
if err := m.Ping(); err != nil {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
|
||||
_ = client.Disconnect(ctx)
|
||||
cancel()
|
||||
m.client = nil
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s验证失败: %v", authLabel, err))
|
||||
continue
|
||||
if sslIndex > 0 {
|
||||
attemptConfig.URI = ""
|
||||
}
|
||||
uri := m.getURI(attemptConfig)
|
||||
clientOpts := options.Client().ApplyURI(uri)
|
||||
tlsEnabled, tlsInsecure := resolveMongoTLSSettings(attemptConfig)
|
||||
if tlsEnabled {
|
||||
clientOpts.SetTLSConfig(&tls.Config{
|
||||
MinVersion: tls.VersionTLS12,
|
||||
InsecureSkipVerify: tlsInsecure,
|
||||
})
|
||||
}
|
||||
if attemptConfig.UseProxy {
|
||||
clientOpts.SetDialer(&mongoProxyDialer{proxyConfig: attemptConfig.Proxy})
|
||||
}
|
||||
client, err := mongo.Connect(clientOpts)
|
||||
if err != nil {
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s %s连接失败: %v", sslLabel, authLabel, err))
|
||||
continue
|
||||
}
|
||||
|
||||
m.client = client
|
||||
if err := m.Ping(); err != nil {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
|
||||
_ = client.Disconnect(ctx)
|
||||
cancel()
|
||||
m.client = nil
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s %s验证失败: %v", sslLabel, authLabel, err))
|
||||
continue
|
||||
}
|
||||
if sslIndex > 0 {
|
||||
logger.Warnf("MongoDB SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if len(errorDetails) > 0 {
|
||||
|
||||
1187
internal/db/mongodb_impl_v1.go
Normal file
1187
internal/db/mongodb_impl_v1.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -35,6 +35,32 @@ func ResolveOptionalDriverAgentExecutablePath(downloadDir string, driverType str
|
||||
return filepath.Join(root, normalized, optionalDriverAgentExecutableName(normalized)), nil
|
||||
}
|
||||
|
||||
func ResolveOptionalDriverAgentExecutablePathForVersion(downloadDir string, driverType string, version string) (string, error) {
|
||||
normalized := normalizeRuntimeDriverType(driverType)
|
||||
if strings.TrimSpace(normalized) == "" {
|
||||
return "", fmt.Errorf("驱动类型为空")
|
||||
}
|
||||
root, err := resolveExternalDriverRoot(downloadDir)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if normalized != "mongodb" {
|
||||
return filepath.Join(root, normalized, optionalDriverAgentExecutableName(normalized)), nil
|
||||
}
|
||||
|
||||
baseName := optionalDriverAgentExecutableName(normalized)
|
||||
ext := filepath.Ext(baseName)
|
||||
stem := strings.TrimSuffix(baseName, ext)
|
||||
major := 2
|
||||
trimmed := strings.TrimSpace(version)
|
||||
trimmed = strings.TrimPrefix(trimmed, "v")
|
||||
if strings.HasPrefix(trimmed, "1.") || trimmed == "1" {
|
||||
major = 1
|
||||
}
|
||||
versionedName := fmt.Sprintf("%s-v%d%s", stem, major, ext)
|
||||
return filepath.Join(root, normalized, versionedName), nil
|
||||
}
|
||||
func ResolveMySQLAgentExecutablePath(downloadDir string) (string, error) {
|
||||
return ResolveOptionalDriverAgentExecutablePath(downloadDir, "mysql")
|
||||
}
|
||||
|
||||
@@ -184,9 +184,10 @@ func (m *MySQLDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
timeout := getConnectTimeoutSeconds(config)
|
||||
tlsMode := resolveMySQLTLSMode(config)
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds",
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds&tls=%s",
|
||||
config.User, config.Password, protocol, address, database, timeout, url.QueryEscape(tlsMode))
|
||||
}
|
||||
|
||||
func resolveMySQLCredential(config connection.ConnectionConfig, addressIndex int) (string, string) {
|
||||
|
||||
@@ -9,8 +9,10 @@ import (
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
@@ -94,6 +96,9 @@ func newOptionalDriverAgentClient(driverType string, executablePath string) (*op
|
||||
return nil, fmt.Errorf("创建 %s 驱动代理 stderr 失败:%w", driverDisplayName(driverType), err)
|
||||
}
|
||||
if err := cmd.Start(); err != nil {
|
||||
if isWindowsExecutableMachineMismatch(err) {
|
||||
return nil, fmt.Errorf("启动 %s 驱动代理失败:%w(检测到驱动代理与当前系统架构不兼容,请在驱动管理中重新安装启用)", driverDisplayName(driverType), err)
|
||||
}
|
||||
return nil, fmt.Errorf("启动 %s 驱动代理失败:%w", driverDisplayName(driverType), err)
|
||||
}
|
||||
|
||||
@@ -107,6 +112,30 @@ func newOptionalDriverAgentClient(driverType string, executablePath string) (*op
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func isWindowsExecutableMachineMismatch(err error) bool {
|
||||
if err == nil || runtime.GOOS != "windows" {
|
||||
return false
|
||||
}
|
||||
var errno syscall.Errno
|
||||
if errors.As(err, &errno) && errno == syscall.Errno(216) {
|
||||
return true
|
||||
}
|
||||
text := strings.ToLower(strings.TrimSpace(err.Error()))
|
||||
if text == "" {
|
||||
return false
|
||||
}
|
||||
if strings.Contains(text, "not compatible with the version of windows") {
|
||||
return true
|
||||
}
|
||||
if strings.Contains(text, "win32") && strings.Contains(text, "compatible") {
|
||||
return true
|
||||
}
|
||||
if strings.Contains(text, "不是有效的win32应用程序") || strings.Contains(text, "无法在win32模式下运行") {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (c *optionalDriverAgentClient) captureStderr(stderr io.Reader) {
|
||||
scanner := bufio.NewScanner(stderr)
|
||||
buffer := make([]byte, 0, 8<<10)
|
||||
|
||||
@@ -35,12 +35,23 @@ func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
u.RawPath = "/" + url.PathEscape(database)
|
||||
q := url.Values{}
|
||||
switch normalizedSSLMode(config) {
|
||||
case sslModeRequired:
|
||||
q.Set("SSL", "TRUE")
|
||||
q.Set("SSL VERIFY", "TRUE")
|
||||
case sslModeSkipVerify, sslModePreferred:
|
||||
q.Set("SSL", "TRUE")
|
||||
q.Set("SSL VERIFY", "FALSE")
|
||||
}
|
||||
if encoded := q.Encode(); encoded != "" {
|
||||
u.RawQuery = encoded
|
||||
}
|
||||
return u.String()
|
||||
}
|
||||
|
||||
func (o *OracleDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
serviceName := strings.TrimSpace(config.Database)
|
||||
if serviceName == "" {
|
||||
return fmt.Errorf("Oracle 连接缺少服务名(Service Name),请在连接配置中填写,例如 ORCLPDB1")
|
||||
@@ -73,22 +84,37 @@ func (o *OracleDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = o.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("Oracle 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = o.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("oracle", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
o.conn = db
|
||||
o.pingTimeout = getConnectTimeout(config)
|
||||
if err := o.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := o.getDSN(attempt)
|
||||
db, err := sql.Open("oracle", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
o.conn = db
|
||||
o.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := o.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
o.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("Oracle SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (o *OracleDB) Close() error {
|
||||
|
||||
@@ -62,7 +62,7 @@ func (p *PostgresDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("sslmode", resolvePostgresSSLMode(config))
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
@@ -126,34 +126,49 @@ func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
logger.Infof("PostgreSQL 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
}
|
||||
|
||||
attemptDBs := resolvePostgresConnectDatabases(runConfig)
|
||||
sslAttempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
sslAttempts = append(sslAttempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
|
||||
var failures []string
|
||||
for _, dbName := range attemptDBs {
|
||||
attemptConfig := runConfig
|
||||
attemptConfig.Database = dbName
|
||||
dsn := p.getDSN(attemptConfig)
|
||||
|
||||
dbConn, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("数据库=%s 打开连接失败: %v", dbName, err))
|
||||
continue
|
||||
}
|
||||
p.conn = dbConn
|
||||
|
||||
// Force verification
|
||||
if err := p.Ping(); err != nil {
|
||||
failures = append(failures, fmt.Sprintf("数据库=%s 验证失败: %v", dbName, err))
|
||||
_ = dbConn.Close()
|
||||
p.conn = nil
|
||||
continue
|
||||
for sslIndex, sslConfig := range sslAttempts {
|
||||
sslLabel := "SSL"
|
||||
if sslIndex > 0 {
|
||||
sslLabel = "明文回退"
|
||||
}
|
||||
|
||||
if strings.TrimSpace(config.Database) == "" && !strings.EqualFold(dbName, "postgres") {
|
||||
logger.Infof("PostgreSQL 自动选择连接数据库:%s", dbName)
|
||||
}
|
||||
attemptDBs := resolvePostgresConnectDatabases(sslConfig)
|
||||
for _, dbName := range attemptDBs {
|
||||
attemptConfig := sslConfig
|
||||
attemptConfig.Database = dbName
|
||||
dsn := p.getDSN(attemptConfig)
|
||||
|
||||
cleanupOnFailure = false
|
||||
return nil
|
||||
dbConn, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("%s 数据库=%s 打开连接失败: %v", sslLabel, dbName, err))
|
||||
continue
|
||||
}
|
||||
p.conn = dbConn
|
||||
|
||||
// Force verification
|
||||
if err := p.Ping(); err != nil {
|
||||
failures = append(failures, fmt.Sprintf("%s 数据库=%s 验证失败: %v", sslLabel, dbName, err))
|
||||
_ = dbConn.Close()
|
||||
p.conn = nil
|
||||
continue
|
||||
}
|
||||
|
||||
if sslIndex > 0 {
|
||||
logger.Warnf("PostgreSQL SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
if strings.TrimSpace(config.Database) == "" && !strings.EqualFold(dbName, "postgres") {
|
||||
logger.Infof("PostgreSQL 自动选择连接数据库:%s", dbName)
|
||||
}
|
||||
|
||||
cleanupOnFailure = false
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if len(failures) == 0 {
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
@@ -30,12 +31,44 @@ func normalizeQueryValue(v interface{}) interface{} {
|
||||
}
|
||||
|
||||
func normalizeQueryValueWithDBType(v interface{}, databaseTypeName string) interface{} {
|
||||
if tm, ok := v.(time.Time); ok {
|
||||
return normalizeTemporalValueForDisplay(tm, databaseTypeName)
|
||||
}
|
||||
if b, ok := v.([]byte); ok {
|
||||
return bytesToDisplayValue(b, databaseTypeName)
|
||||
}
|
||||
return normalizeCompositeQueryValue(v)
|
||||
}
|
||||
|
||||
func normalizeTemporalValueForDisplay(value time.Time, databaseTypeName string) interface{} {
|
||||
if value.IsZero() {
|
||||
if zeroValue, ok := zeroTemporalDisplayValue(databaseTypeName); ok {
|
||||
return zeroValue
|
||||
}
|
||||
}
|
||||
return value.Format(time.RFC3339Nano)
|
||||
}
|
||||
|
||||
func zeroTemporalDisplayValue(databaseTypeName string) (string, bool) {
|
||||
typeName := strings.ToUpper(strings.TrimSpace(databaseTypeName))
|
||||
if typeName == "" {
|
||||
return "0000-00-00 00:00:00", true
|
||||
}
|
||||
|
||||
switch {
|
||||
case strings.Contains(typeName, "TIMESTAMP") || strings.Contains(typeName, "DATETIME"):
|
||||
return "0000-00-00 00:00:00", true
|
||||
case typeName == "DATE" || typeName == "NEWDATE":
|
||||
return "0000-00-00", true
|
||||
case strings.Contains(typeName, "TIME"):
|
||||
return "00:00:00", true
|
||||
case strings.Contains(typeName, "YEAR"):
|
||||
return "0000", true
|
||||
default:
|
||||
return "", false
|
||||
}
|
||||
}
|
||||
|
||||
func normalizeCompositeQueryValue(v interface{}) interface{} {
|
||||
if v == nil {
|
||||
return nil
|
||||
@@ -86,6 +119,16 @@ func normalizeCompositeQueryValue(v interface{}) interface{} {
|
||||
items[i] = normalizeQueryValue(rv.Index(i).Interface())
|
||||
}
|
||||
return items
|
||||
case reflect.Struct:
|
||||
// 部分驱动(如 Kingbase)会返回复杂结构体值,直接透传会导致前端渲染和比较开销激增。
|
||||
// 统一降级为可读字符串,避免对象深层序列化触发 UI 卡顿。
|
||||
if tm, ok := v.(time.Time); ok {
|
||||
return normalizeTemporalValueForDisplay(tm, "")
|
||||
}
|
||||
if stringer, ok := v.(fmt.Stringer); ok {
|
||||
return stringer.String()
|
||||
}
|
||||
return fmt.Sprintf("%v", v)
|
||||
default:
|
||||
return normalizeUnsafeIntegerForJS(rv, v)
|
||||
}
|
||||
|
||||
@@ -2,7 +2,9 @@ package db
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type duckMapLike map[any]any
|
||||
@@ -165,3 +167,61 @@ func TestNormalizeQueryValueWithDBType_JSONNumber(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type customStructValue struct {
|
||||
Name string
|
||||
Age int
|
||||
}
|
||||
|
||||
func (v customStructValue) String() string {
|
||||
return fmt.Sprintf("%s-%d", v.Name, v.Age)
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_StructToString(t *testing.T) {
|
||||
got := normalizeQueryValueWithDBType(customStructValue{Name: "alice", Age: 18}, "")
|
||||
if got != "alice-18" {
|
||||
t.Fatalf("结构体应降级为可读字符串,实际=%v(%T)", got, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_TimeStructToRFC3339(t *testing.T) {
|
||||
input := time.Date(2026, 3, 5, 18, 30, 15, 123456789, time.UTC)
|
||||
got := normalizeQueryValueWithDBType(input, "")
|
||||
text, ok := got.(string)
|
||||
if !ok {
|
||||
t.Fatalf("time.Time 应转为字符串,实际=%v(%T)", got, got)
|
||||
}
|
||||
if text != "2026-03-05T18:30:15.123456789Z" {
|
||||
t.Fatalf("time.Time 规整值异常,实际=%s", text)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_ZeroTemporalValues(t *testing.T) {
|
||||
zero := time.Time{}
|
||||
cases := []struct {
|
||||
name string
|
||||
dbType string
|
||||
wantText string
|
||||
}{
|
||||
{name: "date", dbType: "DATE", wantText: "0000-00-00"},
|
||||
{name: "newdate", dbType: "NEWDATE", wantText: "0000-00-00"},
|
||||
{name: "datetime", dbType: "DATETIME", wantText: "0000-00-00 00:00:00"},
|
||||
{name: "timestamp", dbType: "TIMESTAMP", wantText: "0000-00-00 00:00:00"},
|
||||
{name: "time", dbType: "TIME", wantText: "00:00:00"},
|
||||
{name: "year", dbType: "YEAR", wantText: "0000"},
|
||||
{name: "unknown", dbType: "", wantText: "0000-00-00 00:00:00"},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got := normalizeQueryValueWithDBType(zero, tc.dbType)
|
||||
text, ok := got.(string)
|
||||
if !ok {
|
||||
t.Fatalf("期望 string,实际=%v(%T)", got, got)
|
||||
}
|
||||
if text != tc.wantText {
|
||||
t.Fatalf("dbType=%s 期望=%s,实际=%s", tc.dbType, tc.wantText, text)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,8 +47,9 @@ func (s *SqlServerDB) getDSN(config connection.ConnectionConfig) string {
|
||||
q := url.Values{}
|
||||
q.Set("database", dbname)
|
||||
q.Set("connection timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
q.Set("encrypt", "disable")
|
||||
q.Set("TrustServerCertificate", "true")
|
||||
encrypt, trustServerCertificate := resolveSQLServerTLSSettings(config)
|
||||
q.Set("encrypt", encrypt)
|
||||
q.Set("TrustServerCertificate", trustServerCertificate)
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
return u.String()
|
||||
|
||||
122
internal/db/ssl_mode.go
Normal file
122
internal/db/ssl_mode.go
Normal file
@@ -0,0 +1,122 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
const (
|
||||
sslModeDisable = "disable"
|
||||
sslModePreferred = "preferred"
|
||||
sslModeRequired = "required"
|
||||
sslModeSkipVerify = "skip-verify"
|
||||
)
|
||||
|
||||
func normalizeSSLModeValue(raw string) string {
|
||||
mode := strings.ToLower(strings.TrimSpace(raw))
|
||||
switch mode {
|
||||
case "", sslModePreferred, "prefer":
|
||||
return sslModePreferred
|
||||
case sslModeRequired, "require", "on", "true", "mandatory", "strict":
|
||||
return sslModeRequired
|
||||
case sslModeSkipVerify, "insecure", "skipverify", "skip_verify", "insecure-skip-verify":
|
||||
return sslModeSkipVerify
|
||||
case sslModeDisable, "disabled", "off", "false", "none":
|
||||
return sslModeDisable
|
||||
default:
|
||||
return sslModePreferred
|
||||
}
|
||||
}
|
||||
|
||||
func normalizedSSLMode(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSL {
|
||||
return sslModeDisable
|
||||
}
|
||||
return normalizeSSLModeValue(config.SSLMode)
|
||||
}
|
||||
|
||||
func shouldTrySSLPreferredFallback(config connection.ConnectionConfig) bool {
|
||||
return config.UseSSL && normalizeSSLModeValue(config.SSLMode) == sslModePreferred
|
||||
}
|
||||
|
||||
func withSSLDisabled(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
next := config
|
||||
next.UseSSL = false
|
||||
next.SSLMode = sslModeDisable
|
||||
return next
|
||||
}
|
||||
|
||||
func resolveMySQLTLSMode(config connection.ConnectionConfig) string {
|
||||
switch normalizedSSLMode(config) {
|
||||
case sslModeDisable:
|
||||
return "false"
|
||||
case sslModeRequired:
|
||||
return "true"
|
||||
case sslModeSkipVerify:
|
||||
return "skip-verify"
|
||||
default:
|
||||
return "preferred"
|
||||
}
|
||||
}
|
||||
|
||||
func resolvePostgresSSLMode(config connection.ConnectionConfig) string {
|
||||
switch normalizedSSLMode(config) {
|
||||
case sslModeDisable:
|
||||
return "disable"
|
||||
case sslModeRequired:
|
||||
return "require"
|
||||
case sslModeSkipVerify:
|
||||
return "require"
|
||||
default:
|
||||
return "require"
|
||||
}
|
||||
}
|
||||
|
||||
func resolveSQLServerTLSSettings(config connection.ConnectionConfig) (encrypt string, trustServerCertificate string) {
|
||||
switch normalizedSSLMode(config) {
|
||||
case sslModeDisable:
|
||||
return "disable", "true"
|
||||
case sslModeRequired:
|
||||
return "true", "false"
|
||||
case sslModeSkipVerify:
|
||||
return "true", "true"
|
||||
default:
|
||||
return "false", "true"
|
||||
}
|
||||
}
|
||||
|
||||
func resolveGenericTLSConfig(config connection.ConnectionConfig) *tls.Config {
|
||||
switch normalizedSSLMode(config) {
|
||||
case sslModeDisable:
|
||||
return nil
|
||||
case sslModeRequired:
|
||||
return &tls.Config{MinVersion: tls.VersionTLS12}
|
||||
case sslModeSkipVerify:
|
||||
return &tls.Config{MinVersion: tls.VersionTLS12, InsecureSkipVerify: true}
|
||||
default:
|
||||
// Preferred: 先尝试 TLS(为提升兼容性默认跳过证书校验),失败时由调用方按需回退明文。
|
||||
return &tls.Config{MinVersion: tls.VersionTLS12, InsecureSkipVerify: true}
|
||||
}
|
||||
}
|
||||
|
||||
func resolveMongoTLSSettings(config connection.ConnectionConfig) (enabled bool, insecure bool) {
|
||||
switch normalizedSSLMode(config) {
|
||||
case sslModeDisable:
|
||||
return false, false
|
||||
case sslModeRequired:
|
||||
return true, false
|
||||
case sslModeSkipVerify:
|
||||
return true, true
|
||||
default:
|
||||
return true, true
|
||||
}
|
||||
}
|
||||
|
||||
func resolveTDengineNet(config connection.ConnectionConfig) string {
|
||||
if normalizedSSLMode(config) == sslModeDisable {
|
||||
return "ws"
|
||||
}
|
||||
return "wss"
|
||||
}
|
||||
@@ -40,11 +40,12 @@ func (t *TDengineDB) getDSN(config connection.ConnectionConfig) string {
|
||||
path = "/" + dbName
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s:%s@ws(%s)%s", user, pass, net.JoinHostPort(config.Host, strconv.Itoa(config.Port)), path)
|
||||
netType := resolveTDengineNet(config)
|
||||
return fmt.Sprintf("%s:%s@%s(%s)%s", user, pass, netType, net.JoinHostPort(config.Host, strconv.Itoa(config.Port)), path)
|
||||
}
|
||||
|
||||
func (t *TDengineDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
runConfig := config
|
||||
|
||||
if config.UseSSH {
|
||||
logger.Infof("TDengine 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
@@ -68,23 +69,38 @@ func (t *TDengineDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
dsn = t.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("TDengine 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = t.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("taosWS", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
t.conn = db
|
||||
t.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := t.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := t.getDSN(attempt)
|
||||
db, err := sql.Open("taosWS", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
t.conn = db
|
||||
t.pingTimeout = getConnectTimeout(attempt)
|
||||
|
||||
if err := t.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
t.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("TDengine SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (t *TDengineDB) Close() error {
|
||||
|
||||
@@ -41,7 +41,7 @@ func (v *VastbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("sslmode", resolvePostgresSSLMode(config))
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
@@ -49,7 +49,7 @@ func (v *VastbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
runConfig := config
|
||||
|
||||
if config.UseSSH {
|
||||
logger.Infof("Vastbase 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
@@ -75,23 +75,37 @@ func (v *VastbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = v.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("Vastbase 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = v.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
attempts := []connection.ConnectionConfig{runConfig}
|
||||
if shouldTrySSLPreferredFallback(runConfig) {
|
||||
attempts = append(attempts, withSSLDisabled(runConfig))
|
||||
}
|
||||
v.conn = db
|
||||
v.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := v.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
dsn := v.getDSN(attempt)
|
||||
db, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接打开失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
v.conn = db
|
||||
v.pingTimeout = getConnectTimeout(attempt)
|
||||
if err := v.Ping(); err != nil {
|
||||
_ = db.Close()
|
||||
v.conn = nil
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接验证失败: %v", idx+1, err))
|
||||
continue
|
||||
}
|
||||
if idx > 0 {
|
||||
logger.Warnf("Vastbase SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Close() error {
|
||||
|
||||
@@ -2,6 +2,7 @@ package redis
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"net"
|
||||
"strconv"
|
||||
@@ -201,25 +202,48 @@ func (r *RedisClientImpl) Connect(config connection.ConnectionConfig) error {
|
||||
|
||||
timeout := normalizeRedisTimeout(config.Timeout)
|
||||
if r.isCluster {
|
||||
opts := &redis.ClusterOptions{
|
||||
Addrs: seedAddrs,
|
||||
Username: strings.TrimSpace(config.User),
|
||||
Password: config.Password,
|
||||
DialTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
WriteTimeout: timeout,
|
||||
attempts := []connection.ConnectionConfig{config}
|
||||
if shouldTryRedisSSLPreferredFallback(config) {
|
||||
attempts = append(attempts, withRedisSSLDisabled(config))
|
||||
}
|
||||
clusterClient := redis.NewClusterClient(opts)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), timeout)
|
||||
defer cancel()
|
||||
if err := clusterClient.Ping(ctx).Err(); err != nil {
|
||||
clusterClient.Close()
|
||||
return fmt.Errorf("Redis 集群连接失败: %w", err)
|
||||
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
var tlsConfig *tls.Config
|
||||
if cfg := resolveRedisTLSConfig(attempt); cfg != nil {
|
||||
if host, _, err := net.SplitHostPort(seedAddrs[0]); err == nil && host != "" {
|
||||
cfg.ServerName = host
|
||||
}
|
||||
tlsConfig = cfg
|
||||
}
|
||||
opts := &redis.ClusterOptions{
|
||||
Addrs: seedAddrs,
|
||||
Username: strings.TrimSpace(attempt.User),
|
||||
Password: attempt.Password,
|
||||
DialTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
WriteTimeout: timeout,
|
||||
TLSConfig: tlsConfig,
|
||||
}
|
||||
clusterClient := redis.NewClusterClient(opts)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), timeout)
|
||||
pingErr := clusterClient.Ping(ctx).Err()
|
||||
cancel()
|
||||
if pingErr != nil {
|
||||
clusterClient.Close()
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接失败: %v", idx+1, pingErr))
|
||||
continue
|
||||
}
|
||||
r.client = clusterClient
|
||||
r.clusterClient = clusterClient
|
||||
r.config = attempt
|
||||
if idx > 0 {
|
||||
logger.Warnf("Redis 集群 SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
logger.Infof("Redis 集群连接成功: seeds=%s 逻辑库=db%d", strings.Join(seedAddrs, ","), r.currentDB)
|
||||
return nil
|
||||
}
|
||||
r.client = clusterClient
|
||||
r.clusterClient = clusterClient
|
||||
logger.Infof("Redis 集群连接成功: seeds=%s 逻辑库=db%d", strings.Join(seedAddrs, ","), r.currentDB)
|
||||
return nil
|
||||
return fmt.Errorf("Redis 集群连接失败: %s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
addr := seedAddrs[0]
|
||||
@@ -233,29 +257,53 @@ func (r *RedisClientImpl) Connect(config connection.ConnectionConfig) error {
|
||||
logger.Infof("Redis 通过 SSH 隧道连接: %s -> %s:%d", addr, config.Host, config.Port)
|
||||
}
|
||||
|
||||
opts := &redis.Options{
|
||||
Addr: addr,
|
||||
Username: strings.TrimSpace(config.User),
|
||||
Password: config.Password,
|
||||
DB: r.currentDB,
|
||||
DialTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
WriteTimeout: timeout,
|
||||
attempts := []connection.ConnectionConfig{config}
|
||||
if shouldTryRedisSSLPreferredFallback(config) {
|
||||
attempts = append(attempts, withRedisSSLDisabled(config))
|
||||
}
|
||||
|
||||
singleClient := redis.NewClient(opts)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), timeout)
|
||||
defer cancel()
|
||||
var failures []string
|
||||
for idx, attempt := range attempts {
|
||||
var tlsConfig *tls.Config
|
||||
if cfg := resolveRedisTLSConfig(attempt); cfg != nil {
|
||||
if host, _, err := net.SplitHostPort(addr); err == nil && host != "" {
|
||||
cfg.ServerName = host
|
||||
}
|
||||
tlsConfig = cfg
|
||||
}
|
||||
|
||||
if err := singleClient.Ping(ctx).Err(); err != nil {
|
||||
singleClient.Close()
|
||||
return fmt.Errorf("Redis 连接失败: %w", err)
|
||||
opts := &redis.Options{
|
||||
Addr: addr,
|
||||
Username: strings.TrimSpace(attempt.User),
|
||||
Password: attempt.Password,
|
||||
DB: r.currentDB,
|
||||
DialTimeout: timeout,
|
||||
ReadTimeout: timeout,
|
||||
WriteTimeout: timeout,
|
||||
TLSConfig: tlsConfig,
|
||||
}
|
||||
|
||||
singleClient := redis.NewClient(opts)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), timeout)
|
||||
pingErr := singleClient.Ping(ctx).Err()
|
||||
cancel()
|
||||
if pingErr != nil {
|
||||
singleClient.Close()
|
||||
failures = append(failures, fmt.Sprintf("第%d次连接失败: %v", idx+1, pingErr))
|
||||
continue
|
||||
}
|
||||
|
||||
r.client = singleClient
|
||||
r.singleClient = singleClient
|
||||
r.config = attempt
|
||||
if idx > 0 {
|
||||
logger.Warnf("Redis SSL 优先连接失败,已回退至明文连接")
|
||||
}
|
||||
logger.Infof("Redis 连接成功: %s DB=%d", addr, r.currentDB)
|
||||
return nil
|
||||
}
|
||||
|
||||
r.client = singleClient
|
||||
r.singleClient = singleClient
|
||||
logger.Infof("Redis 连接成功: %s DB=%d", addr, r.currentDB)
|
||||
return nil
|
||||
return fmt.Errorf("Redis 连接失败: %s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
// Close closes the Redis connection
|
||||
|
||||
55
internal/redis/ssl_mode.go
Normal file
55
internal/redis/ssl_mode.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package redis
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func normalizeRedisSSLMode(raw string) string {
|
||||
mode := strings.ToLower(strings.TrimSpace(raw))
|
||||
switch mode {
|
||||
case "", "preferred", "prefer":
|
||||
return "preferred"
|
||||
case "required", "require", "on", "true", "mandatory", "strict":
|
||||
return "required"
|
||||
case "skip-verify", "insecure", "skipverify", "skip_verify", "insecure-skip-verify":
|
||||
return "skip-verify"
|
||||
case "disable", "disabled", "off", "false", "none":
|
||||
return "disable"
|
||||
default:
|
||||
return "preferred"
|
||||
}
|
||||
}
|
||||
|
||||
func redisSSLMode(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSL {
|
||||
return "disable"
|
||||
}
|
||||
return normalizeRedisSSLMode(config.SSLMode)
|
||||
}
|
||||
|
||||
func shouldTryRedisSSLPreferredFallback(config connection.ConnectionConfig) bool {
|
||||
return config.UseSSL && normalizeRedisSSLMode(config.SSLMode) == "preferred"
|
||||
}
|
||||
|
||||
func withRedisSSLDisabled(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
next := config
|
||||
next.UseSSL = false
|
||||
next.SSLMode = "disable"
|
||||
return next
|
||||
}
|
||||
|
||||
func resolveRedisTLSConfig(config connection.ConnectionConfig) *tls.Config {
|
||||
switch redisSSLMode(config) {
|
||||
case "disable":
|
||||
return nil
|
||||
case "required":
|
||||
return &tls.Config{MinVersion: tls.VersionTLS12}
|
||||
case "skip-verify":
|
||||
return &tls.Config{MinVersion: tls.VersionTLS12, InsecureSkipVerify: true}
|
||||
default:
|
||||
return &tls.Config{MinVersion: tls.VersionTLS12, InsecureSkipVerify: true}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user