mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-12 20:29:43 +08:00
Compare commits
99 Commits
release/0.
...
release/0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
17d174bc5b | ||
|
|
9320f524a2 | ||
|
|
e31dc4e7f1 | ||
|
|
ab92e94bf8 | ||
|
|
da5708b5bc | ||
|
|
ecf47da81b | ||
|
|
21c8b9a102 | ||
|
|
a07b418b8f | ||
|
|
4bf10e5612 | ||
|
|
e6fe6eb026 | ||
|
|
b4f80f39df | ||
|
|
4d32dd2cb5 | ||
|
|
de8fb60a30 | ||
|
|
52abed83e6 | ||
|
|
80dc863455 | ||
|
|
fa318a9f0e | ||
|
|
78e35a5be8 | ||
|
|
35ed555857 | ||
|
|
f3130ff517 | ||
|
|
012c99be9e | ||
|
|
c8575c315b | ||
|
|
601d69faeb | ||
|
|
fdb7781a9b | ||
|
|
087578693e | ||
|
|
aceabb63f5 | ||
|
|
8587f72f81 | ||
|
|
83ad3b09d9 | ||
|
|
72811092b4 | ||
|
|
b67135e2c1 | ||
|
|
f5e16b0b70 | ||
|
|
f8535dd272 | ||
|
|
5cd8681b80 | ||
|
|
4b381c82b5 | ||
|
|
820b064e7f | ||
|
|
70cb6148c6 | ||
|
|
0cb9cb8bc9 | ||
|
|
c2c88d743b | ||
|
|
e8ef6b0b38 | ||
|
|
257459f96a | ||
|
|
027115ab87 | ||
|
|
96cb8134c4 | ||
|
|
b108cd1c90 | ||
|
|
d1ce9cefb8 | ||
|
|
f75e04f091 | ||
|
|
1fc182817e | ||
|
|
3c28b0adeb | ||
|
|
ec4b3d9018 | ||
|
|
8654485cfe | ||
|
|
9beb73ea40 | ||
|
|
3b19a33d4b | ||
|
|
13ba78103c | ||
|
|
538e4a1506 | ||
|
|
934581c796 | ||
|
|
1486b98d27 | ||
|
|
6cda430f03 | ||
|
|
f56c3d5f6e | ||
|
|
74c9143c95 | ||
|
|
0e4a833ffa | ||
|
|
37ad9885b7 | ||
|
|
5cef9a4032 | ||
|
|
f49767c38b | ||
|
|
7e8699ba02 | ||
|
|
5f0ce5ed7a | ||
|
|
49c7620bdd | ||
|
|
80fa7a1acd | ||
|
|
68770a42e2 | ||
|
|
06aebf716e | ||
|
|
f551b19f40 | ||
|
|
6674ad69e1 | ||
|
|
37d35684f1 | ||
|
|
71e5de0cdc | ||
|
|
d8656c6c9c | ||
|
|
443b487a02 | ||
|
|
bac57ebdf0 | ||
|
|
213a33e4f3 | ||
|
|
a00f87582d | ||
|
|
f129623000 | ||
|
|
8dbc97e466 | ||
|
|
4a0db185c0 | ||
|
|
5793f63ac8 | ||
|
|
8aabc67634 | ||
|
|
34c494ce51 | ||
|
|
178de02783 | ||
|
|
94e5b8d2c6 | ||
|
|
89e2247c05 | ||
|
|
b2ede61b79 | ||
|
|
db381ae9d1 | ||
|
|
f946cfd647 | ||
|
|
46c48c5ea8 | ||
|
|
e3bf160072 | ||
|
|
791425a5a8 | ||
|
|
d7acfd1af9 | ||
|
|
80fbfd6365 | ||
|
|
2ca27ebfb0 | ||
|
|
aa7651d95c | ||
|
|
88952e87c1 | ||
|
|
c981a65834 | ||
|
|
b9d9ab5464 | ||
|
|
6b503480cf |
58
.github/ISSUE_TEMPLATE/01-bug_report.yml
vendored
Normal file
58
.github/ISSUE_TEMPLATE/01-bug_report.yml
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
name: 问题反馈
|
||||
description: 软件问题反馈
|
||||
title: "[Bug] "
|
||||
labels: ["bug"]
|
||||
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: searched
|
||||
attributes:
|
||||
label: 已经搜索过 Issues,未发现重复问题*
|
||||
options:
|
||||
- label: 我已经搜索过 Issues,没有发现重复问题
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: system
|
||||
attributes:
|
||||
label: 操作系统及版本
|
||||
placeholder: Windows 10 22H2 / macOS Mojave / Linux
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: 软件安装版本
|
||||
placeholder: v0.2.3
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: 问题简述及复现流程
|
||||
description: 请详细描述你遇到的问题,并提供复现步骤
|
||||
placeholder: |
|
||||
1. 打开软件
|
||||
2. 点击 xxx
|
||||
3. 预期结果是 ...
|
||||
4. 实际结果是 ...
|
||||
5. 截图 ...
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: 其他补充
|
||||
description: 如果你有额外信息,请在此填写
|
||||
placeholder: 可选
|
||||
|
||||
- type: checkboxes
|
||||
id: pr
|
||||
attributes:
|
||||
label: 是否愿意提交 PR 修复当前 Issue
|
||||
options:
|
||||
- label: 我愿意尝试提交 PR
|
||||
37
.github/ISSUE_TEMPLATE/02-feature_request.yml
vendored
Normal file
37
.github/ISSUE_TEMPLATE/02-feature_request.yml
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
name: 功能建议
|
||||
description: 添加全新功能或改进现有功能
|
||||
title: "[Enhancement] "
|
||||
labels: ["enhancement"]
|
||||
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: searched
|
||||
attributes:
|
||||
label: 已经搜索过 Issues,未发现重复问题*
|
||||
options:
|
||||
- label: 我已经搜索过 Issues,没有发现重复问题
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: feature
|
||||
attributes:
|
||||
label: 功能描述
|
||||
description: 请详细描述你希望添加或改进的功能
|
||||
placeholder: 请描述你想要的功能
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: 其他补充
|
||||
description: 如果你有额外信息,请在此填写
|
||||
placeholder: 可选
|
||||
|
||||
- type: checkboxes
|
||||
id: pr
|
||||
attributes:
|
||||
label: 是否愿意提交 PR 实现当前 Issue
|
||||
options:
|
||||
- label: 我愿意尝试提交 PR
|
||||
30
.github/ISSUE_TEMPLATE/03-generic.yml
vendored
Normal file
30
.github/ISSUE_TEMPLATE/03-generic.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: 其他反馈
|
||||
description: 其他类型反馈、建议或讨论
|
||||
title: "[Question] "
|
||||
labels: ["question"]
|
||||
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: searched
|
||||
attributes:
|
||||
label: 已经搜索过 Issues,未发现重复问题*
|
||||
options:
|
||||
- label: 我已经搜索过 Issues,没有发现重复问题
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: content
|
||||
attributes:
|
||||
label: 内容
|
||||
description: 请填写你的反馈、建议或讨论内容
|
||||
placeholder: 请描述你的问题或想法
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: 其他补充
|
||||
description: 如果你有额外信息,请在此填写
|
||||
placeholder: 可选
|
||||
1
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
1
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
blank_issues_enabled: false
|
||||
22
.github/workflows/release-winget.yml
vendored
Normal file
22
.github/workflows/release-winget.yml
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
name: Publish to WinGet
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
release_tag:
|
||||
required: true
|
||||
description: 'Tag of release you want to publish'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: vedantmgoyal9/winget-releaser@v2
|
||||
with:
|
||||
identifier: Syngnat.GoNavi
|
||||
installers-regex: 'GoNavi-windows-(amd64|arm64)\.exe$'
|
||||
release-tag: ${{ inputs.release_tag || github.ref_name }}
|
||||
token: ${{ secrets.WINGET_TOKEN }}
|
||||
127
.github/workflows/release.yml
vendored
127
.github/workflows/release.yml
vendored
@@ -29,6 +29,13 @@ jobs:
|
||||
platform: windows/amd64
|
||||
artifact_name: GoNavi-windows-amd64
|
||||
asset_ext: .exe
|
||||
- os: windows-latest
|
||||
platform: windows/arm64
|
||||
artifact_name: GoNavi-windows-arm64
|
||||
asset_ext: .exe
|
||||
- os: ubuntu-22.04
|
||||
platform: linux/amd64
|
||||
artifact_name: GoNavi-linux-amd64
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
@@ -45,13 +52,43 @@ jobs:
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
# Linux Dependencies (GTK3, WebKit2GTK required by Wails)
|
||||
- name: Install Linux Dependencies
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libfuse2
|
||||
|
||||
# Download linuxdeploy tools for AppImage packaging
|
||||
LINUXDEPLOY_URL="https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage"
|
||||
PLUGIN_URL="https://github.com/linuxdeploy/linuxdeploy-plugin-gtk/releases/download/continuous/linuxdeploy-plugin-gtk-x86_64.AppImage"
|
||||
|
||||
echo "📥 下载 linuxdeploy..."
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 \
|
||||
-O /tmp/linuxdeploy "$LINUXDEPLOY_URL" || {
|
||||
echo "⚠️ linuxdeploy 下载失败,AppImage 打包将跳过"
|
||||
touch /tmp/skip-appimage
|
||||
}
|
||||
|
||||
echo "📥 下载 linuxdeploy-plugin-gtk..."
|
||||
wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 --tries=3 \
|
||||
-O /tmp/linuxdeploy-plugin-gtk "$PLUGIN_URL" || {
|
||||
echo "⚠️ linuxdeploy-plugin-gtk 下载失败,AppImage 打包将跳过"
|
||||
touch /tmp/skip-appimage
|
||||
}
|
||||
|
||||
if [ ! -f /tmp/skip-appimage ]; then
|
||||
chmod +x /tmp/linuxdeploy /tmp/linuxdeploy-plugin-gtk
|
||||
echo "✅ AppImage 工具准备完成"
|
||||
fi
|
||||
|
||||
- name: Install Wails
|
||||
run: go install -v github.com/wailsapp/wails/v2/cmd/wails@latest
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.artifact_name }}
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.artifact_name }} -ldflags "-X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
|
||||
# macOS Packaging
|
||||
- name: Package macOS DMG
|
||||
@@ -107,12 +144,93 @@ jobs:
|
||||
echo "📦 正在移动 $FINAL_EXE 到根目录..."
|
||||
mv "$FINAL_EXE" "../../$FINAL_EXE"
|
||||
|
||||
# Linux Packaging (tar.gz and AppImage)
|
||||
- name: Package Linux
|
||||
if: contains(matrix.platform, 'linux')
|
||||
run: |
|
||||
cd build/bin
|
||||
TARGET="${{ matrix.artifact_name }}"
|
||||
|
||||
if [ ! -f "$TARGET" ]; then
|
||||
echo "❌ 未找到构建产物 '$TARGET'!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
chmod +x "$TARGET"
|
||||
|
||||
# 1. Create tar.gz
|
||||
echo "📦 正在打包 $TARGET.tar.gz..."
|
||||
tar -czvf "$TARGET.tar.gz" "$TARGET"
|
||||
mv "$TARGET.tar.gz" ../../
|
||||
|
||||
# 2. Create AppImage (skip for ARM64 or if tools unavailable)
|
||||
if [ -f /tmp/skip-appimage ]; then
|
||||
echo "⚠️ 跳过 AppImage 打包"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "📦 正在生成 AppImage..."
|
||||
|
||||
# Create AppDir structure
|
||||
mkdir -p AppDir/usr/bin
|
||||
mkdir -p AppDir/usr/share/applications
|
||||
mkdir -p AppDir/usr/share/icons/hicolor/256x256/apps
|
||||
|
||||
cp "$TARGET" AppDir/usr/bin/gonavi
|
||||
|
||||
# Create desktop file
|
||||
printf '%s\n' \
|
||||
'[Desktop Entry]' \
|
||||
'Name=GoNavi' \
|
||||
'Exec=gonavi' \
|
||||
'Icon=gonavi' \
|
||||
'Type=Application' \
|
||||
'Categories=Development;Database;' \
|
||||
'Comment=Database Management Tool' \
|
||||
> AppDir/usr/share/applications/gonavi.desktop
|
||||
|
||||
cp AppDir/usr/share/applications/gonavi.desktop AppDir/gonavi.desktop
|
||||
|
||||
# Create a simple icon (or use existing if available)
|
||||
if [ -f "../../build/appicon.png" ]; then
|
||||
cp "../../build/appicon.png" AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
cp "../../build/appicon.png" AppDir/gonavi.png
|
||||
else
|
||||
# Create a placeholder icon
|
||||
convert -size 256x256 xc:#336791 -fill white -gravity center -pointsize 48 -annotate 0 "GoNavi" AppDir/gonavi.png || \
|
||||
wget -q "https://via.placeholder.com/256/336791/FFFFFF?text=GoNavi" -O AppDir/gonavi.png || \
|
||||
touch AppDir/gonavi.png
|
||||
cp AppDir/gonavi.png AppDir/usr/share/icons/hicolor/256x256/apps/gonavi.png
|
||||
fi
|
||||
|
||||
# Build AppImage
|
||||
export DEPLOY_GTK_VERSION=3
|
||||
/tmp/linuxdeploy --appdir AppDir --plugin gtk --output appimage || {
|
||||
echo "⚠️ AppImage 生成失败,但 tar.gz 已成功生成"
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Rename output
|
||||
mv GoNavi*.AppImage "$TARGET.AppImage" 2>/dev/null || {
|
||||
echo "⚠️ AppImage 重命名失败"
|
||||
exit 0
|
||||
}
|
||||
|
||||
if [ -f "$TARGET.AppImage" ]; then
|
||||
mv "$TARGET.AppImage" ../../
|
||||
echo "✅ AppImage 生成成功"
|
||||
fi
|
||||
|
||||
# Upload to Actions Artifacts (Temporary Storage)
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-artifacts-${{ strategy.job-index }} # Unique name per job
|
||||
path: GoNavi-*${{ matrix.asset_ext }}
|
||||
path: |
|
||||
GoNavi-*.dmg
|
||||
GoNavi-*.exe
|
||||
GoNavi-*.tar.gz
|
||||
GoNavi-*.AppImage
|
||||
retention-days: 1
|
||||
|
||||
# Phase 2: Collect all artifacts and Publish Release (Single Job)
|
||||
@@ -131,6 +249,11 @@ jobs:
|
||||
- name: List Assets
|
||||
run: ls -R release-assets
|
||||
|
||||
- name: Generate SHA256SUMS
|
||||
run: |
|
||||
cd release-assets
|
||||
sha256sum * > SHA256SUMS
|
||||
|
||||
- name: Create Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -6,7 +6,7 @@
|
||||
frontend/release/
|
||||
**/release/
|
||||
**/dist/
|
||||
**/build/
|
||||
build/bin/
|
||||
|
||||
# wails / node artifacts (按需)
|
||||
node_modules/
|
||||
@@ -17,3 +17,5 @@ dist/
|
||||
GoNavi-Wails
|
||||
GoNavi-Wails.exe
|
||||
.ace-tool/
|
||||
.claude/
|
||||
tmpclaude-*
|
||||
|
||||
37
README.md
37
README.md
@@ -31,16 +31,45 @@
|
||||
- **虚拟滚动**:轻松处理海量数据展示,拒绝卡顿。
|
||||
|
||||
### 🔌 多数据库支持
|
||||
- **MySQL**:完整的支持,包括表结构设计、索引管理、外键管理等。
|
||||
- **PostgreSQL**:基础支持(持续完善中)。
|
||||
- **MySQL**:完整支持,涵盖数据编辑、结构管理与导入导出。
|
||||
- **PostgreSQL**:数据查看与编辑支持,事务提交能力持续完善。
|
||||
- **SQLite**:本地文件数据库支持。
|
||||
- **Oracle**:基础数据访问与编辑支持。
|
||||
- **Dameng(达梦)**:基础数据访问与编辑支持。
|
||||
- **Kingbase(人大金仓)**:基础数据访问与编辑支持。
|
||||
- **TDengine**:时序数据库连接、库表浏览与 SQL 查询支持。
|
||||
- **Redis**:Key/Value 浏览、命令执行、视图与编码切换。
|
||||
- **自定义驱动**:支持配置 Driver/DSN 接入更多数据源。
|
||||
- **SSH 隧道**:内置 SSH 隧道支持,安全连接内网数据库。
|
||||
|
||||
### 📊 强大的数据管理 (DataGrid)
|
||||
- **所见即所得编辑**:直接在表格中双击单元格修改数据。
|
||||
- **事务操作**:支持批量新增、修改、删除,一键提交或回滚事务。
|
||||
- **批量事务操作**:支持批量新增、修改、删除,一键提交或回滚事务。
|
||||
- **大字段编辑**:双击大字段自动打开弹窗编辑器,避免卡顿。
|
||||
- **右键上下文菜单**:快速设置 NULL、复制/导出等操作。
|
||||
- **智能上下文**:自动识别单表查询,解锁编辑功能;复杂查询自动切换为只读模式。
|
||||
- **数据导出**:支持导出为 CSV, Excel (XLSX), JSON, Markdown 等格式。
|
||||
- **批量导出/备份**:支持表与数据库的批量导出/备份。
|
||||
- **数据导出**:支持 CSV、Excel (XLSX)、JSON、Markdown 等格式。
|
||||
|
||||
### 🧰 批量导出/备份
|
||||
- **数据库批量导出**:支持结构导出与结构+数据备份。
|
||||
- **表批量导出**:支持多表一键导出/备份。
|
||||
- **智能上下文检测**:自动判断目标范围,避免误操作。
|
||||
|
||||
### 🧩 Redis 视图与编码
|
||||
- **视图模式切换**:自动/原始文本/UTF-8/十六进制多模式显示。
|
||||
- **智能解码**:针对二进制值进行 UTF-8 质量判定与中文字符识别。
|
||||
- **命令执行**:内置命令面板快速操作。
|
||||
|
||||
### 🔄 数据同步与导入导出
|
||||
- **连接配置导入/导出**:支持配置 JSON 导入导出,便于团队共享。
|
||||
- **数据同步**:内置数据同步面板,支持跨库同步任务配置。
|
||||
|
||||
### 🆙 在线更新
|
||||
- **自动更新**:启动/定时/手动检查更新,自动下载并提示重启完成更新。
|
||||
|
||||
### 🧾 可观测性
|
||||
- **SQL 执行日志**:实时查看 SQL 与执行耗时,便于排障与优化。
|
||||
|
||||
### 📝 智能 SQL 编辑器
|
||||
- **Monaco Editor 内核**:集成 VS Code 同款编辑器,体验极佳。
|
||||
|
||||
141
build-release.sh
141
build-release.sh
@@ -12,6 +12,7 @@ if [ -z "$VERSION" ]; then
|
||||
VERSION="0.0.0"
|
||||
fi
|
||||
echo "ℹ️ 检测到版本号: $VERSION"
|
||||
LDFLAGS="-X GoNavi-Wails/internal/app.AppVersion=$VERSION"
|
||||
|
||||
# 颜色配置
|
||||
GREEN='\033[0;32m'
|
||||
@@ -27,7 +28,7 @@ mkdir -p $DIST_DIR
|
||||
|
||||
# --- macOS ARM64 构建 ---
|
||||
echo -e "${GREEN}🍎 正在构建 macOS (arm64)...${NC}"
|
||||
wails build -platform darwin/arm64 -clean
|
||||
wails build -platform darwin/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
APP_SRC="$BUILD_BIN_DIR/$DEFAULT_BINARY_NAME.app"
|
||||
APP_DEST_NAME="${APP_NAME}-${VERSION}-mac-arm64.app"
|
||||
@@ -81,7 +82,7 @@ fi
|
||||
|
||||
# --- macOS AMD64 构建 ---
|
||||
echo -e "${GREEN}🍎 正在构建 macOS (amd64)...${NC}"
|
||||
wails build -platform darwin/amd64 -clean
|
||||
wails build -platform darwin/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
APP_SRC="$BUILD_BIN_DIR/$DEFAULT_BINARY_NAME.app"
|
||||
APP_DEST_NAME="${APP_NAME}-${VERSION}-mac-amd64.app"
|
||||
@@ -131,19 +132,147 @@ fi
|
||||
# --- Windows AMD64 构建 ---
|
||||
echo -e "${GREEN}🪟 正在构建 Windows (amd64)...${NC}"
|
||||
if command -v x86_64-w64-mingw32-gcc &> /dev/null; then
|
||||
wails build -platform windows/amd64 -clean
|
||||
wails build -platform windows/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$DIST_DIR/${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-windows-amd64.exe"
|
||||
else
|
||||
echo -e "${RED} ❌ Windows 构建失败。${NC}"
|
||||
echo -e "${RED} ❌ Windows amd64 构建失败。${NC}"
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 MinGW 工具 (x86_64-w64-mingw32-gcc),跳过 Windows 构建。${NC}"
|
||||
echo -e "${YELLOW} ⚠️ 未找到 MinGW 工具 (x86_64-w64-mingw32-gcc),跳过 Windows amd64 构建。${NC}"
|
||||
fi
|
||||
|
||||
# --- Windows ARM64 构建 ---
|
||||
echo -e "${GREEN}🪟 正在构建 Windows (arm64)...${NC}"
|
||||
if command -v aarch64-w64-mingw32-gcc &> /dev/null; then
|
||||
wails build -platform windows/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}.exe" "$DIST_DIR/${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-windows-arm64.exe"
|
||||
else
|
||||
echo -e "${RED} ❌ Windows arm64 构建失败。${NC}"
|
||||
fi
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 MinGW ARM64 工具 (aarch64-w64-mingw32-gcc),跳过 Windows arm64 构建。${NC}"
|
||||
echo " 安装命令: brew install mingw-w64 (需要支持 ARM64 的版本)"
|
||||
fi
|
||||
|
||||
# --- Linux AMD64 构建 ---
|
||||
echo -e "${GREEN}🐧 正在构建 Linux (amd64)...${NC}"
|
||||
# 检测当前系统
|
||||
CURRENT_OS=$(uname -s)
|
||||
CURRENT_ARCH=$(uname -m)
|
||||
|
||||
if [ "$CURRENT_OS" = "Linux" ] && [ "$CURRENT_ARCH" = "x86_64" ]; then
|
||||
# 本机 Linux amd64,直接构建
|
||||
wails build -platform linux/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
# 打包为 tar.gz
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-amd64.tar.gz" "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
cd ..
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-linux-amd64.tar.gz"
|
||||
else
|
||||
echo -e "${RED} ❌ Linux amd64 构建失败。${NC}"
|
||||
fi
|
||||
elif command -v x86_64-linux-gnu-gcc &> /dev/null; then
|
||||
# macOS 或其他系统,尝试交叉编译
|
||||
export CC=x86_64-linux-gnu-gcc
|
||||
export CXX=x86_64-linux-gnu-g++
|
||||
export CGO_ENABLED=1
|
||||
wails build -platform linux/amd64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-amd64"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-amd64.tar.gz" "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-amd64"
|
||||
cd ..
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-linux-amd64.tar.gz"
|
||||
else
|
||||
echo -e "${RED} ❌ Linux amd64 交叉编译失败。${NC}"
|
||||
fi
|
||||
unset CC CXX CGO_ENABLED
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 非 Linux 系统且未找到交叉编译工具,跳过 Linux amd64 构建。${NC}"
|
||||
echo " 在 Linux 上运行此脚本可直接构建,或安装交叉编译工具链。"
|
||||
fi
|
||||
|
||||
# --- Linux ARM64 构建 ---
|
||||
echo -e "${GREEN}🐧 正在构建 Linux (arm64)...${NC}"
|
||||
if [ "$CURRENT_OS" = "Linux" ] && [ "$CURRENT_ARCH" = "aarch64" ]; then
|
||||
# 本机 Linux arm64,直接构建
|
||||
wails build -platform linux/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-arm64.tar.gz" "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
cd ..
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-linux-arm64.tar.gz"
|
||||
else
|
||||
echo -e "${RED} ❌ Linux arm64 构建失败。${NC}"
|
||||
fi
|
||||
elif command -v aarch64-linux-gnu-gcc &> /dev/null; then
|
||||
# 交叉编译
|
||||
export CC=aarch64-linux-gnu-gcc
|
||||
export CXX=aarch64-linux-gnu-g++
|
||||
export CGO_ENABLED=1
|
||||
wails build -platform linux/arm64 -clean -ldflags "$LDFLAGS"
|
||||
if [ $? -eq 0 ]; then
|
||||
mv "$BUILD_BIN_DIR/${DEFAULT_BINARY_NAME}" "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
chmod +x "$DIST_DIR/${APP_NAME}-${VERSION}-linux-arm64"
|
||||
cd "$DIST_DIR"
|
||||
tar -czvf "${APP_NAME}-${VERSION}-linux-arm64.tar.gz" "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
rm "${APP_NAME}-${VERSION}-linux-arm64"
|
||||
cd ..
|
||||
echo " ✅ 已生成 ${APP_NAME}-${VERSION}-linux-arm64.tar.gz"
|
||||
else
|
||||
echo -e "${RED} ❌ Linux arm64 交叉编译失败。${NC}"
|
||||
fi
|
||||
unset CC CXX CGO_ENABLED
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 非 Linux ARM64 系统且未找到交叉编译工具,跳过 Linux arm64 构建。${NC}"
|
||||
echo " 安装命令 (Ubuntu): sudo apt install gcc-aarch64-linux-gnu g++-aarch64-linux-gnu"
|
||||
echo " 安装命令 (macOS): brew install aarch64-linux-gnu-gcc (需要第三方 tap)"
|
||||
fi
|
||||
|
||||
# 清理中间构建目录
|
||||
rm -rf "build/bin"
|
||||
|
||||
echo -e "${GREEN}🔐 生成 SHA256SUMS...${NC}"
|
||||
if command -v sha256sum &> /dev/null; then
|
||||
cd "$DIST_DIR"
|
||||
: > SHA256SUMS
|
||||
for f in *; do
|
||||
[ -f "$f" ] || continue
|
||||
sha256sum "$f" >> SHA256SUMS
|
||||
done
|
||||
cd ..
|
||||
elif command -v shasum &> /dev/null; then
|
||||
cd "$DIST_DIR"
|
||||
: > SHA256SUMS
|
||||
for f in *; do
|
||||
[ -f "$f" ] || continue
|
||||
shasum -a 256 "$f" >> SHA256SUMS
|
||||
done
|
||||
cd ..
|
||||
else
|
||||
echo -e "${YELLOW} ⚠️ 未找到 sha256sum/shasum,跳过校验文件生成。${NC}"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}🎉 所有任务完成!构建产物在 'dist/' 目录下:${NC}"
|
||||
ls -1 "$DIST_DIR"
|
||||
ls -lh "$DIST_DIR"
|
||||
echo ""
|
||||
echo -e "${GREEN}📋 支持的平台:${NC}"
|
||||
echo " • macOS (Intel/Apple Silicon): .dmg"
|
||||
echo " • Windows (x64/ARM64): .exe"
|
||||
echo " • Linux (x64/ARM64): .tar.gz"
|
||||
echo ""
|
||||
echo -e "${YELLOW}💡 提示:Linux AppImage 包请使用 GitHub Actions CI/CD 构建。${NC}"
|
||||
|
||||
BIN
build/appicon.png
Normal file
BIN
build/appicon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 134 KiB |
68
build/darwin/Info.dev.plist
Normal file
68
build/darwin/Info.dev.plist
Normal file
@@ -0,0 +1,68 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>{{.Info.ProductName}}</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>{{.OutputFilename}}</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.wails.{{.Name}}.dev</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>{{.Info.Comments}}</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>iconfile</string>
|
||||
<key>LSMinimumSystemVersion</key>
|
||||
<string>10.13.0</string>
|
||||
<key>NSHighResolutionCapable</key>
|
||||
<string>true</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>{{.Info.Copyright}}</string>
|
||||
{{if .Info.FileAssociations}}
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
{{range .Info.FileAssociations}}
|
||||
<dict>
|
||||
<key>CFBundleTypeExtensions</key>
|
||||
<array>
|
||||
<string>{{.Ext}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeName</key>
|
||||
<string>{{.Name}}</string>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
<key>CFBundleTypeIconFile</key>
|
||||
<string>{{.IconName}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
{{if .Info.Protocols}}
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
{{range .Info.Protocols}}
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>com.wails.{{.Scheme}}</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>{{.Scheme}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
<key>NSAppTransportSecurity</key>
|
||||
<dict>
|
||||
<key>NSAllowsLocalNetworking</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
63
build/darwin/Info.plist
Normal file
63
build/darwin/Info.plist
Normal file
@@ -0,0 +1,63 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>{{.Info.ProductName}}</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>{{.OutputFilename}}</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.wails.{{.Name}}</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>{{.Info.Comments}}</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>{{.Info.ProductVersion}}</string>
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>iconfile</string>
|
||||
<key>LSMinimumSystemVersion</key>
|
||||
<string>10.13.0</string>
|
||||
<key>NSHighResolutionCapable</key>
|
||||
<string>true</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>{{.Info.Copyright}}</string>
|
||||
{{if .Info.FileAssociations}}
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
{{range .Info.FileAssociations}}
|
||||
<dict>
|
||||
<key>CFBundleTypeExtensions</key>
|
||||
<array>
|
||||
<string>{{.Ext}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeName</key>
|
||||
<string>{{.Name}}</string>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
<key>CFBundleTypeIconFile</key>
|
||||
<string>{{.IconName}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
{{if .Info.Protocols}}
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
{{range .Info.Protocols}}
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>com.wails.{{.Scheme}}</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>{{.Scheme}}</string>
|
||||
</array>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>{{.Role}}</string>
|
||||
</dict>
|
||||
{{end}}
|
||||
</array>
|
||||
{{end}}
|
||||
</dict>
|
||||
</plist>
|
||||
BIN
build/darwin/icon.icns
Normal file
BIN
build/darwin/icon.icns
Normal file
Binary file not shown.
BIN
build/windows/icon.ico
Normal file
BIN
build/windows/icon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 32 KiB |
15
build/windows/info.json
Normal file
15
build/windows/info.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"fixed": {
|
||||
"file_version": "{{.Info.ProductVersion}}"
|
||||
},
|
||||
"info": {
|
||||
"0000": {
|
||||
"ProductVersion": "{{.Info.ProductVersion}}",
|
||||
"CompanyName": "{{.Info.CompanyName}}",
|
||||
"FileDescription": "{{.Info.ProductName}}",
|
||||
"LegalCopyright": "{{.Info.Copyright}}",
|
||||
"ProductName": "{{.Info.ProductName}}",
|
||||
"Comments": "{{.Info.Comments}}"
|
||||
}
|
||||
}
|
||||
}
|
||||
15
build/windows/wails.exe.manifest
Normal file
15
build/windows/wails.exe.manifest
Normal file
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly manifestVersion="1.0" xmlns="urn:schemas-microsoft-com:asm.v1" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
|
||||
<assemblyIdentity type="win32" name="com.wails.{{.Name}}" version="{{.Info.ProductVersion}}.0" processorArchitecture="*"/>
|
||||
<dependency>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity type="win32" name="Microsoft.Windows.Common-Controls" version="6.0.0.0" processorArchitecture="*" publicKeyToken="6595b64144ccf1df" language="*"/>
|
||||
</dependentAssembly>
|
||||
</dependency>
|
||||
<asmv3:application>
|
||||
<asmv3:windowsSettings>
|
||||
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true/pm</dpiAware> <!-- fallback for Windows 7 and 8 -->
|
||||
<dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">permonitorv2,permonitor</dpiAwareness> <!-- falls back to per-monitor if per-monitor v2 is not supported -->
|
||||
</asmv3:windowsSettings>
|
||||
</asmv3:application>
|
||||
</assembly>
|
||||
164
docs/HighGo_Optional_Code_Changes.md
Normal file
164
docs/HighGo_Optional_Code_Changes.md
Normal file
@@ -0,0 +1,164 @@
|
||||
# HighGo 可选代码优化建议
|
||||
|
||||
## 一、sslmode 配置优化
|
||||
|
||||
### 当前状态
|
||||
|
||||
**文件**:`internal/db/highgo_impl.go:43`
|
||||
|
||||
**当前代码**:
|
||||
```go
|
||||
q.Set("sslmode", "disable")
|
||||
```
|
||||
|
||||
### 建议修改
|
||||
|
||||
根据瀚高官方文档,sslmode 的默认值应该是 `require`。建议修改为:
|
||||
|
||||
```go
|
||||
q.Set("sslmode", "require")
|
||||
```
|
||||
|
||||
### 修改原因
|
||||
|
||||
1. **符合官方规范**:瀚高官方文档明确指出默认 sslmode 为 `require`
|
||||
2. **安全性提升**:启用 SSL 加密可以保护数据传输安全
|
||||
3. **生产环境最佳实践**:生产环境应该启用 SSL 连接
|
||||
|
||||
### 是否需要修改?
|
||||
|
||||
**不一定需要修改**,取决于您的实际环境:
|
||||
|
||||
#### 保持 `disable` 的场景:
|
||||
- ✅ 开发/测试环境
|
||||
- ✅ HighGo 服务器未配置 SSL 证书
|
||||
- ✅ 内网环境,不需要加密传输
|
||||
- ✅ 快速测试连接功能
|
||||
|
||||
#### 修改为 `require` 的场景:
|
||||
- ✅ 生产环境
|
||||
- ✅ HighGo 服务器已配置 SSL 证书
|
||||
- ✅ 跨网络连接,需要加密保护
|
||||
- ✅ 符合安全合规要求
|
||||
|
||||
### 如何修改
|
||||
|
||||
如果您决定修改,可以使用以下命令:
|
||||
|
||||
**方式 1:直接修改(固定为 require)**
|
||||
```go
|
||||
// 文件:internal/db/highgo_impl.go 第 43 行
|
||||
q.Set("sslmode", "require")
|
||||
```
|
||||
|
||||
**方式 2:可配置(推荐)**
|
||||
|
||||
如果希望让用户可以选择 sslmode,可以修改为:
|
||||
|
||||
```go
|
||||
// 在 getDSN 方法中
|
||||
sslmode := "disable" // 默认值
|
||||
if config.SSLMode != "" {
|
||||
sslmode = config.SSLMode
|
||||
}
|
||||
q.Set("sslmode", sslmode)
|
||||
```
|
||||
|
||||
然后在 `internal/connection/connection.go` 的 `ConnectionConfig` 结构体中添加字段:
|
||||
|
||||
```go
|
||||
type ConnectionConfig struct {
|
||||
// ... 现有字段
|
||||
SSLMode string `json:"sslMode,omitempty"` // SSL 模式:disable, require, verify-ca, verify-full
|
||||
}
|
||||
```
|
||||
|
||||
前端 UI 也需要相应添加 sslmode 选择控件。
|
||||
|
||||
### 测试建议
|
||||
|
||||
修改后请务必测试:
|
||||
|
||||
1. **SSL 启用测试**:
|
||||
- 连接配置了 SSL 的 HighGo 服务器
|
||||
- 验证连接成功
|
||||
|
||||
2. **SSL 禁用测试**:
|
||||
- 连接未配置 SSL 的 HighGo 服务器
|
||||
- 验证是否会报错(如果设置为 `require` 会报错)
|
||||
|
||||
3. **兼容性测试**:
|
||||
- 测试现有的 HighGo 连接配置是否仍然可用
|
||||
|
||||
## 二、其他可选优化
|
||||
|
||||
### 1. 默认端口提示优化
|
||||
|
||||
**文件**:`frontend/src/components/ConnectionModal.tsx`
|
||||
|
||||
**当前状态**:HighGo 的默认端口已正确设置为 5866
|
||||
|
||||
**建议**:无需修改,已符合官方规范
|
||||
|
||||
### 2. 默认数据库名称
|
||||
|
||||
**文件**:`internal/db/highgo_impl.go:33`
|
||||
|
||||
**当前代码**:
|
||||
```go
|
||||
if dbname == "" {
|
||||
dbname = "highgo" // HighGo default database
|
||||
}
|
||||
```
|
||||
|
||||
**建议**:无需修改,已符合官方规范(默认数据库为 `highgo`)
|
||||
|
||||
### 3. 默认用户名
|
||||
|
||||
**当前状态**:未在代码中硬编码默认用户名
|
||||
|
||||
**瀚高官方默认**:`sysdba`
|
||||
|
||||
**建议**:
|
||||
- 可以在前端 UI 的 HighGo 连接表单中,将用户名输入框的 placeholder 设置为 `sysdba`
|
||||
- 但不建议硬编码默认值,让用户自行输入更安全
|
||||
|
||||
## 三、总结
|
||||
|
||||
### 必须修改的项目
|
||||
- ✅ **无**(当前代码已基本符合规范)
|
||||
|
||||
### 建议修改的项目
|
||||
1. **sslmode 配置**(根据实际环境决定)
|
||||
- 开发环境:保持 `disable`
|
||||
- 生产环境:修改为 `require`
|
||||
|
||||
### 可选优化的项目
|
||||
1. 将 sslmode 改为可配置(需要修改前后端)
|
||||
2. 前端 UI 添加 sslmode 选择控件
|
||||
3. 用户名输入框添加 `sysdba` 提示
|
||||
|
||||
## 四、修改优先级
|
||||
|
||||
**优先级 1(高)**:
|
||||
- 集成瀚高 SM3 驱动(参考 `HighGo_SM3_Integration_Guide.md`)
|
||||
|
||||
**优先级 2(中)**:
|
||||
- 根据部署环境调整 sslmode 配置
|
||||
|
||||
**优先级 3(低)**:
|
||||
- 将 sslmode 改为可配置
|
||||
- UI 优化(placeholder 提示等)
|
||||
|
||||
## 五、下一步行动
|
||||
|
||||
建议按以下顺序执行:
|
||||
|
||||
1. **先集成 SM3 驱动**(参考集成指南)
|
||||
2. **测试基本连接功能**(使用 sslmode=disable)
|
||||
3. **如果生产环境需要 SSL**,再修改 sslmode 配置
|
||||
4. **验证所有功能正常**后,考虑可选优化项
|
||||
|
||||
---
|
||||
|
||||
**注意**:所有代码修改都应该在集成 SM3 驱动并验证基本功能正常后再进行。
|
||||
196
docs/HighGo_SM3_Integration_Guide.md
Normal file
196
docs/HighGo_SM3_Integration_Guide.md
Normal file
@@ -0,0 +1,196 @@
|
||||
# HighGo SM3 国密驱动集成指南
|
||||
|
||||
## 一、背景说明
|
||||
|
||||
HighGo(瀚高)数据库需要使用支持 SM3 国密认证的 PostgreSQL 驱动。瀚高官方提供了基于 `lib/pq` 的安全增强版本。
|
||||
|
||||
## 二、集成步骤
|
||||
|
||||
### 步骤 1:下载瀚高 pq 驱动
|
||||
|
||||
1. 访问百度网盘链接:
|
||||
```
|
||||
https://pan.baidu.com/s/1xuz6uJz0utRgKWecXhpOiA?pwd=o0tj
|
||||
```
|
||||
|
||||
2. 下载驱动源码压缩包
|
||||
|
||||
### 步骤 2:放置驱动源码
|
||||
|
||||
1. 在项目根目录创建目录(如果不存在):
|
||||
```bash
|
||||
mkdir -p third_party/highgo-pq
|
||||
```
|
||||
|
||||
2. 解压下载的驱动源码到 `third_party/highgo-pq/` 目录
|
||||
|
||||
3. 确保目录结构如下:
|
||||
```
|
||||
GoNavi/
|
||||
├── third_party/
|
||||
│ └── highgo-pq/
|
||||
│ ├── go.mod
|
||||
│ ├── conn.go
|
||||
│ ├── ... (其他 pq 驱动源文件)
|
||||
```
|
||||
|
||||
### 步骤 3:修改 go.mod
|
||||
|
||||
在 `go.mod` 中添加独立的 HighGo 驱动依赖与本地替换:
|
||||
|
||||
```go
|
||||
require github.com/highgo/pq-sm3 v0.0.0
|
||||
replace github.com/highgo/pq-sm3 => ./third_party/highgo-pq
|
||||
```
|
||||
|
||||
完整示例:
|
||||
```go
|
||||
module GoNavi-Wails
|
||||
|
||||
go 1.24.3
|
||||
|
||||
require (
|
||||
// ... 现有依赖
|
||||
github.com/lib/pq v1.11.1
|
||||
github.com/highgo/pq-sm3 v0.0.0
|
||||
// ... 其他依赖
|
||||
)
|
||||
|
||||
// 在文件末尾添加
|
||||
replace github.com/highgo/pq-sm3 => ./third_party/highgo-pq
|
||||
```
|
||||
|
||||
并将 `third_party/highgo-pq/go.mod` 的 module 修改为:
|
||||
|
||||
```go
|
||||
module github.com/highgo/pq-sm3
|
||||
```
|
||||
|
||||
同时在驱动源码中把注册名改为 `highgo`,确保不覆盖 `postgres`:
|
||||
|
||||
```go
|
||||
sql.Register("highgo", &Driver{})
|
||||
```
|
||||
|
||||
### 步骤 4:更新 HighGo 连接配置(可选)
|
||||
|
||||
根据瀚高官方文档,建议修改 `internal/db/highgo_impl.go:43` 的 sslmode:
|
||||
|
||||
**当前代码**:
|
||||
```go
|
||||
q.Set("sslmode", "disable")
|
||||
```
|
||||
|
||||
**建议修改为**(瀚高默认):
|
||||
```go
|
||||
q.Set("sslmode", "require")
|
||||
```
|
||||
|
||||
> ⚠️ 注意:如果您的 HighGo 服务器未配置 SSL,保持 `disable` 即可。
|
||||
|
||||
### 步骤 5:验证集成
|
||||
|
||||
1. 清理依赖缓存:
|
||||
```bash
|
||||
go clean -modcache
|
||||
```
|
||||
|
||||
2. 重新下载依赖:
|
||||
```bash
|
||||
go mod download
|
||||
```
|
||||
|
||||
3. 编译项目:
|
||||
```bash
|
||||
go build ./...
|
||||
```
|
||||
|
||||
4. 测试 HighGo 连接:
|
||||
- 启动应用
|
||||
- 创建 HighGo 连接
|
||||
- 测试连接是否成功
|
||||
|
||||
## 三、重要说明
|
||||
|
||||
### ⚠️ 影响范围
|
||||
|
||||
采用独立驱动名后,影响范围如下:
|
||||
|
||||
1. **PostgreSQL 继续使用原生 `github.com/lib/pq`**
|
||||
2. **HighGo 使用 `github.com/highgo/pq-sm3`(本地替换到官方源码)**
|
||||
3. 两条连接链路互不覆盖,降低兼容性风险
|
||||
|
||||
### 兼容性验证
|
||||
|
||||
集成后,请务必测试:
|
||||
|
||||
1. ✅ HighGo 数据库连接(SM3 认证)
|
||||
2. ✅ 标准 PostgreSQL 连接(确保仍然可用)
|
||||
|
||||
若 PostgreSQL 或 HighGo 任一连接异常,优先检查驱动注册名与 `go.mod` replace 是否一致。
|
||||
|
||||
### 回滚方案
|
||||
|
||||
如果集成后出现问题,可以快速回滚:
|
||||
|
||||
1. 删除 `go.mod` 中的 replace 指令
|
||||
2. 删除 `go.mod` 中 `github.com/highgo/pq-sm3` 的 require
|
||||
3. 删除 `third_party/highgo-pq/` 目录
|
||||
4. 运行 `go mod tidy`
|
||||
5. 重新编译
|
||||
|
||||
## 四、瀚高驱动特性
|
||||
|
||||
根据官方文档:
|
||||
|
||||
- **项目内包路径**:`github.com/highgo/pq-sm3`(映射到本地 `third_party/highgo-pq`)
|
||||
- **驱动名**:`highgo`(项目内独立注册,避免覆盖 `postgres`)
|
||||
- **SM3 支持**:自动启用国密认证
|
||||
- **默认端口**:5866
|
||||
- **默认数据库**:`highgo`
|
||||
- **默认用户**:`sysdba`
|
||||
- **sslmode 默认**:`require`
|
||||
|
||||
## 五、故障排查
|
||||
|
||||
### 问题 1:编译失败
|
||||
|
||||
**现象**:`go build` 报错找不到 `github.com/highgo/pq-sm3`
|
||||
|
||||
**解决**:
|
||||
1. 检查 `third_party/highgo-pq/` 目录是否存在
|
||||
2. 检查 `go.mod` 中 `github.com/highgo/pq-sm3` 的 require/replace 是否正确
|
||||
3. 运行 `go mod download`
|
||||
|
||||
### 问题 2:HighGo 连接失败
|
||||
|
||||
**现象**:连接 HighGo 时报认证错误
|
||||
|
||||
**解决**:
|
||||
1. 确认瀚高驱动已正确替换(检查 `go.mod`)
|
||||
2. 确认项目内驱动注册名为 `highgo`
|
||||
3. 确认 HighGo 服务器支持 SM3 认证
|
||||
4. 检查用户名、密码、端口是否正确
|
||||
|
||||
### 问题 3:PostgreSQL 连接失败
|
||||
|
||||
**现象**:集成后标准 PostgreSQL 无法连接
|
||||
|
||||
**解决**:
|
||||
1. 检查是否误将 `github.com/lib/pq` 全局 replace 到 HighGo 驱动
|
||||
2. 确认 PostgreSQL 仍使用 `sql.Open("postgres", dsn)`
|
||||
3. 确认 HighGo 使用 `sql.Open("highgo", dsn)`
|
||||
|
||||
## 六、后续优化建议
|
||||
|
||||
如果后续需要增强,可考虑:
|
||||
|
||||
1. 将 HighGo `sslmode` 做成可配置项(前后端联动)
|
||||
2. 增加 HighGo/PG 驱动链路健康检查项
|
||||
3. 联系瀚高技术支持确认 SM3 + SSL 最佳参数组合
|
||||
|
||||
## 七、参考资料
|
||||
|
||||
- 瀚高官方文档:https://www.highgo.com/document/zh-cn/application/pq%E6%8E%A5%E5%8F%A3.html
|
||||
- 瀚高驱动下载:https://pan.baidu.com/s/1xuz6uJz0utRgKWecXhpOiA?pwd=o0tj
|
||||
- 标准 lib/pq:https://github.com/lib/pq
|
||||
1
frontend/.gitignore
vendored
Normal file
1
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.ace-tool/
|
||||
@@ -2,7 +2,7 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<link rel="icon" type="image/svg+xml" href="/logo.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>GoNavi</title>
|
||||
</head>
|
||||
@@ -10,4 +10,4 @@
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
52
frontend/public/logo.svg
Normal file
52
frontend/public/logo.svg
Normal file
@@ -0,0 +1,52 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512">
|
||||
<defs>
|
||||
<!-- Background: Soft Light Grey -->
|
||||
<linearGradient id="bgSoft" x1="0%" y1="0%" x2="0%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#f5f7fa;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#c3cfe2;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
|
||||
<!-- Hexagon: Solid Tech Pink -->
|
||||
<linearGradient id="solidPink" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#FF5F6D;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#FFC371;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
|
||||
<!-- N: Solid Tech Blue/Cyan -->
|
||||
<linearGradient id="solidCyan" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#00c6ff;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#0072ff;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
|
||||
<filter id="hardShadow" x="-20%" y="-20%" width="140%" height="140%">
|
||||
<feGaussianBlur in="SourceAlpha" stdDeviation="4"/>
|
||||
<feOffset dx="4" dy="4" result="offsetblur"/>
|
||||
<feComponentTransfer>
|
||||
<feFuncA type="linear" slope="0.2"/>
|
||||
</feComponentTransfer>
|
||||
<feMerge>
|
||||
<feMergeNode/>
|
||||
<feMergeNode in="SourceGraphic"/>
|
||||
</feMerge>
|
||||
</filter>
|
||||
</defs>
|
||||
|
||||
<!-- Background -->
|
||||
<rect x="32" y="32" width="448" height="448" rx="100" fill="url(#bgSoft)" />
|
||||
|
||||
<!-- Main Content Centered -->
|
||||
<g transform="translate(106, 106) scale(0.6)" filter="url(#hardShadow)">
|
||||
|
||||
<!-- Hex G -->
|
||||
<path d="M 250 0 L 466 125 L 466 375 L 250 500 L 34 375 L 34 125 Z"
|
||||
fill="none" stroke="url(#solidPink)" stroke-width="45" stroke-linejoin="round"/>
|
||||
|
||||
<!-- G Crossbar -->
|
||||
<path d="M 466 300 L 330 300" stroke="url(#solidPink)" stroke-width="45" stroke-linecap="round"/>
|
||||
|
||||
<!-- Inner N -->
|
||||
<path d="M 160 350 L 160 150 L 340 350 L 340 150"
|
||||
fill="none" stroke="url(#solidCyan)" stroke-width="50" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
@@ -3,6 +3,11 @@ html, body, #root {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
overflow: hidden; /* Disable global scrollbar */
|
||||
background-color: transparent !important; /* CRITICAL: Allow Wails window transparency */
|
||||
}
|
||||
|
||||
body, #root {
|
||||
border-radius: 14px; /* Slightly rounded app window corners */
|
||||
}
|
||||
|
||||
/* 侧边栏 Tree 样式优化 */
|
||||
@@ -30,4 +35,60 @@ html, body, #root {
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
padding-right: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
/* Scrollbar styling for dark mode */
|
||||
body[data-theme='dark'] ::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
body[data-theme='dark'] ::-webkit-scrollbar-track {
|
||||
background: #1f1f1f;
|
||||
}
|
||||
body[data-theme='dark'] ::-webkit-scrollbar-corner {
|
||||
background: #1f1f1f;
|
||||
}
|
||||
body[data-theme='dark'] ::-webkit-scrollbar-thumb {
|
||||
background: #424242;
|
||||
border-radius: 4px;
|
||||
border: 2px solid #1f1f1f;
|
||||
}
|
||||
body[data-theme='dark'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: #666;
|
||||
}
|
||||
|
||||
/* Ensure body background matches theme to avoid white flashes, but kept transparent for window composition */
|
||||
body {
|
||||
transition: color 0.3s;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] {
|
||||
/* 移除全局 text-shadow:对每个文本元素增加 GPU compositing 成本,
|
||||
在透明窗口环境下会显著加剧 GPU 负载 */
|
||||
}
|
||||
|
||||
/* 连接配置弹窗:滚动仅在弹窗 body 内部,不使用外层 wrap 滚动条 */
|
||||
.connection-modal-wrap {
|
||||
overflow: hidden !important;
|
||||
}
|
||||
|
||||
.connection-modal-wrap .ant-modal-content {
|
||||
max-height: calc(100vh - 72px);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.connection-modal-wrap .ant-modal-body {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.connection-modal-wrap .ant-modal-footer {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
/* Custom Title Bar Close Button Hover */
|
||||
.titlebar-close-btn:hover {
|
||||
background-color: #ff4d4f !important;
|
||||
color: #fff !important;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Layout, Button, ConfigProvider, theme, Dropdown, MenuProps, message } from 'antd';
|
||||
import { Layout, Button, ConfigProvider, theme, Dropdown, MenuProps, message, Modal, Spin, Slider, Progress } from 'antd';
|
||||
import zhCN from 'antd/locale/zh_CN';
|
||||
import { PlusOutlined, BulbOutlined, BulbFilled, ConsoleSqlOutlined, BugOutlined, SettingOutlined, UploadOutlined, DownloadOutlined } from '@ant-design/icons';
|
||||
import { PlusOutlined, BulbOutlined, BulbFilled, ConsoleSqlOutlined, UploadOutlined, DownloadOutlined, CloudDownloadOutlined, BugOutlined, ToolOutlined, InfoCircleOutlined, GithubOutlined, SkinOutlined, CheckOutlined, MinusOutlined, BorderOutlined, CloseOutlined, SettingOutlined } from '@ant-design/icons';
|
||||
import { Environment, EventsOn } from '../wailsjs/runtime/runtime';
|
||||
import Sidebar from './components/Sidebar';
|
||||
import TabManager from './components/TabManager';
|
||||
import ConnectionModal from './components/ConnectionModal';
|
||||
@@ -9,6 +10,8 @@ import DataSyncModal from './components/DataSyncModal';
|
||||
import LogPanel from './components/LogPanel';
|
||||
import { useStore } from './store';
|
||||
import { SavedConnection } from './types';
|
||||
import { blurToFilter, normalizeBlurForPlatform, normalizeOpacityForPlatform, isWindowsPlatform } from './utils/appearance';
|
||||
import { SetWindowTranslucency } from '../wailsjs/go/app/App';
|
||||
import './App.css';
|
||||
|
||||
const { Sider, Content } = Layout;
|
||||
@@ -17,7 +20,264 @@ function App() {
|
||||
const [isModalOpen, setIsModalOpen] = useState(false);
|
||||
const [isSyncModalOpen, setIsSyncModalOpen] = useState(false);
|
||||
const [editingConnection, setEditingConnection] = useState<SavedConnection | null>(null);
|
||||
const { darkMode, toggleDarkMode, addTab, activeContext, connections, addConnection, tabs, activeTabId } = useStore();
|
||||
const themeMode = useStore(state => state.theme);
|
||||
const setTheme = useStore(state => state.setTheme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const setAppearance = useStore(state => state.setAppearance);
|
||||
const darkMode = themeMode === 'dark';
|
||||
const effectiveOpacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const effectiveBlur = normalizeBlurForPlatform(appearance.blur);
|
||||
const blurFilter = blurToFilter(effectiveBlur);
|
||||
const windowCornerRadius = 14;
|
||||
const [isLinuxRuntime, setIsLinuxRuntime] = useState(false);
|
||||
|
||||
// 同步 macOS 窗口透明度:opacity=1.0 且 blur=0 时关闭 NSVisualEffectView,
|
||||
// 避免 GPU 持续计算窗口背后的模糊合成
|
||||
useEffect(() => {
|
||||
SetWindowTranslucency(appearance.opacity, appearance.blur).catch(() => {});
|
||||
}, [appearance.opacity, appearance.blur]);
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
Environment()
|
||||
.then((env) => {
|
||||
if (cancelled) return;
|
||||
setIsLinuxRuntime((env?.platform || '').toLowerCase() === 'linux');
|
||||
})
|
||||
.catch(() => {
|
||||
if (cancelled) return;
|
||||
const platform = typeof navigator !== 'undefined' ? navigator.platform : '';
|
||||
setIsLinuxRuntime(/linux/i.test(platform));
|
||||
});
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Background Helper
|
||||
const getBg = (darkHex: string, lightHex: string) => {
|
||||
if (!darkMode) return `rgba(255, 255, 255, ${effectiveOpacity})`; // Light mode usually white
|
||||
|
||||
// Parse hex to rgb
|
||||
const hex = darkHex.replace('#', '');
|
||||
const r = parseInt(hex.substring(0, 2), 16);
|
||||
const g = parseInt(hex.substring(2, 4), 16);
|
||||
const b = parseInt(hex.substring(4, 6), 16);
|
||||
return `rgba(${r}, ${g}, ${b}, ${effectiveOpacity})`;
|
||||
};
|
||||
// Specific colors
|
||||
const bgMain = getBg('#141414', '#ffffff');
|
||||
const bgContent = getBg('#1d1d1d', '#ffffff');
|
||||
|
||||
const addTab = useStore(state => state.addTab);
|
||||
const activeContext = useStore(state => state.activeContext);
|
||||
const connections = useStore(state => state.connections);
|
||||
const addConnection = useStore(state => state.addConnection);
|
||||
const tabs = useStore(state => state.tabs);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
const updateCheckInFlightRef = React.useRef(false);
|
||||
const updateDownloadInFlightRef = React.useRef(false);
|
||||
const updateDownloadedVersionRef = React.useRef<string | null>(null);
|
||||
const updateDownloadMetaRef = React.useRef<UpdateDownloadResultData | null>(null);
|
||||
const updateDeferredVersionRef = React.useRef<string | null>(null);
|
||||
const updateNotifiedVersionRef = React.useRef<string | null>(null);
|
||||
const updateMutedVersionRef = React.useRef<string | null>(null);
|
||||
const [isAboutOpen, setIsAboutOpen] = useState(false);
|
||||
const [aboutLoading, setAboutLoading] = useState(false);
|
||||
const [aboutInfo, setAboutInfo] = useState<{ version: string; author: string; buildTime?: string; repoUrl?: string; issueUrl?: string; releaseUrl?: string } | null>(null);
|
||||
const [aboutUpdateStatus, setAboutUpdateStatus] = useState<string>('');
|
||||
const [lastUpdateInfo, setLastUpdateInfo] = useState<UpdateInfo | null>(null);
|
||||
const [updateDownloadProgress, setUpdateDownloadProgress] = useState<{
|
||||
open: boolean;
|
||||
version: string;
|
||||
status: 'idle' | 'start' | 'downloading' | 'done' | 'error';
|
||||
percent: number;
|
||||
downloaded: number;
|
||||
total: number;
|
||||
message: string;
|
||||
}>({
|
||||
open: false,
|
||||
version: '',
|
||||
status: 'idle',
|
||||
percent: 0,
|
||||
downloaded: 0,
|
||||
total: 0,
|
||||
message: ''
|
||||
});
|
||||
|
||||
type UpdateInfo = {
|
||||
hasUpdate: boolean;
|
||||
currentVersion: string;
|
||||
latestVersion: string;
|
||||
releaseName?: string;
|
||||
releaseNotesUrl?: string;
|
||||
assetName?: string;
|
||||
assetUrl?: string;
|
||||
assetSize?: number;
|
||||
sha256?: string;
|
||||
};
|
||||
|
||||
type UpdateDownloadProgressEvent = {
|
||||
status?: 'start' | 'downloading' | 'done' | 'error';
|
||||
percent?: number;
|
||||
downloaded?: number;
|
||||
total?: number;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
type UpdateDownloadResultData = {
|
||||
info?: UpdateInfo;
|
||||
downloadPath?: string;
|
||||
installLogPath?: string;
|
||||
installTarget?: string;
|
||||
platform?: string;
|
||||
autoRelaunch?: boolean;
|
||||
};
|
||||
|
||||
const formatBytes = (bytes?: number) => {
|
||||
if (!bytes || bytes <= 0) return '0 B';
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
let value = bytes;
|
||||
let idx = 0;
|
||||
while (value >= 1024 && idx < units.length - 1) {
|
||||
value /= 1024;
|
||||
idx++;
|
||||
}
|
||||
return `${value.toFixed(idx === 0 ? 0 : 1)} ${units[idx]}`;
|
||||
};
|
||||
|
||||
const promptRestartForUpdate = (info: UpdateInfo, resultData?: UpdateDownloadResultData) => {
|
||||
const downloadPathHint = resultData?.downloadPath
|
||||
? `更新包路径:${resultData.downloadPath}`
|
||||
: '';
|
||||
const installLogHint = resultData?.installLogPath
|
||||
? `安装日志:${resultData.installLogPath}`
|
||||
: '';
|
||||
Modal.confirm({
|
||||
title: '更新已下载',
|
||||
content: (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 6, userSelect: 'text' }}>
|
||||
<div>{`版本 ${info.latestVersion} 已下载完成,是否现在重启完成更新?`}</div>
|
||||
{downloadPathHint ? <div style={{ fontSize: 12, color: '#8c8c8c' }}>{downloadPathHint}</div> : null}
|
||||
{installLogHint ? <div style={{ fontSize: 12, color: '#8c8c8c' }}>{installLogHint}</div> : null}
|
||||
</div>
|
||||
),
|
||||
okText: '立即重启',
|
||||
cancelText: '稍后',
|
||||
onOk: async () => {
|
||||
updateDeferredVersionRef.current = null;
|
||||
const res = await (window as any).go.app.App.InstallUpdateAndRestart();
|
||||
if (!res?.success) {
|
||||
message.error('更新安装失败: ' + (res?.message || '未知错误'));
|
||||
}
|
||||
},
|
||||
onCancel: () => {
|
||||
updateDeferredVersionRef.current = info.latestVersion;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const downloadUpdate = React.useCallback(async (info: UpdateInfo, silent: boolean) => {
|
||||
if (updateDownloadInFlightRef.current) return;
|
||||
if (updateDownloadedVersionRef.current === info.latestVersion) {
|
||||
if (!silent) {
|
||||
const cachedDownloadPath = updateDownloadMetaRef.current?.downloadPath;
|
||||
message.info(cachedDownloadPath ? `更新包已就绪(${info.latestVersion}),路径:${cachedDownloadPath}` : `更新包已就绪(${info.latestVersion})`);
|
||||
}
|
||||
if (!silent || updateDeferredVersionRef.current !== info.latestVersion) {
|
||||
promptRestartForUpdate(info, updateDownloadMetaRef.current || undefined);
|
||||
}
|
||||
return;
|
||||
}
|
||||
updateDownloadInFlightRef.current = true;
|
||||
updateDownloadMetaRef.current = null;
|
||||
const key = 'update-download';
|
||||
setUpdateDownloadProgress({
|
||||
open: true,
|
||||
version: info.latestVersion,
|
||||
status: 'start',
|
||||
percent: 0,
|
||||
downloaded: 0,
|
||||
total: info.assetSize || 0,
|
||||
message: ''
|
||||
});
|
||||
message.loading({ content: `正在下载更新 ${info.latestVersion}...`, key, duration: 0 });
|
||||
const res = await (window as any).go.app.App.DownloadUpdate();
|
||||
updateDownloadInFlightRef.current = false;
|
||||
if (res?.success) {
|
||||
const resultData = (res?.data || {}) as UpdateDownloadResultData;
|
||||
updateDownloadMetaRef.current = resultData;
|
||||
updateDownloadedVersionRef.current = info.latestVersion;
|
||||
setUpdateDownloadProgress(prev => ({ ...prev, status: 'done', percent: 100, open: false }));
|
||||
if (resultData?.downloadPath) {
|
||||
message.success({ content: `更新下载完成,更新包路径:${resultData.downloadPath}`, key, duration: 5 });
|
||||
} else {
|
||||
message.success({ content: '更新下载完成', key, duration: 2 });
|
||||
}
|
||||
setAboutUpdateStatus(`发现新版本 ${info.latestVersion}(已下载,待重启安装)`);
|
||||
if (!silent || updateDeferredVersionRef.current !== info.latestVersion) {
|
||||
promptRestartForUpdate(info, resultData);
|
||||
}
|
||||
} else {
|
||||
setUpdateDownloadProgress(prev => ({
|
||||
...prev,
|
||||
status: 'error',
|
||||
message: res?.message || '未知错误'
|
||||
}));
|
||||
message.error({ content: '更新下载失败: ' + (res?.message || '未知错误'), key, duration: 4 });
|
||||
}
|
||||
}, []);
|
||||
|
||||
const checkForUpdates = React.useCallback(async (silent: boolean) => {
|
||||
if (updateCheckInFlightRef.current) return;
|
||||
updateCheckInFlightRef.current = true;
|
||||
if (!silent) {
|
||||
setAboutUpdateStatus('正在检查更新...');
|
||||
}
|
||||
const res = await (window as any).go.app.App.CheckForUpdates();
|
||||
updateCheckInFlightRef.current = false;
|
||||
if (!res?.success) {
|
||||
if (!silent) {
|
||||
message.error('检查更新失败: ' + (res?.message || '未知错误'));
|
||||
setAboutUpdateStatus('检查更新失败: ' + (res?.message || '未知错误'));
|
||||
}
|
||||
return;
|
||||
}
|
||||
const info: UpdateInfo = res.data;
|
||||
if (!info) return;
|
||||
setLastUpdateInfo(info);
|
||||
if (info.hasUpdate) {
|
||||
if (!silent) {
|
||||
message.info(`发现新版本 ${info.latestVersion}`);
|
||||
setAboutUpdateStatus(`发现新版本 ${info.latestVersion}(未下载)`);
|
||||
}
|
||||
if (silent && isAboutOpen) {
|
||||
setAboutUpdateStatus(`发现新版本 ${info.latestVersion}(未下载)`);
|
||||
}
|
||||
if (silent && !isAboutOpen && updateMutedVersionRef.current !== info.latestVersion && updateNotifiedVersionRef.current !== info.latestVersion) {
|
||||
updateNotifiedVersionRef.current = info.latestVersion;
|
||||
setIsAboutOpen(true);
|
||||
}
|
||||
} else if (!silent) {
|
||||
const text = `当前已是最新版本(${info.currentVersion || '未知'})`;
|
||||
message.success(text);
|
||||
setAboutUpdateStatus(text);
|
||||
} else if (silent && isAboutOpen) {
|
||||
const text = `当前已是最新版本(${info.currentVersion || '未知'})`;
|
||||
setAboutUpdateStatus(text);
|
||||
}
|
||||
}, [downloadUpdate]);
|
||||
|
||||
const loadAboutInfo = React.useCallback(async () => {
|
||||
setAboutLoading(true);
|
||||
const res = await (window as any).go.app.App.GetAppInfo();
|
||||
if (res?.success) {
|
||||
setAboutInfo(res.data);
|
||||
} else {
|
||||
message.error('获取应用信息失败: ' + (res?.message || '未知错误'));
|
||||
}
|
||||
setAboutLoading(false);
|
||||
}, []);
|
||||
|
||||
const handleNewQuery = () => {
|
||||
let connId = activeContext?.connectionId || '';
|
||||
@@ -37,7 +297,8 @@ function App() {
|
||||
title: '新建查询',
|
||||
type: 'query',
|
||||
connectionId: connId,
|
||||
dbName: db
|
||||
dbName: db,
|
||||
query: ''
|
||||
});
|
||||
};
|
||||
|
||||
@@ -79,13 +340,7 @@ function App() {
|
||||
}
|
||||
};
|
||||
|
||||
const settingsMenu: MenuProps['items'] = [
|
||||
{
|
||||
key: 'sync',
|
||||
label: '数据同步',
|
||||
icon: <UploadOutlined rotate={90} />,
|
||||
onClick: () => setIsSyncModalOpen(true)
|
||||
},
|
||||
const toolsMenu: MenuProps['items'] = [
|
||||
{
|
||||
key: 'import',
|
||||
label: '导入连接配置',
|
||||
@@ -97,11 +352,47 @@ function App() {
|
||||
label: '导出连接配置',
|
||||
icon: <DownloadOutlined />,
|
||||
onClick: handleExportConnections
|
||||
},
|
||||
{
|
||||
key: 'sync',
|
||||
label: '数据同步',
|
||||
icon: <UploadOutlined rotate={90} />,
|
||||
onClick: () => setIsSyncModalOpen(true)
|
||||
}
|
||||
];
|
||||
|
||||
// Log Panel
|
||||
const [logPanelHeight, setLogPanelHeight] = useState(200);
|
||||
const themeMenu: MenuProps['items'] = [
|
||||
{
|
||||
key: 'light',
|
||||
label: '亮色主题',
|
||||
icon: themeMode === 'light' ? <CheckOutlined /> : undefined,
|
||||
onClick: () => setTheme('light')
|
||||
},
|
||||
{
|
||||
key: 'dark',
|
||||
label: '暗色主题',
|
||||
icon: themeMode === 'dark' ? <CheckOutlined /> : undefined,
|
||||
onClick: () => setTheme('dark')
|
||||
},
|
||||
{ type: 'divider' },
|
||||
{
|
||||
key: 'settings',
|
||||
label: '外观设置...',
|
||||
icon: <SettingOutlined />,
|
||||
onClick: () => setIsAppearanceModalOpen(true)
|
||||
}
|
||||
];
|
||||
|
||||
const [isAppearanceModalOpen, setIsAppearanceModalOpen] = useState(false);
|
||||
|
||||
|
||||
// Log Panel: 最小高度按“工具栏 + 1 条日志行(微增)”限制
|
||||
const LOG_PANEL_TOOLBAR_HEIGHT = 32;
|
||||
const LOG_PANEL_SINGLE_ROW_HEIGHT = 39;
|
||||
const LOG_PANEL_MIN_VISIBLE_ROWS = 1;
|
||||
const LOG_PANEL_MIN_HEIGHT = LOG_PANEL_TOOLBAR_HEIGHT + (LOG_PANEL_SINGLE_ROW_HEIGHT * LOG_PANEL_MIN_VISIBLE_ROWS);
|
||||
const LOG_PANEL_MAX_HEIGHT = 800;
|
||||
const [logPanelHeight, setLogPanelHeight] = useState(Math.max(200, LOG_PANEL_MIN_HEIGHT));
|
||||
const [isLogPanelOpen, setIsLogPanelOpen] = useState(false);
|
||||
const logResizeRef = React.useRef<{ startY: number, startHeight: number } | null>(null);
|
||||
const logGhostRef = React.useRef<HTMLDivElement>(null);
|
||||
@@ -130,7 +421,10 @@ function App() {
|
||||
const handleLogResizeUp = (e: MouseEvent) => {
|
||||
if (logResizeRef.current) {
|
||||
const delta = logResizeRef.current.startY - e.clientY;
|
||||
const newHeight = Math.max(100, Math.min(800, logResizeRef.current.startHeight + delta));
|
||||
const newHeight = Math.max(
|
||||
LOG_PANEL_MIN_HEIGHT,
|
||||
Math.min(LOG_PANEL_MAX_HEIGHT, logResizeRef.current.startHeight + delta)
|
||||
);
|
||||
setLogPanelHeight(newHeight);
|
||||
}
|
||||
|
||||
@@ -152,6 +446,14 @@ function App() {
|
||||
setIsModalOpen(false);
|
||||
setEditingConnection(null);
|
||||
};
|
||||
|
||||
const handleTitleBarDoubleClick = (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
const target = e.target as HTMLElement | null;
|
||||
if (target?.closest('[data-no-titlebar-toggle="true"]')) {
|
||||
return;
|
||||
}
|
||||
(window as any).runtime.WindowToggleMaximise();
|
||||
};
|
||||
|
||||
// Sidebar Resizing
|
||||
const [sidebarWidth, setSidebarWidth] = useState(300);
|
||||
@@ -214,52 +516,221 @@ function App() {
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (darkMode) {
|
||||
document.body.style.backgroundColor = '#141414';
|
||||
document.body.style.color = '#ffffff';
|
||||
} else {
|
||||
document.body.style.backgroundColor = '#ffffff';
|
||||
document.body.style.color = '#000000';
|
||||
}
|
||||
document.body.style.backgroundColor = 'transparent';
|
||||
document.body.style.color = darkMode ? '#ffffff' : '#000000';
|
||||
document.body.setAttribute('data-theme', darkMode ? 'dark' : 'light');
|
||||
}, [darkMode]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isAboutOpen) {
|
||||
if (lastUpdateInfo?.hasUpdate) {
|
||||
setAboutUpdateStatus(`发现新版本 ${lastUpdateInfo.latestVersion}(未下载)`);
|
||||
} else if (lastUpdateInfo) {
|
||||
setAboutUpdateStatus(`当前已是最新版本(${lastUpdateInfo.currentVersion || '未知'})`);
|
||||
} else {
|
||||
setAboutUpdateStatus('未检查');
|
||||
}
|
||||
loadAboutInfo();
|
||||
}
|
||||
}, [isAboutOpen, lastUpdateInfo, loadAboutInfo]);
|
||||
|
||||
useEffect(() => {
|
||||
const startupTimer = window.setTimeout(() => {
|
||||
checkForUpdates(true);
|
||||
}, 2000);
|
||||
const interval = window.setInterval(() => {
|
||||
checkForUpdates(true);
|
||||
}, 30 * 60 * 1000);
|
||||
return () => {
|
||||
window.clearTimeout(startupTimer);
|
||||
window.clearInterval(interval);
|
||||
};
|
||||
}, [checkForUpdates]);
|
||||
|
||||
useEffect(() => {
|
||||
const offDownloadProgress = EventsOn('update:download-progress', (event: UpdateDownloadProgressEvent) => {
|
||||
if (!event) return;
|
||||
const status = event.status || 'downloading';
|
||||
const nextStatus: 'idle' | 'start' | 'downloading' | 'done' | 'error' =
|
||||
status === 'start' || status === 'downloading' || status === 'done' || status === 'error'
|
||||
? status
|
||||
: 'downloading';
|
||||
const downloaded = typeof event.downloaded === 'number' ? event.downloaded : 0;
|
||||
const total = typeof event.total === 'number' ? event.total : 0;
|
||||
const percentRaw = typeof event.percent === 'number'
|
||||
? event.percent
|
||||
: (total > 0 ? (downloaded / total) * 100 : 0);
|
||||
const percent = Math.max(0, Math.min(100, percentRaw));
|
||||
setUpdateDownloadProgress(prev => ({
|
||||
open: nextStatus === 'start' || nextStatus === 'downloading' || nextStatus === 'error',
|
||||
version: prev.version,
|
||||
status: nextStatus,
|
||||
percent,
|
||||
downloaded,
|
||||
total,
|
||||
message: String(event.message || '')
|
||||
}));
|
||||
});
|
||||
return () => {
|
||||
offDownloadProgress();
|
||||
};
|
||||
}, []);
|
||||
|
||||
const linuxResizeHandleStyleBase = {
|
||||
position: 'fixed',
|
||||
zIndex: 12000,
|
||||
background: 'transparent',
|
||||
WebkitAppRegion: 'drag',
|
||||
'--wails-draggable': 'drag',
|
||||
userSelect: 'none'
|
||||
} as any;
|
||||
|
||||
const showLinuxResizeHandles = isLinuxRuntime;
|
||||
|
||||
return (
|
||||
<ConfigProvider
|
||||
locale={zhCN}
|
||||
theme={{
|
||||
algorithm: darkMode ? theme.darkAlgorithm : theme.defaultAlgorithm,
|
||||
token: {
|
||||
colorBgLayout: 'transparent',
|
||||
colorBgContainer: darkMode
|
||||
? `rgba(29, 29, 29, ${effectiveOpacity})`
|
||||
: `rgba(255, 255, 255, ${effectiveOpacity})`,
|
||||
colorBgElevated: darkMode
|
||||
? '#1f1f1f'
|
||||
: '#ffffff',
|
||||
colorFillAlter: darkMode
|
||||
? `rgba(38, 38, 38, ${effectiveOpacity})`
|
||||
: `rgba(250, 250, 250, ${effectiveOpacity})`,
|
||||
},
|
||||
components: {
|
||||
Layout: {
|
||||
colorBgBody: 'transparent',
|
||||
colorBgHeader: 'transparent',
|
||||
bodyBg: 'transparent',
|
||||
headerBg: 'transparent',
|
||||
siderBg: 'transparent',
|
||||
triggerBg: 'transparent'
|
||||
},
|
||||
Table: {
|
||||
headerBg: 'transparent',
|
||||
rowHoverBg: darkMode ? 'rgba(255, 255, 255, 0.08)' : 'rgba(0, 0, 0, 0.02)',
|
||||
},
|
||||
Tabs: {
|
||||
cardBg: 'transparent',
|
||||
itemActiveColor: darkMode ? '#177ddc' : '#1890ff',
|
||||
}
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Layout style={{ height: '100vh', overflow: 'hidden' }}>
|
||||
<Layout style={{
|
||||
height: '100vh',
|
||||
overflow: 'hidden',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
background: 'transparent',
|
||||
borderRadius: showLinuxResizeHandles ? 0 : windowCornerRadius,
|
||||
clipPath: showLinuxResizeHandles ? 'none' : `inset(0 round ${windowCornerRadius}px)`,
|
||||
backdropFilter: blurFilter,
|
||||
WebkitBackdropFilter: blurFilter,
|
||||
}}>
|
||||
{/* Custom Title Bar */}
|
||||
<div
|
||||
onDoubleClick={handleTitleBarDoubleClick}
|
||||
style={{
|
||||
height: 32,
|
||||
flexShrink: 0,
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'space-between',
|
||||
background: bgMain,
|
||||
borderBottom: 'none',
|
||||
userSelect: 'none',
|
||||
WebkitAppRegion: 'drag', // Wails drag region
|
||||
'--wails-draggable': 'drag',
|
||||
paddingLeft: 16
|
||||
} as any}
|
||||
>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8, fontWeight: 600 }}>
|
||||
{/* Logo can be added here if available */}
|
||||
GoNavi
|
||||
</div>
|
||||
<div
|
||||
data-no-titlebar-toggle="true"
|
||||
onDoubleClick={(e) => e.stopPropagation()}
|
||||
style={{ display: 'flex', height: '100%', WebkitAppRegion: 'no-drag', '--wails-draggable': 'no-drag' } as any}
|
||||
>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<MinusOutlined />}
|
||||
style={{ height: '100%', borderRadius: 0, width: 46 }}
|
||||
onClick={() => (window as any).runtime.WindowMinimise()}
|
||||
/>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<BorderOutlined />}
|
||||
style={{ height: '100%', borderRadius: 0, width: 46 }}
|
||||
onClick={() => (window as any).runtime.WindowToggleMaximise()}
|
||||
/>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<CloseOutlined />}
|
||||
danger
|
||||
className="titlebar-close-btn"
|
||||
style={{ height: '100%', borderRadius: 0, width: 46 }}
|
||||
onClick={() => (window as any).runtime.Quit()}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
style={{
|
||||
height: 36,
|
||||
flexShrink: 0,
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'flex-start',
|
||||
gap: 4,
|
||||
padding: '0 8px',
|
||||
borderBottom: 'none',
|
||||
background: bgMain,
|
||||
}}
|
||||
>
|
||||
<Dropdown menu={{ items: toolsMenu }} placement="bottomLeft">
|
||||
<Button type="text" icon={<ToolOutlined />} title="工具">工具</Button>
|
||||
</Dropdown>
|
||||
<Dropdown menu={{ items: themeMenu }} placement="bottomLeft">
|
||||
<Button type="text" icon={<SkinOutlined />} title="主题">主题</Button>
|
||||
</Dropdown>
|
||||
<Button type="text" icon={<InfoCircleOutlined />} title="关于" onClick={() => setIsAboutOpen(true)}>关于</Button>
|
||||
</div>
|
||||
<Layout style={{ flex: 1, minHeight: 0, minWidth: 0 }}>
|
||||
<Sider
|
||||
theme={darkMode ? "dark" : "light"}
|
||||
width={sidebarWidth}
|
||||
style={{
|
||||
borderRight: darkMode ? '1px solid #303030' : '1px solid #f0f0f0',
|
||||
position: 'relative'
|
||||
borderRight: '1px solid rgba(128,128,128,0.2)',
|
||||
position: 'relative',
|
||||
background: bgMain
|
||||
}}
|
||||
>
|
||||
<div style={{ height: '100%', display: 'flex', flexDirection: 'column', overflow: 'hidden' }}>
|
||||
<div style={{ padding: '10px', borderBottom: darkMode ? '1px solid #303030' : '1px solid #f0f0f0', display: 'flex', justifyContent: 'space-between', alignItems: 'center', flexShrink: 0 }}>
|
||||
<span style={{ fontWeight: 'bold', paddingLeft: 8 }}>GoNavi</span>
|
||||
<div style={{ padding: '10px', borderBottom: 'none', display: 'flex', justifyContent: 'flex-end', alignItems: 'center', flexShrink: 0 }}>
|
||||
|
||||
<div>
|
||||
<Button type="text" icon={darkMode ? <BulbFilled /> : <BulbOutlined />} onClick={toggleDarkMode} title="切换主题" />
|
||||
<Button type="text" icon={<ConsoleSqlOutlined />} onClick={handleNewQuery} title="新建查询" />
|
||||
<Button type="text" icon={<PlusOutlined />} onClick={() => setIsModalOpen(true)} title="新建连接" />
|
||||
<Dropdown menu={{ items: settingsMenu }} placement="bottomRight">
|
||||
<Button type="text" icon={<SettingOutlined />} title="更多设置" />
|
||||
</Dropdown>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style={{ flex: 1, overflow: 'hidden' }}>
|
||||
<Sidebar onEditConnection={handleEditConnection} />
|
||||
</div>
|
||||
|
||||
{/* Sidebar Footer for Log Toggle */}
|
||||
<div style={{ padding: '8px', borderTop: darkMode ? '1px solid #303030' : '1px solid #f0f0f0', display: 'flex', justifyContent: 'center', flexShrink: 0 }}>
|
||||
<div style={{ padding: '8px', borderTop: 'none', display: 'flex', justifyContent: 'center', flexShrink: 0 }}>
|
||||
<Button
|
||||
type={isLogPanelOpen ? "primary" : "text"}
|
||||
type={isLogPanelOpen ? "primary" : "text"}
|
||||
icon={<BugOutlined />}
|
||||
onClick={() => setIsLogPanelOpen(!isLogPanelOpen)}
|
||||
block
|
||||
@@ -285,18 +756,19 @@ function App() {
|
||||
title="拖动调整宽度"
|
||||
/>
|
||||
</Sider>
|
||||
<Content style={{ background: darkMode ? '#141414' : '#fff', overflow: 'hidden', display: 'flex', flexDirection: 'column' }}>
|
||||
<div style={{ flex: 1, overflow: 'hidden' }}>
|
||||
<TabManager />
|
||||
</div>
|
||||
{isLogPanelOpen && (
|
||||
<LogPanel
|
||||
<Content style={{ background: 'transparent', overflow: 'hidden', display: 'flex', flexDirection: 'column', minWidth: 0 }}>
|
||||
<div style={{ flex: 1, minHeight: 0, minWidth: 0, overflow: 'hidden', display: 'flex', flexDirection: 'column', background: bgContent }}>
|
||||
<TabManager />
|
||||
</div>
|
||||
{isLogPanelOpen && (
|
||||
<LogPanel
|
||||
height={logPanelHeight}
|
||||
onClose={() => setIsLogPanelOpen(false)}
|
||||
onResizeStart={handleLogResizeStart}
|
||||
/>
|
||||
)}
|
||||
</Content>
|
||||
</Layout>
|
||||
<ConnectionModal
|
||||
open={isModalOpen}
|
||||
onClose={handleCloseModal}
|
||||
@@ -306,6 +778,171 @@ function App() {
|
||||
open={isSyncModalOpen}
|
||||
onClose={() => setIsSyncModalOpen(false)}
|
||||
/>
|
||||
<Modal
|
||||
title="关于 GoNavi"
|
||||
open={isAboutOpen}
|
||||
onCancel={() => setIsAboutOpen(false)}
|
||||
footer={[
|
||||
lastUpdateInfo?.hasUpdate ? (
|
||||
<Button key="download" icon={<DownloadOutlined />} onClick={() => downloadUpdate(lastUpdateInfo, false)}>下载更新</Button>
|
||||
) : null,
|
||||
lastUpdateInfo?.hasUpdate ? (
|
||||
<Button key="mute" onClick={() => { updateMutedVersionRef.current = lastUpdateInfo.latestVersion; setIsAboutOpen(false); }}>本次不再提示</Button>
|
||||
) : null,
|
||||
<Button key="check" icon={<CloudDownloadOutlined />} onClick={() => checkForUpdates(false)}>检查更新</Button>,
|
||||
<Button key="close" type="primary" onClick={() => setIsAboutOpen(false)}>关闭</Button>
|
||||
].filter(Boolean)}
|
||||
>
|
||||
{aboutLoading ? (
|
||||
<div style={{ padding: '16px 0', textAlign: 'center' }}>
|
||||
<Spin />
|
||||
</div>
|
||||
) : (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 8 }}>
|
||||
<div>版本:{aboutInfo?.version || '未知'}</div>
|
||||
<div>作者:{aboutInfo?.author || '未知'}</div>
|
||||
<div>更新状态:{aboutUpdateStatus || '未检查'}</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
|
||||
<GithubOutlined />
|
||||
{aboutInfo?.repoUrl ? (
|
||||
<a onClick={(e) => { e.preventDefault(); (window as any).runtime.BrowserOpenURL(aboutInfo.repoUrl); }} href={aboutInfo.repoUrl}>
|
||||
{aboutInfo.repoUrl}
|
||||
</a>
|
||||
) : '未知'}
|
||||
</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
|
||||
<BugOutlined />
|
||||
{aboutInfo?.issueUrl ? (
|
||||
<a onClick={(e) => { e.preventDefault(); (window as any).runtime.BrowserOpenURL(aboutInfo.issueUrl); }} href={aboutInfo.issueUrl}>
|
||||
{aboutInfo.issueUrl}
|
||||
</a>
|
||||
) : '未知'}
|
||||
</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
|
||||
<CloudDownloadOutlined />
|
||||
{aboutInfo?.releaseUrl ? (
|
||||
<a onClick={(e) => { e.preventDefault(); (window as any).runtime.BrowserOpenURL(aboutInfo.releaseUrl); }} href={aboutInfo.releaseUrl}>
|
||||
{aboutInfo.releaseUrl}
|
||||
</a>
|
||||
) : '未知'}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</Modal>
|
||||
|
||||
<Modal
|
||||
title="外观设置"
|
||||
open={isAppearanceModalOpen}
|
||||
onCancel={() => setIsAppearanceModalOpen(false)}
|
||||
footer={null}
|
||||
width={400}
|
||||
>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 24, padding: '12px 0' }}>
|
||||
<div>
|
||||
<div style={{ marginBottom: 8, fontWeight: 500 }}>背景不透明度 (Opacity)</div>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 16 }}>
|
||||
<Slider
|
||||
min={0.1}
|
||||
max={1.0}
|
||||
step={0.05}
|
||||
value={appearance.opacity ?? 1.0}
|
||||
onChange={(v) => setAppearance({ opacity: v })}
|
||||
style={{ flex: 1 }}
|
||||
/>
|
||||
<span style={{ width: 40 }}>{Math.round((appearance.opacity ?? 1.0) * 100)}%</span>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div style={{ marginBottom: 8, fontWeight: 500 }}>高斯模糊 (Blur)</div>
|
||||
{isWindowsPlatform() ? (
|
||||
<div style={{ fontSize: 12, color: '#888' }}>
|
||||
Windows 使用系统 Acrylic 效果,模糊程度由系统控制
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 16 }}>
|
||||
<Slider
|
||||
min={0}
|
||||
max={20}
|
||||
value={appearance.blur ?? 0}
|
||||
onChange={(v) => setAppearance({ blur: v })}
|
||||
style={{ flex: 1 }}
|
||||
/>
|
||||
<span style={{ width: 40 }}>{appearance.blur}px</span>
|
||||
</div>
|
||||
<div style={{ fontSize: 12, color: '#888', marginTop: 4 }}>
|
||||
* 仅控制应用内覆盖层的模糊效果
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
|
||||
<Modal
|
||||
title={updateDownloadProgress.version ? `下载更新 ${updateDownloadProgress.version}` : '下载更新'}
|
||||
open={updateDownloadProgress.open}
|
||||
closable={updateDownloadProgress.status === 'error'}
|
||||
maskClosable={false}
|
||||
keyboard={updateDownloadProgress.status === 'error'}
|
||||
onCancel={() => {
|
||||
if (updateDownloadProgress.status === 'error') {
|
||||
setUpdateDownloadProgress({
|
||||
open: false,
|
||||
version: '',
|
||||
status: 'idle',
|
||||
percent: 0,
|
||||
downloaded: 0,
|
||||
total: 0,
|
||||
message: ''
|
||||
});
|
||||
}
|
||||
}}
|
||||
footer={updateDownloadProgress.status === 'error' ? [
|
||||
<Button
|
||||
key="close"
|
||||
onClick={() => setUpdateDownloadProgress({
|
||||
open: false,
|
||||
version: '',
|
||||
status: 'idle',
|
||||
percent: 0,
|
||||
downloaded: 0,
|
||||
total: 0,
|
||||
message: ''
|
||||
})}
|
||||
>
|
||||
关闭
|
||||
</Button>
|
||||
] : null}
|
||||
>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 12 }}>
|
||||
<Progress
|
||||
percent={Math.round(updateDownloadProgress.percent)}
|
||||
status={updateDownloadProgress.status === 'error' ? 'exception' : (updateDownloadProgress.status === 'done' ? 'success' : 'active')}
|
||||
/>
|
||||
<div style={{ fontSize: 12, color: '#8c8c8c' }}>
|
||||
{`${formatBytes(updateDownloadProgress.downloaded)} / ${formatBytes(updateDownloadProgress.total)}`}
|
||||
</div>
|
||||
{updateDownloadProgress.message ? (
|
||||
<div style={{ fontSize: 12, color: '#ff4d4f' }}>{updateDownloadProgress.message}</div>
|
||||
) : null}
|
||||
</div>
|
||||
</Modal>
|
||||
|
||||
{showLinuxResizeHandles && (
|
||||
<>
|
||||
{/* Linux Mint 下 frameless 仅局部可缩放:补四边四角命中层 */}
|
||||
<div style={{ ...linuxResizeHandleStyleBase, top: 0, left: 14, right: 14, height: 6, cursor: 'ns-resize' }} />
|
||||
<div style={{ ...linuxResizeHandleStyleBase, bottom: 0, left: 14, right: 14, height: 6, cursor: 'ns-resize' }} />
|
||||
<div style={{ ...linuxResizeHandleStyleBase, top: 14, bottom: 14, left: 0, width: 6, cursor: 'ew-resize' }} />
|
||||
<div style={{ ...linuxResizeHandleStyleBase, top: 14, bottom: 14, right: 0, width: 6, cursor: 'ew-resize' }} />
|
||||
|
||||
<div style={{ ...linuxResizeHandleStyleBase, top: 0, left: 0, width: 14, height: 14, cursor: 'nwse-resize' }} />
|
||||
<div style={{ ...linuxResizeHandleStyleBase, top: 0, right: 0, width: 14, height: 14, cursor: 'nesw-resize' }} />
|
||||
<div style={{ ...linuxResizeHandleStyleBase, bottom: 0, left: 0, width: 14, height: 14, cursor: 'nesw-resize' }} />
|
||||
<div style={{ ...linuxResizeHandleStyleBase, bottom: 0, right: 0, width: 14, height: 14, cursor: 'nwse-resize' }} />
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Ghost Resize Line for Sidebar */}
|
||||
<div
|
||||
@@ -343,4 +980,4 @@ function App() {
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
export default App;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,14 +1,36 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography } from 'antd';
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs } from 'antd';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, DBGetTables, DataSync } from '../../wailsjs/go/app/App';
|
||||
import { DBGetDatabases, DBGetTables, DataSync, DataSyncAnalyze, DataSyncPreview } from '../../wailsjs/go/app/App';
|
||||
import { SavedConnection } from '../types';
|
||||
import { connection } from '../../wailsjs/go/models';
|
||||
import { EventsOn } from '../../wailsjs/runtime/runtime';
|
||||
|
||||
const { Title, Text } = Typography;
|
||||
const { Step } = Steps;
|
||||
const { Option } = Select;
|
||||
|
||||
type SyncLogEvent = { jobId: string; level?: string; message?: string; ts?: number };
|
||||
type SyncProgressEvent = { jobId: string; percent?: number; current?: number; total?: number; table?: string; stage?: string };
|
||||
type SyncLogItem = { level: string; message: string; ts?: number };
|
||||
type TableDiffSummary = {
|
||||
table: string;
|
||||
pkColumn?: string;
|
||||
canSync?: boolean;
|
||||
inserts?: number;
|
||||
updates?: number;
|
||||
deletes?: number;
|
||||
same?: number;
|
||||
message?: string;
|
||||
};
|
||||
type TableOps = {
|
||||
insert: boolean;
|
||||
update: boolean;
|
||||
delete: boolean;
|
||||
selectedInsertPks?: string[];
|
||||
selectedUpdatePks?: string[];
|
||||
selectedDeletePks?: string[];
|
||||
};
|
||||
|
||||
const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open, onClose }) => {
|
||||
const connections = useStore((state) => state.connections);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
@@ -27,8 +49,76 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const [allTables, setAllTables] = useState<string[]>([]);
|
||||
const [selectedTables, setSelectedTables] = useState<string[]>([]);
|
||||
|
||||
// Options
|
||||
const [syncContent, setSyncContent] = useState<'data' | 'schema' | 'both'>('data');
|
||||
const [syncMode, setSyncMode] = useState<string>('insert_update');
|
||||
const [autoAddColumns, setAutoAddColumns] = useState<boolean>(true);
|
||||
const [showSameTables, setShowSameTables] = useState<boolean>(false);
|
||||
const [analyzing, setAnalyzing] = useState<boolean>(false);
|
||||
const [diffTables, setDiffTables] = useState<TableDiffSummary[]>([]);
|
||||
const [tableOptions, setTableOptions] = useState<Record<string, TableOps>>({});
|
||||
|
||||
const [previewOpen, setPreviewOpen] = useState(false);
|
||||
const [previewTable, setPreviewTable] = useState<string>('');
|
||||
const [previewLoading, setPreviewLoading] = useState(false);
|
||||
const [previewData, setPreviewData] = useState<any>(null);
|
||||
|
||||
// Step 3: Result
|
||||
const [syncResult, setSyncResult] = useState<any>(null);
|
||||
const [syncing, setSyncing] = useState(false);
|
||||
const [syncLogs, setSyncLogs] = useState<SyncLogItem[]>([]);
|
||||
const [syncProgress, setSyncProgress] = useState<{ percent: number; current: number; total: number; table: string; stage: string }>({
|
||||
percent: 0,
|
||||
current: 0,
|
||||
total: 0,
|
||||
table: '',
|
||||
stage: ''
|
||||
});
|
||||
const jobIdRef = useRef<string>('');
|
||||
const logBoxRef = useRef<HTMLDivElement>(null);
|
||||
const autoScrollRef = useRef(true);
|
||||
|
||||
const normalizeConnConfig = (conn: SavedConnection, database?: string) => ({
|
||||
...conn.config,
|
||||
port: Number((conn.config as any).port),
|
||||
password: conn.config.password || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: typeof database === 'string' ? database : (conn.config.database || ""),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (!open) return;
|
||||
|
||||
const offLog = EventsOn('sync:log', (event: SyncLogEvent) => {
|
||||
if (!event || event.jobId !== jobIdRef.current) return;
|
||||
const msg = String(event.message || '').trim();
|
||||
if (!msg) return;
|
||||
setSyncLogs(prev => [...prev, { level: String(event.level || 'info'), message: msg, ts: event.ts }]);
|
||||
});
|
||||
|
||||
const offProgress = EventsOn('sync:progress', (event: SyncProgressEvent) => {
|
||||
if (!event || event.jobId !== jobIdRef.current) return;
|
||||
setSyncProgress(prev => ({
|
||||
percent: typeof event.percent === 'number' ? event.percent : prev.percent,
|
||||
current: typeof event.current === 'number' ? event.current : prev.current,
|
||||
total: typeof event.total === 'number' ? event.total : prev.total,
|
||||
table: typeof event.table === 'string' ? event.table : prev.table,
|
||||
stage: typeof event.stage === 'string' ? event.stage : prev.stage,
|
||||
}));
|
||||
});
|
||||
|
||||
return () => {
|
||||
offLog();
|
||||
offProgress();
|
||||
};
|
||||
}, [open]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!logBoxRef.current) return;
|
||||
if (!autoScrollRef.current) return;
|
||||
logBoxRef.current.scrollTop = logBoxRef.current.scrollHeight;
|
||||
}, [syncLogs]);
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
@@ -38,7 +128,23 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setSourceDb('');
|
||||
setTargetDb('');
|
||||
setSelectedTables([]);
|
||||
setSyncContent('data');
|
||||
setSyncMode('insert_update');
|
||||
setAutoAddColumns(true);
|
||||
setShowSameTables(false);
|
||||
setAnalyzing(false);
|
||||
setDiffTables([]);
|
||||
setTableOptions({});
|
||||
setPreviewOpen(false);
|
||||
setPreviewTable('');
|
||||
setPreviewLoading(false);
|
||||
setPreviewData(null);
|
||||
setSyncResult(null);
|
||||
setSyncing(false);
|
||||
setSyncLogs([]);
|
||||
setSyncProgress({ percent: 0, current: 0, total: 0, table: '', stage: '' });
|
||||
jobIdRef.current = '';
|
||||
autoScrollRef.current = true;
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
@@ -49,7 +155,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(conn.config as any);
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setSourceDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
@@ -65,7 +171,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(conn.config as any);
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setTargetDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
@@ -83,7 +189,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
try {
|
||||
const conn = connections.find(c => c.id === sourceConnId);
|
||||
if (conn) {
|
||||
const config = { ...conn.config, database: sourceDb };
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
@@ -98,36 +204,221 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
const runSync = async () => {
|
||||
const updateTableOption = (table: string, key: keyof TableOps, value: any) => {
|
||||
setTableOptions(prev => ({
|
||||
...prev,
|
||||
[table]: { ...(prev[table] || { insert: true, update: true, delete: false }), [key]: value }
|
||||
}));
|
||||
};
|
||||
|
||||
const analyzeDiff = async () => {
|
||||
if (selectedTables.length === 0) return;
|
||||
if (!sourceConnId || !targetConnId) return message.error("Select connections first");
|
||||
if (!sourceDb || !targetDb) return message.error("Select databases first");
|
||||
|
||||
setLoading(true);
|
||||
setAnalyzing(true);
|
||||
setDiffTables([]);
|
||||
setTableOptions({});
|
||||
setSyncLogs([]);
|
||||
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
const jobId = `analyze-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
|
||||
jobIdRef.current = jobId;
|
||||
autoScrollRef.current = true;
|
||||
setSyncProgress({ percent: 0, current: 0, total: selectedTables.length, table: '', stage: '差异分析' });
|
||||
|
||||
const config = {
|
||||
sourceConfig: normalizeConnConfig(sConn, sourceDb),
|
||||
targetConfig: normalizeConnConfig(tConn, targetDb),
|
||||
tables: selectedTables,
|
||||
content: syncContent,
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
jobId,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSyncAnalyze(config as any);
|
||||
if (res.success) {
|
||||
const tables = ((res.data as any)?.tables || []) as TableDiffSummary[];
|
||||
setDiffTables(tables);
|
||||
const init: Record<string, TableOps> = {};
|
||||
tables.forEach(t => {
|
||||
const can = !!t.canSync;
|
||||
init[t.table] = {
|
||||
insert: can,
|
||||
update: can,
|
||||
delete: false,
|
||||
selectedInsertPks: [],
|
||||
selectedUpdatePks: [],
|
||||
selectedDeletePks: [],
|
||||
};
|
||||
});
|
||||
setTableOptions(init);
|
||||
message.success("差异分析完成");
|
||||
} else {
|
||||
message.error(res.message || "差异分析失败");
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error("差异分析失败: " + (e?.message || ""));
|
||||
}
|
||||
|
||||
setLoading(false);
|
||||
setAnalyzing(false);
|
||||
};
|
||||
|
||||
const openPreview = async (table: string) => {
|
||||
if (!table) return;
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
|
||||
setPreviewOpen(true);
|
||||
setPreviewTable(table);
|
||||
setPreviewLoading(true);
|
||||
setPreviewData(null);
|
||||
|
||||
const config = {
|
||||
sourceConfig: normalizeConnConfig(sConn, sourceDb),
|
||||
targetConfig: normalizeConnConfig(tConn, targetDb),
|
||||
tables: selectedTables,
|
||||
content: "data",
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSyncPreview(config as any, table, 200);
|
||||
if (res.success) {
|
||||
setPreviewData(res.data);
|
||||
} else {
|
||||
message.error(res.message || "加载差异预览失败");
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error("加载差异预览失败: " + (e?.message || ""));
|
||||
}
|
||||
|
||||
setPreviewLoading(false);
|
||||
};
|
||||
|
||||
const runSync = async () => {
|
||||
if (syncContent !== 'schema' && diffTables.length === 0) {
|
||||
message.error("请先对比差异,再开始同步");
|
||||
return;
|
||||
}
|
||||
if (syncContent !== 'schema' && syncMode === 'full_overwrite') {
|
||||
const ok = await new Promise<boolean>((resolve) => {
|
||||
Modal.confirm({
|
||||
title: '确认全量覆盖',
|
||||
content: '全量覆盖会清空目标表数据后再插入,请确认已备份目标库。',
|
||||
okText: '继续执行',
|
||||
cancelText: '取消',
|
||||
onOk: () => resolve(true),
|
||||
onCancel: () => resolve(false),
|
||||
});
|
||||
});
|
||||
if (!ok) return;
|
||||
}
|
||||
|
||||
setLoading(true);
|
||||
setSyncing(true);
|
||||
setCurrentStep(2);
|
||||
setSyncResult(null);
|
||||
setSyncLogs([]);
|
||||
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
|
||||
const jobId = `sync-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
|
||||
jobIdRef.current = jobId;
|
||||
autoScrollRef.current = true;
|
||||
setSyncProgress({
|
||||
percent: 0,
|
||||
current: 0,
|
||||
total: selectedTables.length,
|
||||
table: '',
|
||||
stage: '准备开始',
|
||||
});
|
||||
|
||||
const config = {
|
||||
sourceConfig: { ...sConn.config, database: sourceDb },
|
||||
targetConfig: { ...tConn.config, database: targetDb },
|
||||
sourceConfig: {
|
||||
...sConn.config,
|
||||
port: Number((sConn.config as any).port),
|
||||
password: sConn.config.password || "",
|
||||
useSSH: sConn.config.useSSH || false,
|
||||
ssh: sConn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: sourceDb,
|
||||
},
|
||||
targetConfig: {
|
||||
...tConn.config,
|
||||
port: Number((tConn.config as any).port),
|
||||
password: tConn.config.password || "",
|
||||
useSSH: tConn.config.useSSH || false,
|
||||
ssh: tConn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: targetDb,
|
||||
},
|
||||
tables: selectedTables,
|
||||
mode: "insert_update"
|
||||
content: syncContent,
|
||||
mode: syncMode,
|
||||
autoAddColumns,
|
||||
tableOptions,
|
||||
jobId,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSync(config as any);
|
||||
setSyncResult(res);
|
||||
setCurrentStep(2);
|
||||
if (Array.isArray(res?.logs) && res.logs.length > 0) {
|
||||
setSyncLogs(prev => {
|
||||
if (prev.length > 0) return prev;
|
||||
return (res.logs as string[]).map((log) => {
|
||||
const msg = String(log || '').trim();
|
||||
if (msg.includes('致命错误') || msg.includes('失败')) return { level: 'error', message: msg };
|
||||
if (msg.includes('跳过') || msg.includes('警告')) return { level: 'warn', message: msg };
|
||||
return { level: 'info', message: msg };
|
||||
});
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
message.error("Sync execution failed");
|
||||
setSyncResult({ success: false, message: "同步执行失败", logs: [] });
|
||||
}
|
||||
setLoading(false);
|
||||
setSyncing(false);
|
||||
};
|
||||
|
||||
const renderSyncLogItem = (item: SyncLogItem) => {
|
||||
const level = String(item.level || 'info').toLowerCase();
|
||||
const color = level === 'error' ? '#ff4d4f' : (level === 'warn' ? '#faad14' : '#595959');
|
||||
const label = level === 'error' ? '错误' : (level === 'warn' ? '警告' : '信息');
|
||||
const timeText = typeof item.ts === 'number' ? new Date(item.ts).toLocaleTimeString('zh-CN', { hour12: false }) : '';
|
||||
return (
|
||||
<div style={{ display: 'flex', gap: 8, alignItems: 'flex-start' }}>
|
||||
<span style={{ color, flex: '0 0 auto' }}>● {label}</span>
|
||||
{timeText && <span style={{ color: '#8c8c8c', flex: '0 0 auto' }}>{timeText}</span>}
|
||||
<span style={{ whiteSpace: 'pre-wrap', wordBreak: 'break-word' }}>{item.message}</span>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal
|
||||
title="数据同步"
|
||||
open={open}
|
||||
onCancel={onClose}
|
||||
width={800}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
title="数据同步"
|
||||
open={open}
|
||||
onCancel={() => {
|
||||
if (syncing) {
|
||||
message.warning("同步执行中,暂不支持关闭");
|
||||
return;
|
||||
}
|
||||
onClose();
|
||||
}}
|
||||
width={800}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
closable={!syncing}
|
||||
maskClosable={!syncing}
|
||||
>
|
||||
<Steps current={currentStep} style={{ marginBottom: 24 }}>
|
||||
<Step title="配置源与目标" />
|
||||
@@ -137,34 +428,67 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 1: CONFIG */}
|
||||
{currentStep === 0 && (
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<div>
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={sourceDb} onChange={setSourceDb} showSearch>
|
||||
{sourceDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={targetDb} onChange={setTargetDb} showSearch>
|
||||
{targetDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<Card title="同步选项" style={{ marginTop: 16 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
<Form.Item label="同步内容">
|
||||
<Select value={syncContent} onChange={setSyncContent}>
|
||||
<Option value="data">仅同步数据</Option>
|
||||
<Option value="schema">仅同步结构</Option>
|
||||
<Option value="both">同步结构 + 数据</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={sourceDb} onChange={setSourceDb} showSearch>
|
||||
{sourceDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
<Form.Item label="同步模式">
|
||||
<Select value={syncMode} onChange={setSyncMode} disabled={syncContent === 'schema'}>
|
||||
<Option value="insert_update">增量同步(对比差异,按插入/更新/删除勾选执行)</Option>
|
||||
<Option value="insert_only">仅插入(不对比目标;无主键表将跳过)</Option>
|
||||
<Option value="full_overwrite">全量覆盖(清空目标表后插入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={targetDb} onChange={setTargetDb} showSearch>
|
||||
{targetDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
<Form.Item>
|
||||
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)}>
|
||||
自动补齐目标表缺失字段(仅 MySQL 目标)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
{syncContent !== 'schema' && syncMode === 'full_overwrite' && (
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="全量覆盖会清空目标表数据,请谨慎使用。"
|
||||
/>
|
||||
)}
|
||||
</Form>
|
||||
</Card>
|
||||
</div>
|
||||
@@ -172,32 +496,155 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 2: TABLES */}
|
||||
{currentStep === 1 && (
|
||||
<div style={{ height: 400 }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 12 }}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
|
||||
显示相同表
|
||||
</Checkbox>
|
||||
</div>
|
||||
<Transfer
|
||||
dataSource={allTables.map(t => ({ key: t, title: t }))}
|
||||
titles={['源表', '已选表']}
|
||||
targetKeys={selectedTables}
|
||||
onChange={(keys) => setSelectedTables(keys as string[])}
|
||||
render={item => item.title}
|
||||
listStyle={{ width: 350, height: 350, marginTop: 12 }}
|
||||
listStyle={{ width: 350, height: 280, marginTop: 0 }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表', notFoundContent: '暂无数据' }}
|
||||
/>
|
||||
|
||||
{diffTables.length > 0 && (
|
||||
<div>
|
||||
<Divider orientation="left">对比结果</Divider>
|
||||
<Table
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey={(r: any) => r.table}
|
||||
dataSource={diffTables.filter(t => {
|
||||
const ins = Number(t.inserts || 0);
|
||||
const upd = Number(t.updates || 0);
|
||||
const del = Number(t.deletes || 0);
|
||||
const same = Number(t.same || 0);
|
||||
const msg = String(t.message || '').trim();
|
||||
const can = !!t.canSync;
|
||||
if (showSameTables) return true;
|
||||
if (!can) return true;
|
||||
if (msg) return true;
|
||||
return ins > 0 || upd > 0 || del > 0 || same === 0;
|
||||
})}
|
||||
columns={[
|
||||
{ title: '表名', dataIndex: 'table', key: 'table', ellipsis: true },
|
||||
{
|
||||
title: '插入',
|
||||
key: 'inserts',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.inserts || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.insert}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}
|
||||
>
|
||||
{Number(r.inserts || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '更新',
|
||||
key: 'updates',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.updates || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.update}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}
|
||||
>
|
||||
{Number(r.updates || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '删除',
|
||||
key: 'deletes',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.deletes || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.delete}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}
|
||||
>
|
||||
{Number(r.deletes || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{ title: '相同', dataIndex: 'same', key: 'same', width: 70, render: (v: any) => Number(v || 0) },
|
||||
{ title: '消息', dataIndex: 'message', key: 'message', ellipsis: true, render: (v: any) => (v ? String(v) : '') },
|
||||
{
|
||||
title: '预览',
|
||||
key: 'preview',
|
||||
width: 80,
|
||||
render: (_: any, r: any) => {
|
||||
const can = !!r.canSync;
|
||||
const hasDiff = Number(r.inserts || 0) + Number(r.updates || 0) + Number(r.deletes || 0) > 0;
|
||||
return (
|
||||
<Button size="small" disabled={!can || !hasDiff || analyzing} onClick={() => openPreview(r.table)}>
|
||||
查看
|
||||
</Button>
|
||||
);
|
||||
}
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* STEP 3: RESULT */}
|
||||
{currentStep === 2 && syncResult && (
|
||||
{currentStep === 2 && (
|
||||
<div>
|
||||
<Alert
|
||||
message={syncResult.success ? "同步完成" : "同步失败"}
|
||||
description={syncResult.message || `成功同步 ${syncResult.tablesSynced} 张表. 插入: ${syncResult.rowsInserted}, 更新: ${syncResult.rowsUpdated}`}
|
||||
type={syncResult.success ? "success" : "error"}
|
||||
showIcon
|
||||
<Alert
|
||||
message={syncing ? "正在同步" : (syncResult?.success ? "同步完成" : "同步失败")}
|
||||
description={
|
||||
syncing
|
||||
? `当前阶段:${syncProgress.stage || '执行中'}${syncProgress.table ? `,表:${syncProgress.table}` : ''}`
|
||||
: (syncResult?.message || `成功同步 ${syncResult?.tablesSynced || 0} 张表. 插入: ${syncResult?.rowsInserted || 0}, 更新: ${syncResult?.rowsUpdated || 0}`)
|
||||
}
|
||||
type={syncing ? "info" : (syncResult?.success ? "success" : "error")}
|
||||
showIcon
|
||||
/>
|
||||
|
||||
<div style={{ marginTop: 12 }}>
|
||||
<Progress
|
||||
percent={syncProgress.percent}
|
||||
status={syncing ? "active" : (syncResult?.success ? "success" : "exception")}
|
||||
format={() => `${syncProgress.current}/${syncProgress.total}`}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Divider orientation="left">日志</Divider>
|
||||
<div style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}>
|
||||
{syncResult.logs.map((log: string, i: number) => <div key={i}>{log}</div>)}
|
||||
<div
|
||||
ref={logBoxRef}
|
||||
onScroll={() => {
|
||||
const el = logBoxRef.current;
|
||||
if (!el) return;
|
||||
const nearBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
|
||||
autoScrollRef.current = nearBottom;
|
||||
}}
|
||||
style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}
|
||||
>
|
||||
{syncLogs.map((item, i: number) => <div key={i}>{renderSyncLogItem(item)}</div>)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
@@ -206,20 +653,154 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
{currentStep === 0 && (
|
||||
<Button type="primary" onClick={nextToTables} loading={loading}>下一步</Button>
|
||||
)}
|
||||
{currentStep === 1 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(0)} style={{ marginRight: 8 }}>上一步</Button>
|
||||
<Button type="primary" onClick={runSync} loading={loading} disabled={selectedTables.length === 0}>开始同步</Button>
|
||||
{currentStep === 1 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(0)} style={{ marginRight: 8 }}>上一步</Button>
|
||||
<Button onClick={analyzeDiff} loading={loading} disabled={syncContent === 'schema' || selectedTables.length === 0 || analyzing} style={{ marginRight: 8 }}>
|
||||
对比差异
|
||||
</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={runSync}
|
||||
loading={loading}
|
||||
disabled={selectedTables.length === 0 || (syncContent !== 'schema' && diffTables.length === 0)}
|
||||
>
|
||||
开始同步
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
{currentStep === 2 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(1)} style={{ marginRight: 8 }}>继续同步</Button>
|
||||
<Button type="primary" onClick={onClose}>关闭</Button>
|
||||
<Button disabled={syncing} onClick={() => setCurrentStep(1)} style={{ marginRight: 8 }}>继续同步</Button>
|
||||
<Button type="primary" disabled={syncing} onClick={onClose}>关闭</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</Modal>
|
||||
<Drawer
|
||||
title={`差异预览:${previewTable}`}
|
||||
open={previewOpen}
|
||||
onClose={() => { setPreviewOpen(false); setPreviewTable(''); setPreviewData(null); }}
|
||||
width={900}
|
||||
>
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览..." />}
|
||||
{!previewLoading && previewData && (
|
||||
<div>
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message={`插入 ${previewData.totalInserts || 0},更新 ${previewData.totalUpdates || 0},删除 ${previewData.totalDeletes || 0}(预览最多展示 200 条/类型)`}
|
||||
/>
|
||||
<Divider />
|
||||
<Tabs
|
||||
items={[
|
||||
{
|
||||
key: 'insert',
|
||||
label: `插入(${previewData.totalInserts || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Text type="secondary">未勾选任何行表示“同步全部插入差异”;如不想执行插入请在对比结果中取消勾选“插入”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.inserts || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedInsertPks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedInsertPks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.insert }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '数据', dataIndex: 'row', key: 'row', render: (v: any) => <pre style={{ margin: 0, maxHeight: 140, overflow: 'auto' }}>{JSON.stringify(v, null, 2)}</pre> }
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'update',
|
||||
label: `更新(${previewData.totalUpdates || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Text type="secondary">未勾选任何行表示“同步全部更新差异”;如不想执行更新请在对比结果中取消勾选“更新”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.updates || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedUpdatePks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedUpdatePks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.update }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '变更字段', dataIndex: 'changedColumns', key: 'changedColumns', render: (v: any) => Array.isArray(v) ? v.join(', ') : '' },
|
||||
{
|
||||
title: '详情',
|
||||
key: 'detail',
|
||||
width: 80,
|
||||
render: (_: any, r: any) => (
|
||||
<Button size="small" onClick={() => {
|
||||
Modal.info({
|
||||
title: `更新详情:${previewTable} / ${r.pk}`,
|
||||
width: 900,
|
||||
content: (
|
||||
<div style={{ display: 'flex', gap: 12 }}>
|
||||
<div style={{ flex: 1 }}>
|
||||
<Title level={5}>源</Title>
|
||||
<pre style={{ maxHeight: 360, overflow: 'auto', background: '#f5f5f5', padding: 8 }}>{JSON.stringify(r.source, null, 2)}</pre>
|
||||
</div>
|
||||
<div style={{ flex: 1 }}>
|
||||
<Title level={5}>目标</Title>
|
||||
<pre style={{ maxHeight: 360, overflow: 'auto', background: '#f5f5f5', padding: 8 }}>{JSON.stringify(r.target, null, 2)}</pre>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
});
|
||||
}}>查看</Button>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'delete',
|
||||
label: `删除(${previewData.totalDeletes || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Alert type="warning" showIcon message="删除默认不勾选。请确认业务允许后再开启删除操作。" />
|
||||
<Text type="secondary">未勾选任何行表示“同步全部删除差异”;如不想执行删除请在对比结果中取消勾选“删除”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.deletes || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedDeletePks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedDeletePks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.delete }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '数据', dataIndex: 'row', key: 'row', render: (v: any) => <pre style={{ margin: 0, maxHeight: 140, overflow: 'auto' }}>{JSON.stringify(v, null, 2)}</pre> }
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</Drawer>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,34 +1,52 @@
|
||||
import React, { useEffect, useState, useCallback } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import { message } from 'antd';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid from './DataGrid';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { buildOrderBySQL, buildWhereSQL, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
|
||||
const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
const [columnNames, setColumnNames] = useState<string[]>([]);
|
||||
const [pkColumns, setPkColumns] = useState<string[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const { connections, addSqlLog } = useStore();
|
||||
const connections = useStore(state => state.connections);
|
||||
const addSqlLog = useStore(state => state.addSqlLog);
|
||||
const fetchSeqRef = useRef(0);
|
||||
const countSeqRef = useRef(0);
|
||||
const countKeyRef = useRef<string>('');
|
||||
const pkSeqRef = useRef(0);
|
||||
const pkKeyRef = useRef<string>('');
|
||||
|
||||
const [pagination, setPagination] = useState({
|
||||
current: 1,
|
||||
pageSize: 100,
|
||||
total: 0
|
||||
total: 0,
|
||||
totalKnown: false
|
||||
});
|
||||
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(null);
|
||||
|
||||
const [showFilter, setShowFilter] = useState(false);
|
||||
const [filterConditions, setFilterConditions] = useState<any[]>([]);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>([]);
|
||||
const currentConnType = (connections.find(c => c.id === tab.connectionId)?.config?.type || '').toLowerCase();
|
||||
const forceReadOnly = currentConnType === 'tdengine';
|
||||
|
||||
useEffect(() => {
|
||||
setPkColumns([]);
|
||||
pkKeyRef.current = '';
|
||||
countKeyRef.current = '';
|
||||
setPagination(prev => ({ ...prev, current: 1, total: 0, totalKnown: false }));
|
||||
}, [tab.connectionId, tab.dbName, tab.tableName]);
|
||||
|
||||
const fetchData = useCallback(async (page = pagination.current, size = pagination.pageSize) => {
|
||||
const seq = ++fetchSeqRef.current;
|
||||
setLoading(true);
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
message.error("Connection not found");
|
||||
setLoading(false);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -41,135 +59,201 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
|
||||
const quoteIdent = (ident: string) => {
|
||||
if (!ident) return ident;
|
||||
if (config.type === 'mysql') return `\`${ident.replace(/`/g, '``')}\``;
|
||||
return `"${ident.replace(/"/g, '""')}"`;
|
||||
};
|
||||
const escapeLiteral = (val: string) => val.replace(/'/g, "''");
|
||||
const dbType = config.type || '';
|
||||
const dbTypeLower = String(dbType || '').trim().toLowerCase();
|
||||
const isMySQLFamily = dbTypeLower === 'mysql' || dbTypeLower === 'mariadb';
|
||||
|
||||
const dbName = tab.dbName || '';
|
||||
const tableName = tab.tableName || '';
|
||||
|
||||
const whereParts: string[] = [];
|
||||
filterConditions.forEach(cond => {
|
||||
if (cond.column && cond.value) {
|
||||
if (cond.op === 'LIKE') {
|
||||
whereParts.push(`${quoteIdent(cond.column)} LIKE '%${escapeLiteral(cond.value)}%'`);
|
||||
} else {
|
||||
whereParts.push(`${quoteIdent(cond.column)} ${cond.op} '${escapeLiteral(cond.value)}'`);
|
||||
}
|
||||
}
|
||||
});
|
||||
const whereSQL = whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : "";
|
||||
const whereSQL = buildWhereSQL(dbType, filterConditions);
|
||||
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteIdent(tableName)} ${whereSQL}`;
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
|
||||
let sql = `SELECT * FROM ${quoteIdent(tableName)} ${whereSQL}`;
|
||||
if (sortInfo && sortInfo.order) {
|
||||
sql += ` ORDER BY ${quoteIdent(sortInfo.columnKey)} ${sortInfo.order === 'ascend' ? 'ASC' : 'DESC'}`;
|
||||
}
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const offset = (page - 1) * size;
|
||||
sql += ` LIMIT ${size} OFFSET ${offset}`;
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
|
||||
const startTime = Date.now();
|
||||
const requestStartTime = Date.now();
|
||||
let executedSql = sql;
|
||||
try {
|
||||
const pCount = DBQuery(config as any, dbName, countSql);
|
||||
const pData = DBQuery(config as any, dbName, sql);
|
||||
|
||||
let pCols = null;
|
||||
if (pkColumns.length === 0) {
|
||||
pCols = DBGetColumns(config as any, dbName, tableName);
|
||||
}
|
||||
const executeDataQuery = async (querySql: string, attemptLabel: string) => {
|
||||
const startTime = Date.now();
|
||||
const result = await DBQuery(config as any, dbName, querySql);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: result.success ? 'success' : 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: result.success ? '' : `${attemptLabel}: ${result.message}`,
|
||||
affectedRows: Array.isArray(result.data) ? result.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const [resCount, resData] = await Promise.all([pCount, pData]);
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
// Log Execution
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount.success ? 'success' : 'error',
|
||||
duration: duration / 2, // Estimate
|
||||
message: resCount.success ? '' : resCount.message,
|
||||
dbName
|
||||
});
|
||||
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: sql,
|
||||
status: resData.success ? 'success' : 'error',
|
||||
duration: duration,
|
||||
message: resData.success ? '' : resData.message,
|
||||
affectedRows: Array.isArray(resData.data) ? resData.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
|
||||
if (pCols) {
|
||||
const resCols = await pCols;
|
||||
if (resCols.success) {
|
||||
const pks = (resCols.data as ColumnDefinition[]).filter(c => c.key === 'PRI').map(c => c.name);
|
||||
setPkColumns(pks);
|
||||
const hasSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
const isSortMemoryErr = (msg: string) => /error\s*1038|out of sort memory/i.test(String(msg || ''));
|
||||
let resData = await executeDataQuery(sql, '主查询');
|
||||
|
||||
if (!resData.success && isMySQLFamily && hasSort && isSortMemoryErr(resData.message)) {
|
||||
const retrySql32MB = withSortBufferTuningSQL(dbType, sql, 32 * 1024 * 1024);
|
||||
if (retrySql32MB !== sql) {
|
||||
executedSql = retrySql32MB;
|
||||
resData = await executeDataQuery(retrySql32MB, '重试(32MB sort_buffer)');
|
||||
}
|
||||
if (!resData.success && isSortMemoryErr(resData.message)) {
|
||||
const retrySql128MB = withSortBufferTuningSQL(dbType, sql, 128 * 1024 * 1024);
|
||||
if (retrySql128MB !== executedSql) {
|
||||
executedSql = retrySql128MB;
|
||||
resData = await executeDataQuery(retrySql128MB, '重试(128MB sort_buffer)');
|
||||
}
|
||||
}
|
||||
if (resData.success) {
|
||||
message.warning('已自动提升排序缓冲并重试成功。');
|
||||
}
|
||||
}
|
||||
|
||||
let totalRecords = 0;
|
||||
if (resCount.success && Array.isArray(resCount.data) && resCount.data.length > 0) {
|
||||
totalRecords = Number(resCount.data[0]['total']);
|
||||
|
||||
if (pkColumns.length === 0) {
|
||||
const pkKey = `${tab.connectionId}|${dbName}|${tableName}`;
|
||||
if (pkKeyRef.current !== pkKey) {
|
||||
pkKeyRef.current = pkKey;
|
||||
const pkSeq = ++pkSeqRef.current;
|
||||
DBGetColumns(config as any, dbName, tableName)
|
||||
.then((resCols: any) => {
|
||||
if (pkSeqRef.current !== pkSeq) return;
|
||||
if (pkKeyRef.current !== pkKey) return;
|
||||
if (!resCols?.success) return;
|
||||
const pks = (resCols.data as ColumnDefinition[]).filter((c: any) => c.key === 'PRI').map((c: any) => c.name);
|
||||
setPkColumns(pks);
|
||||
})
|
||||
.catch(() => {
|
||||
if (pkSeqRef.current !== pkSeq) return;
|
||||
if (pkKeyRef.current !== pkKey) return;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (resData.success) {
|
||||
let resultData = resData.data as any[];
|
||||
if (!Array.isArray(resultData)) resultData = [];
|
||||
|
||||
const hasMore = resultData.length > size;
|
||||
if (hasMore) resultData = resultData.slice(0, size);
|
||||
|
||||
let fieldNames = resData.fields || [];
|
||||
if (fieldNames.length === 0 && resultData.length > 0) {
|
||||
fieldNames = Object.keys(resultData[0]);
|
||||
}
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
setColumnNames(fieldNames);
|
||||
|
||||
setData(resultData.map((row: any, i: number) => ({ ...row, key: `row-${i}` })));
|
||||
|
||||
setPagination(prev => ({ ...prev, current: page, pageSize: size, total: totalRecords }));
|
||||
resultData.forEach((row: any, i: number) => {
|
||||
if (row && typeof row === 'object') row[GONAVI_ROW_KEY] = `row-${offset + i}`;
|
||||
});
|
||||
setData(resultData);
|
||||
const countKey = `${tab.connectionId}|${dbName}|${tableName}|${whereSQL}`;
|
||||
const derivedTotalKnown = !hasMore;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : page * size + 1;
|
||||
if (derivedTotalKnown) countKeyRef.current = countKey;
|
||||
|
||||
setPagination(prev => {
|
||||
if (derivedTotalKnown) {
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: true };
|
||||
}
|
||||
if (prev.totalKnown && countKeyRef.current === countKey) {
|
||||
return { ...prev, current: page, pageSize: size };
|
||||
}
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: false };
|
||||
});
|
||||
|
||||
if (!derivedTotalKnown) {
|
||||
if (countKeyRef.current !== countKey) {
|
||||
countKeyRef.current = countKey;
|
||||
const countSeq = ++countSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
// 大表 COUNT(*) 可能非常慢,且在部分运行时环境下会影响后续操作响应;
|
||||
// 这里为统计请求设置更短的超时,避免“后台统计”长期占用资源。
|
||||
const countConfig: any = { ...(config as any), timeout: 5 };
|
||||
|
||||
DBQuery(countConfig, dbName, countSql)
|
||||
.then((resCount: any) => {
|
||||
const countDuration = Date.now() - countStart;
|
||||
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount.success ? 'success' : 'error',
|
||||
duration: countDuration,
|
||||
message: resCount.success ? '' : resCount.message,
|
||||
dbName
|
||||
});
|
||||
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
|
||||
if (!resCount.success) return;
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) return;
|
||||
|
||||
const total = Number(resCount.data[0]?.['total']);
|
||||
if (!Number.isFinite(total) || total < 0) return;
|
||||
|
||||
setPagination(prev => ({ ...prev, total, totalKnown: true }));
|
||||
})
|
||||
.catch(() => {
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
// 统计失败不影响主流程,不弹窗;可在日志里查看。
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
message.error(resData.message);
|
||||
message.error(String(resData.message || '查询失败'));
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
message.error("Error fetching data: " + e.message);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-error`,
|
||||
timestamp: Date.now(),
|
||||
sql: sql,
|
||||
sql: executedSql,
|
||||
status: 'error',
|
||||
duration: Date.now() - startTime,
|
||||
duration: Date.now() - requestStartTime,
|
||||
message: e.message,
|
||||
dbName
|
||||
});
|
||||
}
|
||||
setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns.length]);
|
||||
// Depend on pkColumns.length to avoid loop? No, pkColumns is updated inside.
|
||||
// Actually, 'pkColumns' state shouldn't trigger re-fetch.
|
||||
// The 'if (pkColumns.length === 0)' check is inside.
|
||||
// So adding pkColumns to dependency is safer but might trigger double fetch if not careful?
|
||||
// Only if pkColumns changes. It changes once from [] to [...].
|
||||
// So it's fine.
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns]);
|
||||
// 依赖 pkColumns:在无手动排序时可回退到主键稳定排序。
|
||||
// 主键信息只会在首次加载后更新一次,避免循环查询。
|
||||
|
||||
// Handlers memoized
|
||||
const handleReload = useCallback(() => fetchData(), [fetchData]);
|
||||
const handleSort = useCallback((field: string, order: string) => setSortInfo({ columnKey: field, order }), []);
|
||||
const handleReload = useCallback(() => {
|
||||
fetchData(pagination.current, pagination.pageSize);
|
||||
}, [fetchData, pagination.current, pagination.pageSize]);
|
||||
const handleSort = useCallback((field: string, order: string) => {
|
||||
const normalizedOrder = order === 'ascend' || order === 'descend' ? order : '';
|
||||
const normalizedField = String(field || '').trim();
|
||||
if (!normalizedField || !normalizedOrder) {
|
||||
setSortInfo(null);
|
||||
return;
|
||||
}
|
||||
setSortInfo({ columnKey: normalizedField, order: normalizedOrder });
|
||||
}, []);
|
||||
const handlePageChange = useCallback((page: number, size: number) => fetchData(page, size), [fetchData]);
|
||||
const handleToggleFilter = useCallback(() => setShowFilter(prev => !prev), []);
|
||||
const handleApplyFilter = useCallback((conditions: any[]) => setFilterConditions(conditions), []);
|
||||
const handleApplyFilter = useCallback((conditions: FilterCondition[]) => setFilterConditions(conditions), []);
|
||||
|
||||
useEffect(() => {
|
||||
fetchData(1, pagination.pageSize);
|
||||
}, [tab, sortInfo, filterConditions]); // Initial load and re-load on sort/filter
|
||||
|
||||
return (
|
||||
<div style={{ height: '100%', width: '100%', overflow: 'hidden' }}>
|
||||
<div style={{ flex: '1 1 auto', minHeight: 0, minWidth: 0, height: '100%', width: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column' }}>
|
||||
<DataGrid
|
||||
data={data}
|
||||
columnNames={columnNames}
|
||||
@@ -185,6 +269,8 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
showFilter={showFilter}
|
||||
onToggleFilter={handleToggleFilter}
|
||||
onApplyFilter={handleApplyFilter}
|
||||
readOnly={forceReadOnly}
|
||||
sortInfoExternal={sortInfo}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
393
frontend/src/components/DefinitionViewer.tsx
Normal file
393
frontend/src/components/DefinitionViewer.tsx
Normal file
@@ -0,0 +1,393 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import Editor from '@monaco-editor/react';
|
||||
import { Spin, Alert } from 'antd';
|
||||
import { TabData } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery } from '../../wailsjs/go/app/App';
|
||||
|
||||
interface DefinitionViewerProps {
|
||||
tab: TabData;
|
||||
}
|
||||
|
||||
const DefinitionViewer: React.FC<DefinitionViewerProps> = ({ tab }) => {
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [definition, setDefinition] = useState<string>('');
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
|
||||
const escapeSQLLiteral = (raw: string): string => String(raw || '').replace(/'/g, "''");
|
||||
|
||||
const getMetadataDialect = (conn: any): string => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'custom') {
|
||||
return String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
}
|
||||
if (type === 'mariadb' || type === 'sphinx') return 'mysql';
|
||||
if (type === 'dameng') return 'dm';
|
||||
return type;
|
||||
};
|
||||
|
||||
const isSphinxConnection = (conn: any): boolean => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'sphinx') return true;
|
||||
if (type !== 'custom') return false;
|
||||
const driver = String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
return driver === 'sphinx' || driver === 'sphinxql';
|
||||
};
|
||||
|
||||
const parseSchemaAndName = (fullName: string): { schema: string; name: string } => {
|
||||
const raw = String(fullName || '').trim();
|
||||
const idx = raw.lastIndexOf('.');
|
||||
if (idx > 0 && idx < raw.length - 1) {
|
||||
return { schema: raw.substring(0, idx), name: raw.substring(idx + 1) };
|
||||
}
|
||||
return { schema: '', name: raw };
|
||||
};
|
||||
|
||||
const buildShowViewQueries = (dialect: string, viewName: string, dbName: string): string[] => {
|
||||
const { schema, name } = parseSchemaAndName(viewName);
|
||||
const safeName = escapeSQLLiteral(name);
|
||||
const safeDbName = escapeSQLLiteral(dbName);
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql':
|
||||
return [
|
||||
`SHOW CREATE VIEW \`${name.replace(/`/g, '``')}\``,
|
||||
safeDbName
|
||||
? `SELECT VIEW_DEFINITION AS view_definition FROM information_schema.views WHERE table_schema = '${safeDbName}' AND table_name = '${safeName}' LIMIT 1`
|
||||
: '',
|
||||
`SHOW CREATE TABLE \`${name.replace(/`/g, '``')}\``,
|
||||
].filter(Boolean);
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase': {
|
||||
const schemaRef = schema || 'public';
|
||||
return [`SELECT pg_get_viewdef('${escapeSQLLiteral(schemaRef)}.${safeName}'::regclass, true) AS view_definition`];
|
||||
}
|
||||
case 'sqlserver':
|
||||
return [`SELECT OBJECT_DEFINITION(OBJECT_ID('${escapeSQLLiteral(viewName)}')) AS view_definition`];
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
if (schema) {
|
||||
return [`SELECT TEXT AS view_definition FROM ALL_VIEWS WHERE OWNER = '${escapeSQLLiteral(schema).toUpperCase()}' AND VIEW_NAME = '${safeName.toUpperCase()}'`];
|
||||
}
|
||||
if (safeDbName) {
|
||||
return [`SELECT TEXT AS view_definition FROM ALL_VIEWS WHERE OWNER = '${safeDbName.toUpperCase()}' AND VIEW_NAME = '${safeName.toUpperCase()}'`];
|
||||
}
|
||||
return [`SELECT TEXT AS view_definition FROM USER_VIEWS WHERE VIEW_NAME = '${safeName.toUpperCase()}'`];
|
||||
case 'sqlite':
|
||||
return [`SELECT sql AS view_definition FROM sqlite_master WHERE type='view' AND name='${safeName}'`];
|
||||
default:
|
||||
return [`-- 暂不支持该数据库类型的视图定义查看`];
|
||||
}
|
||||
};
|
||||
|
||||
const buildShowRoutineQueries = (dialect: string, routineName: string, routineType: string, dbName: string): string[] => {
|
||||
const { schema, name } = parseSchemaAndName(routineName);
|
||||
const safeName = escapeSQLLiteral(name);
|
||||
const safeDbName = escapeSQLLiteral(dbName);
|
||||
const upperType = (routineType || 'FUNCTION').toUpperCase();
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql':
|
||||
return [
|
||||
`SHOW CREATE ${upperType} \`${name.replace(/`/g, '``')}\``,
|
||||
safeDbName
|
||||
? `SELECT ROUTINE_DEFINITION AS routine_definition, ROUTINE_TYPE AS routine_type FROM information_schema.routines WHERE routine_schema = '${safeDbName}' AND routine_name = '${safeName}' LIMIT 1`
|
||||
: '',
|
||||
upperType === 'PROCEDURE'
|
||||
? `SHOW PROCEDURE STATUS LIKE '${safeName}'`
|
||||
: `SHOW FUNCTION STATUS LIKE '${safeName}'`,
|
||||
].filter(Boolean);
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase': {
|
||||
const schemaRef = schema || 'public';
|
||||
return [`SELECT pg_get_functiondef(p.oid) AS routine_definition FROM pg_proc p JOIN pg_namespace n ON p.pronamespace = n.oid WHERE n.nspname = '${escapeSQLLiteral(schemaRef)}' AND p.proname = '${safeName}' LIMIT 1`];
|
||||
}
|
||||
case 'sqlserver':
|
||||
return [`SELECT OBJECT_DEFINITION(OBJECT_ID('${escapeSQLLiteral(routineName)}')) AS routine_definition`];
|
||||
case 'oracle':
|
||||
case 'dm': {
|
||||
const owner = schema ? escapeSQLLiteral(schema).toUpperCase() : (safeDbName ? safeDbName.toUpperCase() : '');
|
||||
if (owner) {
|
||||
return [`SELECT TEXT FROM ALL_SOURCE WHERE OWNER = '${owner}' AND NAME = '${safeName.toUpperCase()}' AND TYPE = '${upperType}' ORDER BY LINE`];
|
||||
}
|
||||
return [`SELECT TEXT FROM USER_SOURCE WHERE NAME = '${safeName.toUpperCase()}' AND TYPE = '${upperType}' ORDER BY LINE`];
|
||||
}
|
||||
case 'sqlite':
|
||||
return [`-- SQLite 不支持存储函数/存储过程`];
|
||||
default:
|
||||
return [`-- 暂不支持该数据库类型的函数/存储过程定义查看`];
|
||||
}
|
||||
};
|
||||
|
||||
const runQueryCandidates = async (
|
||||
config: Record<string, any>,
|
||||
dbName: string,
|
||||
queries: string[]
|
||||
): Promise<{ success: boolean; data: any[]; message?: string }> => {
|
||||
let lastMessage = '';
|
||||
let hasSuccessfulQuery = false;
|
||||
for (const query of queries) {
|
||||
const sql = String(query || '').trim();
|
||||
if (!sql) continue;
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, sql);
|
||||
if (!result.success || !Array.isArray(result.data)) {
|
||||
lastMessage = result.message || lastMessage;
|
||||
continue;
|
||||
}
|
||||
hasSuccessfulQuery = true;
|
||||
if (result.data.length > 0) {
|
||||
return { success: true, data: result.data };
|
||||
}
|
||||
} catch (error: any) {
|
||||
lastMessage = error?.message || String(error);
|
||||
}
|
||||
}
|
||||
if (hasSuccessfulQuery) {
|
||||
return { success: true, data: [] };
|
||||
}
|
||||
return { success: false, data: [], message: lastMessage };
|
||||
};
|
||||
|
||||
const getVersionHint = async (config: Record<string, any>, dbName: string): Promise<string> => {
|
||||
const candidates = [
|
||||
`SELECT VERSION() AS version`,
|
||||
`SHOW VARIABLES LIKE 'version'`,
|
||||
];
|
||||
for (const query of candidates) {
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, query);
|
||||
if (!result.success || !Array.isArray(result.data) || result.data.length === 0) {
|
||||
continue;
|
||||
}
|
||||
const row = result.data[0] as Record<string, any>;
|
||||
const version =
|
||||
row.version
|
||||
|| row.VERSION
|
||||
|| row.Value
|
||||
|| row.value
|
||||
|| Object.values(row)[1]
|
||||
|| Object.values(row)[0];
|
||||
const text = String(version || '').trim();
|
||||
if (text) return text;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
const extractViewDefinition = (dialect: string, data: any[]): string => {
|
||||
if (!data || data.length === 0) return '-- 未找到视图定义';
|
||||
const row = data[0];
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql': {
|
||||
const keys = Object.keys(row);
|
||||
const textDefinition = row.view_definition || row.VIEW_DEFINITION;
|
||||
if (textDefinition) return String(textDefinition);
|
||||
const sqlKey = keys.find(k => k.toLowerCase().includes('create view') || k.toLowerCase() === 'create view');
|
||||
if (sqlKey) return row[sqlKey];
|
||||
const tableSqlKey = keys.find(k => k.toLowerCase().includes('create table'));
|
||||
if (tableSqlKey) return row[tableSqlKey];
|
||||
for (const key of keys) {
|
||||
const val = String(row[key] || '');
|
||||
if (val.toUpperCase().includes('CREATE') && (val.toUpperCase().includes('VIEW') || val.toUpperCase().includes('TABLE'))) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
return row.view_definition || row.VIEW_DEFINITION || row.text || row.TEXT || Object.values(row)[0] || '';
|
||||
default:
|
||||
return row.view_definition || row.VIEW_DEFINITION || row.sql || row.SQL || Object.values(row)[0] || '';
|
||||
}
|
||||
};
|
||||
|
||||
const extractRoutineDefinition = (dialect: string, data: any[]): string => {
|
||||
if (!data || data.length === 0) return '-- 未找到函数/存储过程定义';
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql': {
|
||||
const row = data[0];
|
||||
const keys = Object.keys(row);
|
||||
if (row.routine_definition || row.ROUTINE_DEFINITION) {
|
||||
return String(row.routine_definition || row.ROUTINE_DEFINITION);
|
||||
}
|
||||
const sqlKey = keys.find(k => k.toLowerCase().includes('create function') || k.toLowerCase().includes('create procedure'));
|
||||
if (sqlKey) return row[sqlKey];
|
||||
for (const key of keys) {
|
||||
const val = String(row[key] || '');
|
||||
if (val.toUpperCase().includes('CREATE') && (val.toUpperCase().includes('FUNCTION') || val.toUpperCase().includes('PROCEDURE'))) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
const routineName = String(row.Name || row.name || '').trim();
|
||||
if (routineName) {
|
||||
const routineType = String(row.Type || row.type || row.ROUTINE_TYPE || row.routine_type || 'FUNCTION').trim().toUpperCase();
|
||||
return `-- 当前数据源未返回可执行定义文本,已返回元数据\n-- 名称: ${routineName}\n-- 类型: ${routineType}\n${JSON.stringify(row, null, 2)}`;
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm': {
|
||||
// Oracle/DM ALL_SOURCE returns multiple rows, one per line
|
||||
return data.map(row => row.text || row.TEXT || Object.values(row)[0] || '').join('');
|
||||
}
|
||||
default: {
|
||||
const row = data[0];
|
||||
return row.routine_definition || row.ROUTINE_DEFINITION || Object.values(row)[0] || '';
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const loadDefinition = async () => {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
setError('未找到数据库连接');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const dbName = tab.dbName || '';
|
||||
const dialect = getMetadataDialect(conn);
|
||||
const sphinxLike = isSphinxConnection(conn) && dialect === 'mysql';
|
||||
|
||||
let queries: string[];
|
||||
let extractFn: (dialect: string, data: any[]) => string;
|
||||
let objectLabel: string;
|
||||
|
||||
if (tab.type === 'view-def') {
|
||||
const viewName = tab.viewName || '';
|
||||
if (!viewName) {
|
||||
setError('视图名称为空');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
queries = buildShowViewQueries(dialect, viewName, dbName);
|
||||
extractFn = extractViewDefinition;
|
||||
objectLabel = '视图';
|
||||
} else {
|
||||
const routineName = tab.routineName || '';
|
||||
const routineType = tab.routineType || 'FUNCTION';
|
||||
if (!routineName) {
|
||||
setError('函数/存储过程名称为空');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
queries = buildShowRoutineQueries(dialect, routineName, routineType, dbName);
|
||||
extractFn = extractRoutineDefinition;
|
||||
objectLabel = '函数/存储过程';
|
||||
}
|
||||
|
||||
if (!queries.length || String(queries[0] || '').startsWith('--')) {
|
||||
setDefinition(String(queries[0] || '-- 暂不支持该对象定义查看'));
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || '',
|
||||
database: conn.config.database || '',
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: '', port: 22, user: '', password: '', keyPath: '' }
|
||||
};
|
||||
|
||||
const result = await runQueryCandidates(config, dbName, queries);
|
||||
|
||||
if (result.success && Array.isArray(result.data) && result.data.length > 0) {
|
||||
const def = extractFn(dialect, result.data);
|
||||
setDefinition(def);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.success) {
|
||||
if (sphinxLike) {
|
||||
const version = await getVersionHint(config, dbName);
|
||||
const versionText = version ? `(版本: ${version})` : '';
|
||||
setDefinition(`-- 当前 Sphinx 实例${versionText}未返回${objectLabel}定义。\n-- 已执行多套兼容查询,可能是版本能力限制或对象类型不支持。`);
|
||||
return;
|
||||
}
|
||||
setDefinition(`-- 未找到${objectLabel}定义`);
|
||||
} else if (sphinxLike) {
|
||||
const version = await getVersionHint(config, dbName);
|
||||
const versionText = version ? `(版本: ${version})` : '';
|
||||
setDefinition(`-- 当前 Sphinx 实例${versionText}不支持${objectLabel}定义查询。\n-- 已自动尝试兼容语句,返回失败信息: ${result.message || 'unknown error'}`);
|
||||
} else {
|
||||
setError(result.message || '查询定义失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('查询定义失败: ' + (e?.message || String(e)));
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
loadDefinition();
|
||||
}, [tab.connectionId, tab.dbName, tab.viewName, tab.routineName, tab.routineType, tab.type, connections]);
|
||||
|
||||
const objectLabel = tab.type === 'view-def' ? '视图' : '函数/存储过程';
|
||||
const objectName = tab.type === 'view-def' ? tab.viewName : tab.routineName;
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
|
||||
<Spin tip={`加载${objectLabel}定义...`} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div style={{ padding: 16 }}>
|
||||
<Alert type="error" message="加载失败" description={error} showIcon />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{ padding: '8px 16px', borderBottom: darkMode ? '1px solid #303030' : '1px solid #f0f0f0' }}>
|
||||
<strong>{objectLabel}: </strong>{objectName}
|
||||
{tab.dbName && <span style={{ marginLeft: 16, color: '#888' }}>数据库: {tab.dbName}</span>}
|
||||
{tab.routineType && <span style={{ marginLeft: 16, color: '#888' }}>类型: {tab.routineType}</span>}
|
||||
</div>
|
||||
<div style={{ flex: 1, minHeight: 0 }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
language="sql"
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={definition}
|
||||
options={{
|
||||
readOnly: true,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
lineNumbers: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
wordWrap: 'on',
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default DefinitionViewer;
|
||||
250
frontend/src/components/ImportPreviewModal.tsx
Normal file
250
frontend/src/components/ImportPreviewModal.tsx
Normal file
@@ -0,0 +1,250 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Modal, Table, Alert, Progress, Button, Space } from 'antd';
|
||||
import { CheckCircleOutlined, CloseCircleOutlined } from '@ant-design/icons';
|
||||
import { PreviewImportFile, ImportDataWithProgress } from '../../wailsjs/go/app/App';
|
||||
import { EventsOn, EventsOff } from '../../wailsjs/runtime/runtime';
|
||||
import { useStore } from '../store';
|
||||
|
||||
interface ImportPreviewModalProps {
|
||||
visible: boolean;
|
||||
filePath: string;
|
||||
connectionId: string;
|
||||
dbName: string;
|
||||
tableName: string;
|
||||
onClose: () => void;
|
||||
onSuccess: () => void;
|
||||
}
|
||||
|
||||
interface PreviewData {
|
||||
columns: string[];
|
||||
totalRows: number;
|
||||
previewRows: any[];
|
||||
}
|
||||
|
||||
interface ImportProgress {
|
||||
current: number;
|
||||
total: number;
|
||||
success: number;
|
||||
errors: number;
|
||||
}
|
||||
|
||||
const ImportPreviewModal: React.FC<ImportPreviewModalProps> = ({
|
||||
visible,
|
||||
filePath,
|
||||
connectionId,
|
||||
dbName,
|
||||
tableName,
|
||||
onClose,
|
||||
onSuccess
|
||||
}) => {
|
||||
const connections = useStore(state => state.connections);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [previewData, setPreviewData] = useState<PreviewData | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [importing, setImporting] = useState(false);
|
||||
const [progress, setProgress] = useState<ImportProgress | null>(null);
|
||||
const [importResult, setImportResult] = useState<any>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (visible && filePath) {
|
||||
loadPreview();
|
||||
}
|
||||
}, [visible, filePath]);
|
||||
|
||||
useEffect(() => {
|
||||
if (importing) {
|
||||
const unsubscribe = EventsOn('import:progress', (data: ImportProgress) => {
|
||||
setProgress(data);
|
||||
});
|
||||
return () => {
|
||||
EventsOff('import:progress');
|
||||
};
|
||||
}
|
||||
}, [importing]);
|
||||
|
||||
const loadPreview = async () => {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const res = await PreviewImportFile(filePath);
|
||||
if (res.success && res.data) {
|
||||
setPreviewData({
|
||||
columns: res.data.columns || [],
|
||||
totalRows: res.data.totalRows || 0,
|
||||
previewRows: res.data.previewRows || []
|
||||
});
|
||||
} else {
|
||||
setError(res.message || '预览失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('预览失败: ' + e.message);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleImport = async () => {
|
||||
if (!previewData) return;
|
||||
|
||||
setImporting(true);
|
||||
setProgress({ current: 0, total: previewData.totalRows, success: 0, errors: 0 });
|
||||
setImportResult(null);
|
||||
|
||||
try {
|
||||
const conn = connections.find(c => c.id === connectionId);
|
||||
if (!conn) {
|
||||
setError('连接配置未找到');
|
||||
setImporting(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || '',
|
||||
database: conn.config.database || '',
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: '', port: 22, user: '', password: '', keyPath: '' }
|
||||
};
|
||||
|
||||
const res = await ImportDataWithProgress(config as any, dbName, tableName, filePath);
|
||||
|
||||
if (res.success && res.data) {
|
||||
setImportResult(res.data);
|
||||
if (res.data.failed === 0) {
|
||||
onSuccess();
|
||||
}
|
||||
} else {
|
||||
setError(res.message || '导入失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('导入失败: ' + e.message);
|
||||
} finally {
|
||||
setImporting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const columns = previewData?.columns.map(col => ({
|
||||
title: col,
|
||||
dataIndex: col,
|
||||
key: col,
|
||||
ellipsis: true,
|
||||
width: 150
|
||||
})) || [];
|
||||
|
||||
const progressPercent = progress ? Math.round((progress.current / progress.total) * 100) : 0;
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title="导入数据预览"
|
||||
open={visible}
|
||||
onCancel={onClose}
|
||||
width={900}
|
||||
footer={
|
||||
importResult ? (
|
||||
<Space>
|
||||
<Button onClick={onClose}>关闭</Button>
|
||||
</Space>
|
||||
) : importing ? null : (
|
||||
<Space>
|
||||
<Button onClick={onClose}>取消</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={handleImport}
|
||||
disabled={!previewData || loading}
|
||||
>
|
||||
开始导入
|
||||
</Button>
|
||||
</Space>
|
||||
)
|
||||
}
|
||||
>
|
||||
{error && <Alert type="error" message={error} style={{ marginBottom: 16 }} showIcon />}
|
||||
|
||||
{loading && <div style={{ textAlign: 'center', padding: 40 }}>加载预览数据...</div>}
|
||||
|
||||
{!loading && previewData && !importing && !importResult && (
|
||||
<>
|
||||
<Alert
|
||||
type="info"
|
||||
message={`共 ${previewData.totalRows} 行数据,${previewData.columns.length} 个字段`}
|
||||
description='以下是前 5 行预览数据,确认无误后点击“开始导入”'
|
||||
style={{ marginBottom: 16 }}
|
||||
showIcon
|
||||
/>
|
||||
<div style={{ marginBottom: 8, fontWeight: 600 }}>字段列表:</div>
|
||||
<div style={{ marginBottom: 16, padding: 8, background: '#f5f5f5', borderRadius: 4 }}>
|
||||
{previewData.columns.join(', ')}
|
||||
</div>
|
||||
<div style={{ marginBottom: 8, fontWeight: 600 }}>数据预览(前 5 行):</div>
|
||||
<Table
|
||||
dataSource={previewData.previewRows}
|
||||
columns={columns}
|
||||
pagination={false}
|
||||
scroll={{ x: 'max-content' }}
|
||||
size="small"
|
||||
bordered
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
{importing && progress && (
|
||||
<div style={{ padding: '40px 20px' }}>
|
||||
<div style={{ marginBottom: 16, fontSize: 16, fontWeight: 600, textAlign: 'center' }}>
|
||||
正在导入数据...
|
||||
</div>
|
||||
<Progress percent={progressPercent} status="active" />
|
||||
<div style={{ marginTop: 16, textAlign: 'center', color: '#666' }}>
|
||||
已处理 {progress.current} / {progress.total} 行
|
||||
<span style={{ marginLeft: 16, color: '#52c41a' }}>
|
||||
<CheckCircleOutlined /> 成功 {progress.success}
|
||||
</span>
|
||||
{progress.errors > 0 && (
|
||||
<span style={{ marginLeft: 16, color: '#ff4d4f' }}>
|
||||
<CloseCircleOutlined /> 失败 {progress.errors}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{importResult && (
|
||||
<div style={{ padding: 20 }}>
|
||||
<Alert
|
||||
type={importResult.failed === 0 ? 'success' : 'warning'}
|
||||
message="导入完成"
|
||||
description={
|
||||
<div>
|
||||
<div>成功导入 {importResult.success} 行</div>
|
||||
{importResult.failed > 0 && <div>失败 {importResult.failed} 行</div>}
|
||||
</div>
|
||||
}
|
||||
showIcon
|
||||
style={{ marginBottom: 16 }}
|
||||
/>
|
||||
{importResult.errorLogs && importResult.errorLogs.length > 0 && (
|
||||
<>
|
||||
<div style={{ marginBottom: 8, fontWeight: 600, color: '#ff4d4f' }}>错误日志:</div>
|
||||
<div style={{
|
||||
maxHeight: 300,
|
||||
overflow: 'auto',
|
||||
background: '#fff1f0',
|
||||
border: '1px solid #ffccc7',
|
||||
borderRadius: 4,
|
||||
padding: 12,
|
||||
fontSize: 12,
|
||||
fontFamily: 'monospace'
|
||||
}}>
|
||||
{importResult.errorLogs.map((log: string, idx: number) => (
|
||||
<div key={idx} style={{ marginBottom: 4 }}>{log}</div>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default ImportPreviewModal;
|
||||
@@ -2,6 +2,7 @@ import React, { useRef, useEffect } from 'react';
|
||||
import { Table, Tag, Button, Tooltip } from 'antd';
|
||||
import { ClearOutlined, CloseOutlined, CaretRightOutlined, BugOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
|
||||
interface LogPanelProps {
|
||||
height: number;
|
||||
@@ -10,7 +11,26 @@ interface LogPanelProps {
|
||||
}
|
||||
|
||||
const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) => {
|
||||
const { sqlLogs, clearSqlLogs, darkMode } = useStore();
|
||||
const sqlLogs = useStore(state => state.sqlLogs);
|
||||
const clearSqlLogs = useStore(state => state.clearSqlLogs);
|
||||
const theme = useStore(state => state.theme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const opacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
|
||||
// Background Helper
|
||||
const getBg = (darkHex: string) => {
|
||||
if (!darkMode) return `rgba(255, 255, 255, ${opacity})`;
|
||||
const hex = darkHex.replace('#', '');
|
||||
const r = parseInt(hex.substring(0, 2), 16);
|
||||
const g = parseInt(hex.substring(2, 4), 16);
|
||||
const b = parseInt(hex.substring(4, 6), 16);
|
||||
return `rgba(${r}, ${g}, ${b}, ${opacity})`;
|
||||
};
|
||||
const bgMain = getBg('#1f1f1f');
|
||||
const bgToolbar = getBg('#2a2a2a');
|
||||
const logScrollbarThumb = darkMode ? 'rgba(255, 255, 255, 0.34)' : 'rgba(0, 0, 0, 0.26)';
|
||||
const logScrollbarThumbHover = darkMode ? 'rgba(255, 255, 255, 0.5)' : 'rgba(0, 0, 0, 0.36)';
|
||||
|
||||
const columns = [
|
||||
{
|
||||
@@ -51,8 +71,8 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
return (
|
||||
<div style={{
|
||||
height,
|
||||
borderTop: darkMode ? '1px solid #303030' : '1px solid #d9d9d9',
|
||||
background: darkMode ? '#1f1f1f' : '#fff',
|
||||
borderTop: 'none',
|
||||
background: bgMain,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
position: 'relative',
|
||||
@@ -75,11 +95,10 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
{/* Toolbar */}
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
borderBottom: darkMode ? '1px solid #303030' : '1px solid #f0f0f0',
|
||||
borderBottom: 'none',
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
background: darkMode ? '#2a2a2a' : '#fafafa',
|
||||
height: 32
|
||||
}}>
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8, fontWeight: 'bold', fontSize: '12px' }}>
|
||||
@@ -96,8 +115,9 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
</div>
|
||||
|
||||
{/* List */}
|
||||
<div style={{ flex: 1, overflow: 'auto' }}>
|
||||
<div className="log-panel-scroll" style={{ flex: 1, overflow: 'auto' }}>
|
||||
<Table
|
||||
className="log-panel-table"
|
||||
dataSource={sqlLogs}
|
||||
columns={columns}
|
||||
size="small"
|
||||
@@ -107,8 +127,37 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
// scroll={{ y: height - 32 }} // Let flex handle it
|
||||
/>
|
||||
</div>
|
||||
<style>{`
|
||||
.log-panel-scroll {
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: ${logScrollbarThumb} transparent;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar-track,
|
||||
.log-panel-scroll::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar-thumb {
|
||||
background: ${logScrollbarThumb};
|
||||
border-radius: 8px;
|
||||
border: 2px solid transparent;
|
||||
background-clip: padding-box;
|
||||
}
|
||||
.log-panel-scroll::-webkit-scrollbar-thumb:hover {
|
||||
background: ${logScrollbarThumbHover};
|
||||
background-clip: padding-box;
|
||||
}
|
||||
.log-panel-table .ant-table,
|
||||
.log-panel-table .ant-table-container,
|
||||
.log-panel-table .ant-table-tbody > tr > td {
|
||||
background: transparent !important;
|
||||
}
|
||||
`}</style>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default LogPanel;
|
||||
export default LogPanel;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
205
frontend/src/components/RedisCommandEditor.tsx
Normal file
205
frontend/src/components/RedisCommandEditor.tsx
Normal file
@@ -0,0 +1,205 @@
|
||||
import React, { useState, useCallback, useRef } from 'react';
|
||||
import { Button, Space, message } from 'antd';
|
||||
import { PlayCircleOutlined, ClearOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import Editor, { OnMount } from '@monaco-editor/react';
|
||||
|
||||
interface RedisCommandEditorProps {
|
||||
connectionId: string;
|
||||
redisDB: number;
|
||||
}
|
||||
|
||||
interface CommandResult {
|
||||
command: string;
|
||||
result: any;
|
||||
error?: string;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
const RedisCommandEditor: React.FC<RedisCommandEditorProps> = ({ connectionId, redisDB }) => {
|
||||
const { connections } = useStore();
|
||||
const connection = connections.find(c => c.id === connectionId);
|
||||
|
||||
const [command, setCommand] = useState('');
|
||||
const [results, setResults] = useState<CommandResult[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const editorRef = useRef<any>(null);
|
||||
|
||||
const getConfig = useCallback(() => {
|
||||
if (!connection) return null;
|
||||
return {
|
||||
...connection.config,
|
||||
port: Number(connection.config.port),
|
||||
password: connection.config.password || "",
|
||||
useSSH: connection.config.useSSH || false,
|
||||
ssh: connection.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
redisDB: redisDB
|
||||
};
|
||||
}, [connection, redisDB]);
|
||||
|
||||
const handleEditorMount: OnMount = (editor) => {
|
||||
editorRef.current = editor;
|
||||
// Add keyboard shortcut for execute
|
||||
editor.addCommand(
|
||||
// Ctrl/Cmd + Enter
|
||||
2048 | 3, // KeyMod.CtrlCmd | KeyCode.Enter
|
||||
() => handleExecute()
|
||||
);
|
||||
};
|
||||
|
||||
const handleExecute = async () => {
|
||||
const config = getConfig();
|
||||
if (!config) return;
|
||||
|
||||
const cmdToExecute = command.trim();
|
||||
if (!cmdToExecute) {
|
||||
message.warning('请输入命令');
|
||||
return;
|
||||
}
|
||||
|
||||
// Support multiple commands separated by newlines
|
||||
const commands = cmdToExecute.split('\n').filter(c => c.trim() && !c.trim().startsWith('//') && !c.trim().startsWith('#'));
|
||||
|
||||
setLoading(true);
|
||||
const newResults: CommandResult[] = [];
|
||||
|
||||
for (const cmd of commands) {
|
||||
const trimmedCmd = cmd.trim();
|
||||
if (!trimmedCmd) continue;
|
||||
|
||||
try {
|
||||
const res = await (window as any).go.app.App.RedisExecuteCommand(config, trimmedCmd);
|
||||
newResults.push({
|
||||
command: trimmedCmd,
|
||||
result: res.success ? res.data : null,
|
||||
error: res.success ? undefined : res.message,
|
||||
timestamp: Date.now()
|
||||
});
|
||||
} catch (e: any) {
|
||||
newResults.push({
|
||||
command: trimmedCmd,
|
||||
result: null,
|
||||
error: e?.message || String(e),
|
||||
timestamp: Date.now()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
setResults(prev => [...newResults, ...prev]);
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
const handleClear = () => {
|
||||
setResults([]);
|
||||
};
|
||||
|
||||
const formatResult = (result: any): string => {
|
||||
if (result === null || result === undefined) {
|
||||
return '(nil)';
|
||||
}
|
||||
if (typeof result === 'string') {
|
||||
return `"${result}"`;
|
||||
}
|
||||
if (typeof result === 'number') {
|
||||
return `(integer) ${result}`;
|
||||
}
|
||||
if (Array.isArray(result)) {
|
||||
if (result.length === 0) {
|
||||
return '(empty array)';
|
||||
}
|
||||
return result.map((item, index) => `${index + 1}) ${formatResult(item)}`).join('\n');
|
||||
}
|
||||
if (typeof result === 'object') {
|
||||
return JSON.stringify(result, null, 2);
|
||||
}
|
||||
return String(result);
|
||||
};
|
||||
|
||||
if (!connection) {
|
||||
return <div style={{ padding: 20 }}>连接不存在</div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
{/* Command Input */}
|
||||
<div style={{ borderBottom: '1px solid #f0f0f0' }}>
|
||||
<div style={{ padding: '8px 12px', borderBottom: '1px solid #f0f0f0', display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Space>
|
||||
<span style={{ fontWeight: 500 }}>Redis 命令</span>
|
||||
<span style={{ color: '#999', fontSize: 12 }}>db{redisDB}</span>
|
||||
</Space>
|
||||
<Space>
|
||||
<Button
|
||||
type="primary"
|
||||
icon={<PlayCircleOutlined />}
|
||||
onClick={handleExecute}
|
||||
loading={loading}
|
||||
>
|
||||
执行 (Ctrl+Enter)
|
||||
</Button>
|
||||
<Button icon={<ClearOutlined />} onClick={handleClear}>清空结果</Button>
|
||||
</Space>
|
||||
</div>
|
||||
<Editor
|
||||
height="150px"
|
||||
defaultLanguage="plaintext"
|
||||
value={command}
|
||||
onChange={(value) => setCommand(value || '')}
|
||||
onMount={handleEditorMount}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
lineNumbers: 'on',
|
||||
fontSize: 14,
|
||||
wordWrap: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
automaticLayout: true,
|
||||
tabSize: 2
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Results */}
|
||||
<div style={{ flex: 1, overflow: 'auto', background: '#1e1e1e', color: '#d4d4d4', fontFamily: 'monospace' }}>
|
||||
{results.length === 0 ? (
|
||||
<div style={{ padding: 20, color: '#666', textAlign: 'center' }}>
|
||||
输入 Redis 命令并按 Ctrl+Enter 执行
|
||||
<br />
|
||||
<span style={{ fontSize: 12 }}>支持多行命令,每行一个命令</span>
|
||||
</div>
|
||||
) : (
|
||||
results.map((item, index) => (
|
||||
<div key={item.timestamp + index} style={{ padding: '8px 12px', borderBottom: '1px solid #333' }}>
|
||||
<div style={{ color: '#569cd6', marginBottom: 4 }}>
|
||||
> {item.command}
|
||||
</div>
|
||||
{item.error ? (
|
||||
<div style={{ color: '#f14c4c', whiteSpace: 'pre-wrap' }}>
|
||||
(error) {item.error}
|
||||
</div>
|
||||
) : (
|
||||
<div style={{ color: '#ce9178', whiteSpace: 'pre-wrap' }}>
|
||||
{formatResult(item.result)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Common Commands Help */}
|
||||
<div style={{ padding: '8px 12px', borderTop: '1px solid #f0f0f0', background: '#fafafa', fontSize: 12, color: '#666' }}>
|
||||
常用命令:
|
||||
<span style={{ marginLeft: 8 }}>
|
||||
<code>KEYS *</code> |
|
||||
<code style={{ marginLeft: 8 }}>GET key</code> |
|
||||
<code style={{ marginLeft: 8 }}>SET key value</code> |
|
||||
<code style={{ marginLeft: 8 }}>HGETALL key</code> |
|
||||
<code style={{ marginLeft: 8 }}>INFO</code> |
|
||||
<code style={{ marginLeft: 8 }}>DBSIZE</code>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default RedisCommandEditor;
|
||||
1892
frontend/src/components/RedisViewer.tsx
Normal file
1892
frontend/src/components/RedisViewer.tsx
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,12 +1,44 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import { Tabs, Button } from 'antd';
|
||||
import { Tabs, Dropdown } from 'antd';
|
||||
import type { MenuProps } from 'antd';
|
||||
import { useStore } from '../store';
|
||||
import DataViewer from './DataViewer';
|
||||
import QueryEditor from './QueryEditor';
|
||||
import TableDesigner from './TableDesigner';
|
||||
import RedisViewer from './RedisViewer';
|
||||
import RedisCommandEditor from './RedisCommandEditor';
|
||||
import TriggerViewer from './TriggerViewer';
|
||||
import DefinitionViewer from './DefinitionViewer';
|
||||
import type { TabData } from '../types';
|
||||
|
||||
const detectConnectionEnvLabel = (connectionName: string): string | null => {
|
||||
const tokens = connectionName.toLowerCase().split(/[^a-z0-9]+/).filter(Boolean);
|
||||
if (tokens.includes('prod') || tokens.includes('production')) return 'PROD';
|
||||
if (tokens.includes('uat')) return 'UAT';
|
||||
if (tokens.includes('dev') || tokens.includes('development')) return 'DEV';
|
||||
if (tokens.includes('sit')) return 'SIT';
|
||||
if (tokens.includes('stg') || tokens.includes('stage') || tokens.includes('staging') || tokens.includes('pre')) return 'STG';
|
||||
if (tokens.includes('test') || tokens.includes('qa')) return 'TEST';
|
||||
return null;
|
||||
};
|
||||
|
||||
const buildTabDisplayTitle = (tab: TabData, connectionName: string | undefined): string => {
|
||||
if (tab.type !== 'table' && tab.type !== 'design') return tab.title;
|
||||
if (!connectionName) return tab.title;
|
||||
const prefix = detectConnectionEnvLabel(connectionName) || connectionName;
|
||||
return `[${prefix}] ${tab.title}`;
|
||||
};
|
||||
|
||||
const TabManager: React.FC = () => {
|
||||
const { tabs, activeTabId, setActiveTab, closeTab } = useStore();
|
||||
const tabs = useStore(state => state.tabs);
|
||||
const connections = useStore(state => state.connections);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
const setActiveTab = useStore(state => state.setActiveTab);
|
||||
const closeTab = useStore(state => state.closeTab);
|
||||
const closeOtherTabs = useStore(state => state.closeOtherTabs);
|
||||
const closeTabsToLeft = useStore(state => state.closeTabsToLeft);
|
||||
const closeTabsToRight = useStore(state => state.closeTabsToRight);
|
||||
const closeAllTabs = useStore(state => state.closeAllTabs);
|
||||
|
||||
const onChange = (newActiveKey: string) => {
|
||||
setActiveTab(newActiveKey);
|
||||
@@ -18,7 +50,9 @@ const TabManager: React.FC = () => {
|
||||
}
|
||||
};
|
||||
|
||||
const items = useMemo(() => tabs.map(tab => {
|
||||
const items = useMemo(() => tabs.map((tab, index) => {
|
||||
const connectionName = connections.find((conn) => conn.id === tab.connectionId)?.name;
|
||||
const displayTitle = buildTabDisplayTitle(tab, connectionName);
|
||||
let content;
|
||||
if (tab.type === 'query') {
|
||||
content = <QueryEditor tab={tab} />;
|
||||
@@ -26,28 +60,112 @@ const TabManager: React.FC = () => {
|
||||
content = <DataViewer tab={tab} />;
|
||||
} else if (tab.type === 'design') {
|
||||
content = <TableDesigner tab={tab} />;
|
||||
} else if (tab.type === 'redis-keys') {
|
||||
content = <RedisViewer connectionId={tab.connectionId} redisDB={tab.redisDB ?? 0} />;
|
||||
} else if (tab.type === 'redis-command') {
|
||||
content = <RedisCommandEditor connectionId={tab.connectionId} redisDB={tab.redisDB ?? 0} />;
|
||||
} else if (tab.type === 'trigger') {
|
||||
content = <TriggerViewer tab={tab} />;
|
||||
} else if (tab.type === 'view-def' || tab.type === 'routine-def') {
|
||||
content = <DefinitionViewer tab={tab} />;
|
||||
}
|
||||
|
||||
const menuItems: MenuProps['items'] = [
|
||||
{
|
||||
key: 'close-other',
|
||||
label: '关闭其他页',
|
||||
disabled: tabs.length <= 1,
|
||||
onClick: () => closeOtherTabs(tab.id),
|
||||
},
|
||||
{
|
||||
key: 'close-left',
|
||||
label: '关闭左侧',
|
||||
disabled: index === 0,
|
||||
onClick: () => closeTabsToLeft(tab.id),
|
||||
},
|
||||
{
|
||||
key: 'close-right',
|
||||
label: '关闭右侧',
|
||||
disabled: index === tabs.length - 1,
|
||||
onClick: () => closeTabsToRight(tab.id),
|
||||
},
|
||||
{ type: 'divider' },
|
||||
{
|
||||
key: 'close-all',
|
||||
label: '关闭所有',
|
||||
disabled: tabs.length === 0,
|
||||
onClick: () => closeAllTabs(),
|
||||
},
|
||||
];
|
||||
|
||||
return {
|
||||
label: tab.title,
|
||||
label: (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span onContextMenu={(e) => e.preventDefault()}>{displayTitle}</span>
|
||||
</Dropdown>
|
||||
),
|
||||
key: tab.id,
|
||||
children: content,
|
||||
};
|
||||
}), [tabs]);
|
||||
}), [tabs, connections, closeOtherTabs, closeTabsToLeft, closeTabsToRight, closeAllTabs]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<style>{`
|
||||
.ant-tabs-content { height: 100%; }
|
||||
.ant-tabs-tabpane { height: 100%; }
|
||||
.main-tabs {
|
||||
height: 100%;
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
}
|
||||
.main-tabs .ant-tabs-nav {
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
.main-tabs .ant-tabs-content-holder {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.main-tabs .ant-tabs-content {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.main-tabs .ant-tabs-tabpane {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
}
|
||||
.main-tabs .ant-tabs-tabpane > div {
|
||||
flex: 1 1 auto;
|
||||
min-height: 0;
|
||||
min-width: 0;
|
||||
}
|
||||
.main-tabs .ant-tabs-tabpane-hidden {
|
||||
display: none !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-nav::before {
|
||||
border-bottom: none !important;
|
||||
}
|
||||
`}</style>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
onChange={onChange}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
style={{ height: '100%' }}
|
||||
hideAdd
|
||||
/>
|
||||
</>
|
||||
|
||||
@@ -1,17 +1,14 @@
|
||||
import React, { useEffect, useState, useContext, useMemo, useRef } from 'react';
|
||||
import { Table, Tabs, Button, message, Input, Checkbox, Modal, AutoComplete, Tooltip, Select } from 'antd';
|
||||
import { ReloadOutlined, SaveOutlined, PlusOutlined, DeleteOutlined, MenuOutlined, FileTextOutlined } from '@ant-design/icons';
|
||||
import React, { useEffect, useState, useContext, useMemo, useRef, useCallback } from 'react';
|
||||
import { Table, Tabs, Button, message, Input, Checkbox, Modal, AutoComplete, Tooltip, Select, Empty, Space } from 'antd';
|
||||
import { ReloadOutlined, SaveOutlined, PlusOutlined, DeleteOutlined, MenuOutlined, FileTextOutlined, EyeOutlined, EditOutlined, ExclamationCircleOutlined } from '@ant-design/icons';
|
||||
import { DndContext, closestCenter, KeyboardSensor, PointerSensor, useSensor, useSensors, DragOverlay } from '@dnd-kit/core';
|
||||
import { arrayMove, SortableContext, sortableKeyboardCoordinates, verticalListSortingStrategy, useSortable } from '@dnd-kit/sortable';
|
||||
import { CSS } from '@dnd-kit/utilities';
|
||||
import { Resizable } from 'react-resizable';
|
||||
import Editor, { loader } from '@monaco-editor/react';
|
||||
import { TabData, ColumnDefinition, IndexDefinition, ForeignKeyDefinition, TriggerDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetColumns, DBGetIndexes, DBQuery, DBGetForeignKeys, DBGetTriggers, DBShowCreateTable } from '../../wailsjs/go/app/App';
|
||||
|
||||
// Need styles for react-resizable
|
||||
import 'react-resizable/css/styles.css';
|
||||
|
||||
interface EditableColumn extends ColumnDefinition {
|
||||
_key: string;
|
||||
isNew?: boolean;
|
||||
@@ -57,45 +54,43 @@ const COLLATIONS = {
|
||||
]
|
||||
};
|
||||
|
||||
// --- Resizable Header Component ---
|
||||
// --- Resizable Header Component (Native, same interaction as DataGrid) ---
|
||||
const ResizableTitle = (props: any) => {
|
||||
const { onResize, width, ...restProps } = props;
|
||||
const { onResizeStart, width, ...restProps } = props;
|
||||
const nextStyle = { ...(restProps.style || {}) } as React.CSSProperties;
|
||||
|
||||
if (width) {
|
||||
nextStyle.width = width;
|
||||
}
|
||||
|
||||
if (!width) {
|
||||
return <th {...restProps} />;
|
||||
return <th {...restProps} style={nextStyle} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<Resizable
|
||||
width={width}
|
||||
height={0}
|
||||
handle={
|
||||
<span
|
||||
className="react-resizable-handle"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
}}
|
||||
onMouseDown={(e) => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault(); // Prevent text selection and focus hijacking
|
||||
}}
|
||||
style={{
|
||||
position: 'absolute',
|
||||
right: -5,
|
||||
bottom: 0,
|
||||
top: 0,
|
||||
width: 10,
|
||||
cursor: 'col-resize',
|
||||
zIndex: 10
|
||||
}}
|
||||
/>
|
||||
}
|
||||
onResize={onResize}
|
||||
draggableOpts={{ enableUserSelectHack: true }}
|
||||
>
|
||||
<th {...restProps} style={{ ...restProps.style, position: 'relative' }} />
|
||||
</Resizable>
|
||||
<th {...restProps} style={{ ...nextStyle, position: 'relative' }}>
|
||||
{restProps.children}
|
||||
<span
|
||||
className="react-resizable-handle"
|
||||
onMouseDown={(e) => {
|
||||
e.stopPropagation();
|
||||
if (typeof onResizeStart === 'function') {
|
||||
onResizeStart(e);
|
||||
}
|
||||
}}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
style={{
|
||||
position: 'absolute',
|
||||
right: 0,
|
||||
bottom: 0,
|
||||
top: 0,
|
||||
width: 10,
|
||||
cursor: 'col-resize',
|
||||
zIndex: 10,
|
||||
touchAction: 'none',
|
||||
}}
|
||||
/>
|
||||
</th>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -162,13 +157,47 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [previewSql, setPreviewSql] = useState<string>('');
|
||||
const [isPreviewOpen, setIsPreviewOpen] = useState(false);
|
||||
const [activeKey, setActiveKey] = useState(tab.initialTab || "columns");
|
||||
const [selectedTrigger, setSelectedTrigger] = useState<TriggerDefinition | null>(null);
|
||||
const [isTriggerModalOpen, setIsTriggerModalOpen] = useState(false);
|
||||
const [isTriggerEditModalOpen, setIsTriggerEditModalOpen] = useState(false);
|
||||
const [triggerEditMode, setTriggerEditMode] = useState<'create' | 'edit'>('create');
|
||||
const [triggerEditSql, setTriggerEditSql] = useState<string>('');
|
||||
const [triggerExecuting, setTriggerExecuting] = useState(false);
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
const readOnly = !!tab.readOnly;
|
||||
|
||||
const [tableHeight, setTableHeight] = useState(500);
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
// 初始化透明 Monaco Editor 主题
|
||||
useEffect(() => {
|
||||
loader.init().then(monaco => {
|
||||
monaco.editor.defineTheme('transparent-dark', {
|
||||
base: 'vs-dark',
|
||||
inherit: true,
|
||||
rules: [],
|
||||
colors: {
|
||||
'editor.background': '#00000000',
|
||||
'editor.lineHighlightBackground': '#ffffff10',
|
||||
'editorGutter.background': '#00000000',
|
||||
}
|
||||
});
|
||||
monaco.editor.defineTheme('transparent-light', {
|
||||
base: 'vs',
|
||||
inherit: true,
|
||||
rules: [],
|
||||
colors: {
|
||||
'editor.background': '#00000000',
|
||||
'editor.lineHighlightBackground': '#00000010',
|
||||
'editorGutter.background': '#00000000',
|
||||
}
|
||||
});
|
||||
});
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!containerRef.current) return;
|
||||
const resizeObserver = new ResizeObserver(entries => {
|
||||
@@ -183,6 +212,14 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
// --- Resizable Columns State ---
|
||||
const [tableColumns, setTableColumns] = useState<any[]>([]);
|
||||
const resizeDragRef = useRef<{ startX: number; startWidth: number; index: number; containerLeft: number } | null>(null);
|
||||
const resizeRafRef = useRef<number | null>(null);
|
||||
const latestResizeXRef = useRef<number | null>(null);
|
||||
const ghostRef = useRef<HTMLDivElement>(null);
|
||||
const resizeListenerRef = useRef<{ move: ((e: MouseEvent) => void) | null; up: ((e: MouseEvent) => void) | null }>({
|
||||
move: null,
|
||||
up: null,
|
||||
});
|
||||
|
||||
const sensors = useSensors(
|
||||
useSensor(PointerSensor),
|
||||
@@ -283,25 +320,97 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setTableColumns(initialCols);
|
||||
}, [readOnly]); // Re-create if readOnly changes
|
||||
|
||||
const rafRef = React.useRef<number | null>(null);
|
||||
const flushResizeGhost = useCallback(() => {
|
||||
resizeRafRef.current = null;
|
||||
if (!resizeDragRef.current || !ghostRef.current) return;
|
||||
if (latestResizeXRef.current === null) return;
|
||||
const relativeLeft = latestResizeXRef.current - resizeDragRef.current.containerLeft;
|
||||
ghostRef.current.style.transform = `translateX(${relativeLeft}px)`;
|
||||
}, []);
|
||||
|
||||
// Resize Handler
|
||||
const handleResize = (index: number) => (_: React.SyntheticEvent, { size }: { size: { width: number } }) => {
|
||||
if (rafRef.current) {
|
||||
cancelAnimationFrame(rafRef.current);
|
||||
}
|
||||
rafRef.current = requestAnimationFrame(() => {
|
||||
setTableColumns((columns) => {
|
||||
const nextColumns = [...columns];
|
||||
nextColumns[index] = {
|
||||
...nextColumns[index],
|
||||
width: size.width,
|
||||
};
|
||||
return nextColumns;
|
||||
const detachResizeListeners = useCallback(() => {
|
||||
if (resizeListenerRef.current.move) {
|
||||
document.removeEventListener('mousemove', resizeListenerRef.current.move);
|
||||
resizeListenerRef.current.move = null;
|
||||
}
|
||||
if (resizeListenerRef.current.up) {
|
||||
document.removeEventListener('mouseup', resizeListenerRef.current.up);
|
||||
resizeListenerRef.current.up = null;
|
||||
}
|
||||
}, []);
|
||||
|
||||
const cleanupResizeState = useCallback(() => {
|
||||
if (resizeRafRef.current !== null) {
|
||||
cancelAnimationFrame(resizeRafRef.current);
|
||||
resizeRafRef.current = null;
|
||||
}
|
||||
latestResizeXRef.current = null;
|
||||
resizeDragRef.current = null;
|
||||
if (ghostRef.current) {
|
||||
ghostRef.current.style.display = 'none';
|
||||
}
|
||||
document.body.style.cursor = '';
|
||||
document.body.style.userSelect = '';
|
||||
}, []);
|
||||
|
||||
const handleResizeStart = useCallback((index: number) => (e: React.MouseEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
|
||||
const startX = e.clientX;
|
||||
const currentWidth = Number(tableColumns[index]?.width || 200);
|
||||
const containerLeft = containerRef.current?.getBoundingClientRect().left ?? 0;
|
||||
resizeDragRef.current = { startX, startWidth: currentWidth, index, containerLeft };
|
||||
latestResizeXRef.current = startX;
|
||||
|
||||
if (ghostRef.current && containerRef.current) {
|
||||
const relativeLeft = startX - containerLeft;
|
||||
ghostRef.current.style.transform = `translateX(${relativeLeft}px)`;
|
||||
ghostRef.current.style.display = 'block';
|
||||
}
|
||||
|
||||
detachResizeListeners();
|
||||
|
||||
const onMove = (event: MouseEvent) => {
|
||||
if (!resizeDragRef.current) return;
|
||||
latestResizeXRef.current = event.clientX;
|
||||
if (resizeRafRef.current !== null) return;
|
||||
resizeRafRef.current = requestAnimationFrame(flushResizeGhost);
|
||||
};
|
||||
|
||||
const onUp = (event: MouseEvent) => {
|
||||
if (resizeDragRef.current) {
|
||||
const { startX: dragStartX, startWidth, index: dragIndex } = resizeDragRef.current;
|
||||
const deltaX = event.clientX - dragStartX;
|
||||
const newWidth = Math.max(50, startWidth + deltaX);
|
||||
setTableColumns((prevColumns) => {
|
||||
if (!prevColumns[dragIndex]) return prevColumns;
|
||||
const nextColumns = [...prevColumns];
|
||||
nextColumns[dragIndex] = {
|
||||
...nextColumns[dragIndex],
|
||||
width: newWidth,
|
||||
};
|
||||
return nextColumns;
|
||||
});
|
||||
rafRef.current = null;
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
detachResizeListeners();
|
||||
cleanupResizeState();
|
||||
};
|
||||
|
||||
resizeListenerRef.current = { move: onMove, up: onUp };
|
||||
document.addEventListener('mousemove', onMove);
|
||||
document.addEventListener('mouseup', onUp);
|
||||
document.body.style.cursor = 'col-resize';
|
||||
document.body.style.userSelect = 'none';
|
||||
}, [cleanupResizeState, detachResizeListeners, flushResizeGhost, tableColumns]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
detachResizeListeners();
|
||||
cleanupResizeState();
|
||||
};
|
||||
}, [cleanupResizeState, detachResizeListeners]);
|
||||
|
||||
const fetchData = async () => {
|
||||
if (isNewTable) return; // Don't fetch for new table
|
||||
@@ -365,6 +474,215 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
fetchData();
|
||||
}, [tab]);
|
||||
|
||||
// --- Trigger Handlers ---
|
||||
|
||||
const getDbType = (): string => {
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
const type = String(conn?.config?.type || '').toLowerCase();
|
||||
if (type === 'mariadb' || type === 'sphinx') return 'mysql';
|
||||
if (type === 'dameng') return 'dm';
|
||||
return type;
|
||||
};
|
||||
|
||||
const generateTriggerTemplate = (): string => {
|
||||
const dbType = getDbType();
|
||||
const tblName = tab.tableName || 'table_name';
|
||||
|
||||
switch (dbType) {
|
||||
case 'mysql':
|
||||
return `CREATE TRIGGER trigger_name
|
||||
BEFORE INSERT ON \`${tblName}\`
|
||||
FOR EACH ROW
|
||||
BEGIN
|
||||
-- 触发器逻辑
|
||||
END;`;
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase':
|
||||
return `CREATE OR REPLACE FUNCTION trigger_function_name()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- 触发器逻辑
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TRIGGER trigger_name
|
||||
BEFORE INSERT ON "${tblName}"
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION trigger_function_name();`;
|
||||
case 'sqlserver':
|
||||
return `CREATE TRIGGER trigger_name
|
||||
ON [${tblName}]
|
||||
AFTER INSERT
|
||||
AS
|
||||
BEGIN
|
||||
SET NOCOUNT ON;
|
||||
-- 触发器逻辑
|
||||
END;`;
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
return `CREATE OR REPLACE TRIGGER trigger_name
|
||||
BEFORE INSERT ON "${tblName}"
|
||||
FOR EACH ROW
|
||||
BEGIN
|
||||
-- 触发器逻辑
|
||||
NULL;
|
||||
END;`;
|
||||
case 'sqlite':
|
||||
return `CREATE TRIGGER trigger_name
|
||||
AFTER INSERT ON "${tblName}"
|
||||
BEGIN
|
||||
-- 触发器逻辑
|
||||
END;`;
|
||||
default:
|
||||
return `-- 请输入 CREATE TRIGGER 语句`;
|
||||
}
|
||||
};
|
||||
|
||||
const buildDropTriggerSql = (triggerName: string): string => {
|
||||
const dbType = getDbType();
|
||||
const tblName = tab.tableName || '';
|
||||
|
||||
switch (dbType) {
|
||||
case 'mysql':
|
||||
return `DROP TRIGGER IF EXISTS \`${triggerName}\``;
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase':
|
||||
return `DROP TRIGGER IF EXISTS "${triggerName}" ON "${tblName}"`;
|
||||
case 'sqlserver':
|
||||
return `DROP TRIGGER IF EXISTS [${triggerName}]`;
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
return `DROP TRIGGER "${triggerName}"`;
|
||||
case 'sqlite':
|
||||
return `DROP TRIGGER IF EXISTS "${triggerName}"`;
|
||||
default:
|
||||
return `DROP TRIGGER ${triggerName}`;
|
||||
}
|
||||
};
|
||||
|
||||
const handleCreateTrigger = () => {
|
||||
setTriggerEditMode('create');
|
||||
setTriggerEditSql(generateTriggerTemplate());
|
||||
setIsTriggerEditModalOpen(true);
|
||||
};
|
||||
|
||||
const handleEditTrigger = () => {
|
||||
if (!selectedTrigger) return;
|
||||
setTriggerEditMode('edit');
|
||||
// 构建完整的 CREATE TRIGGER 语句
|
||||
const dbType = getDbType();
|
||||
const tblName = tab.tableName || '';
|
||||
let createSql = '';
|
||||
|
||||
if (dbType === 'mysql') {
|
||||
createSql = `CREATE TRIGGER \`${selectedTrigger.name}\`
|
||||
${selectedTrigger.timing} ${selectedTrigger.event} ON \`${tblName}\`
|
||||
FOR EACH ROW
|
||||
${selectedTrigger.statement}`;
|
||||
} else {
|
||||
createSql = selectedTrigger.statement || '-- 无法获取完整的触发器定义';
|
||||
}
|
||||
|
||||
setTriggerEditSql(createSql);
|
||||
setIsTriggerEditModalOpen(true);
|
||||
};
|
||||
|
||||
const handleDeleteTrigger = () => {
|
||||
if (!selectedTrigger) return;
|
||||
|
||||
Modal.confirm({
|
||||
title: '确认删除触发器',
|
||||
icon: <ExclamationCircleOutlined />,
|
||||
content: `确定要删除触发器 "${selectedTrigger.name}" 吗?此操作不可撤销。`,
|
||||
okText: '删除',
|
||||
okType: 'danger',
|
||||
cancelText: '取消',
|
||||
onOk: async () => {
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
message.error('未找到连接');
|
||||
return;
|
||||
}
|
||||
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || "",
|
||||
database: conn.config.database || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
|
||||
const dropSql = buildDropTriggerSql(selectedTrigger.name);
|
||||
|
||||
try {
|
||||
const res = await DBQuery(config as any, tab.dbName || '', dropSql);
|
||||
if (res.success) {
|
||||
message.success('触发器删除成功');
|
||||
setSelectedTrigger(null);
|
||||
fetchData(); // 刷新列表
|
||||
} else {
|
||||
message.error('删除失败: ' + res.message);
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error('删除失败: ' + (e?.message || String(e)));
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const handleExecuteTriggerSql = async () => {
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
message.error('未找到连接');
|
||||
return;
|
||||
}
|
||||
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || "",
|
||||
database: conn.config.database || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
|
||||
setTriggerExecuting(true);
|
||||
|
||||
try {
|
||||
// 如果是编辑模式,先删除旧触发器
|
||||
if (triggerEditMode === 'edit' && selectedTrigger) {
|
||||
const dropSql = buildDropTriggerSql(selectedTrigger.name);
|
||||
const dropRes = await DBQuery(config as any, tab.dbName || '', dropSql);
|
||||
if (!dropRes.success) {
|
||||
message.error('删除旧触发器失败: ' + dropRes.message);
|
||||
setTriggerExecuting(false);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// 执行创建语句
|
||||
const res = await DBQuery(config as any, tab.dbName || '', triggerEditSql);
|
||||
if (res.success) {
|
||||
message.success(triggerEditMode === 'create' ? '触发器创建成功' : '触发器修改成功');
|
||||
setIsTriggerEditModalOpen(false);
|
||||
setSelectedTrigger(null);
|
||||
fetchData(); // 刷新列表
|
||||
} else {
|
||||
message.error('执行失败: ' + res.message);
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error('执行失败: ' + (e?.message || String(e)));
|
||||
} finally {
|
||||
setTriggerExecuting(false);
|
||||
}
|
||||
};
|
||||
|
||||
// --- Handlers ---
|
||||
|
||||
const handleColumnChange = (key: string, field: keyof EditableColumn, value: any) => {
|
||||
@@ -542,7 +860,7 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
...col,
|
||||
onHeaderCell: (column: any) => ({
|
||||
width: column.width,
|
||||
onResize: handleResize(index),
|
||||
onResizeStart: handleResizeStart(index),
|
||||
}),
|
||||
}));
|
||||
|
||||
@@ -550,7 +868,6 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
<div ref={containerRef} className="table-designer-wrapper" style={{ height: '100%', overflow: 'hidden', position: 'relative' }}>
|
||||
<style>{`
|
||||
.table-designer-wrapper .ant-table-body {
|
||||
height: ${tableHeight}px !important;
|
||||
max-height: ${tableHeight}px !important;
|
||||
}
|
||||
`}</style>
|
||||
@@ -590,6 +907,21 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
</SortableContext>
|
||||
</DndContext>
|
||||
)}
|
||||
<div
|
||||
ref={ghostRef}
|
||||
style={{
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
bottom: 0,
|
||||
left: 0,
|
||||
width: '2px',
|
||||
background: '#1890ff',
|
||||
zIndex: 9999,
|
||||
display: 'none',
|
||||
pointerEvents: 'none',
|
||||
willChange: 'transform',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -681,19 +1013,61 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
key: 'triggers',
|
||||
label: '触发器',
|
||||
children: (
|
||||
<Table
|
||||
dataSource={triggers}
|
||||
columns={[
|
||||
{ title: '名', dataIndex: 'name', key: 'name' },
|
||||
{ title: '时间', dataIndex: 'timing', key: 'timing' },
|
||||
{ title: '事件', dataIndex: 'event', key: 'event' },
|
||||
{ title: '语句', dataIndex: 'statement', key: 'statement', ellipsis: true },
|
||||
]}
|
||||
rowKey="name"
|
||||
size="small"
|
||||
pagination={false}
|
||||
loading={loading}
|
||||
/>
|
||||
<div>
|
||||
<div style={{ marginBottom: 8, display: 'flex', gap: 8 }}>
|
||||
<Button
|
||||
size="small"
|
||||
icon={<EyeOutlined />}
|
||||
disabled={!selectedTrigger}
|
||||
onClick={() => setIsTriggerModalOpen(true)}
|
||||
>
|
||||
查看语句
|
||||
</Button>
|
||||
<Button size="small" icon={<PlusOutlined />} onClick={handleCreateTrigger}>新增</Button>
|
||||
<Button size="small" icon={<EditOutlined />} disabled={!selectedTrigger} onClick={handleEditTrigger}>修改</Button>
|
||||
<Button size="small" icon={<DeleteOutlined />} danger disabled={!selectedTrigger} onClick={handleDeleteTrigger}>删除</Button>
|
||||
<span style={{ marginLeft: 'auto', color: '#888', fontSize: 12, alignSelf: 'center' }}>
|
||||
{selectedTrigger ? `已选择: ${selectedTrigger.name}` : '请点击选择触发器'}
|
||||
</span>
|
||||
</div>
|
||||
<Table
|
||||
dataSource={triggers}
|
||||
columns={[
|
||||
{ title: '名称', dataIndex: 'name', key: 'name' },
|
||||
{ title: '时机', dataIndex: 'timing', key: 'timing', width: 100 },
|
||||
{ title: '事件', dataIndex: 'event', key: 'event', width: 100 },
|
||||
]}
|
||||
rowKey="name"
|
||||
size="small"
|
||||
pagination={false}
|
||||
loading={loading}
|
||||
locale={{ emptyText: <Empty description="该表暂无触发器" image={Empty.PRESENTED_IMAGE_SIMPLE} /> }}
|
||||
rowSelection={{
|
||||
type: 'radio',
|
||||
selectedRowKeys: selectedTrigger ? [selectedTrigger.name] : [],
|
||||
onChange: (_, selectedRows) => setSelectedTrigger(selectedRows[0] || null),
|
||||
onSelect: (record, selected) => {
|
||||
// 点击单选按钮时,如果已选中则取消
|
||||
if (selectedTrigger?.name === record.name) {
|
||||
setSelectedTrigger(null);
|
||||
} else {
|
||||
setSelectedTrigger(record);
|
||||
}
|
||||
},
|
||||
}}
|
||||
onRow={(record) => ({
|
||||
onClick: () => {
|
||||
// 点击已选中的行时取消选择
|
||||
if (selectedTrigger?.name === record.name) {
|
||||
setSelectedTrigger(null);
|
||||
} else {
|
||||
setSelectedTrigger(record);
|
||||
}
|
||||
},
|
||||
style: { cursor: 'pointer' }
|
||||
})}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
] : []),
|
||||
@@ -702,8 +1076,22 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
label: 'DDL',
|
||||
icon: <FileTextOutlined />,
|
||||
children: (
|
||||
<div style={{ height: 'calc(100vh - 200px)', overflow: 'auto', padding: 10, background: '#f5f5f5', border: '1px solid #eee' }}>
|
||||
<pre>{ddl}</pre>
|
||||
<div style={{ height: 'calc(100vh - 200px)', border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
language="sql"
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={ddl}
|
||||
options={{
|
||||
readOnly: true,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
lineNumbers: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
wordWrap: 'on',
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}] : [])
|
||||
@@ -726,6 +1114,75 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
</div>
|
||||
<p style={{ marginTop: 10, color: '#faad14' }}>请仔细检查 SQL,执行后不可撤销。</p>
|
||||
</Modal>
|
||||
|
||||
<Modal
|
||||
title={selectedTrigger ? `触发器: ${selectedTrigger.name}` : '触发器详情'}
|
||||
open={isTriggerModalOpen}
|
||||
onCancel={() => setIsTriggerModalOpen(false)}
|
||||
footer={null}
|
||||
width={700}
|
||||
>
|
||||
{selectedTrigger && (
|
||||
<div>
|
||||
<div style={{ marginBottom: 12, display: 'flex', gap: 24 }}>
|
||||
<span><strong>时机:</strong> {selectedTrigger.timing}</span>
|
||||
<span><strong>事件:</strong> {selectedTrigger.event}</span>
|
||||
</div>
|
||||
<div style={{ border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<Editor
|
||||
height="350px"
|
||||
language="sql"
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={selectedTrigger.statement}
|
||||
options={{
|
||||
readOnly: true,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
lineNumbers: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
wordWrap: 'on',
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</Modal>
|
||||
|
||||
<Modal
|
||||
title={triggerEditMode === 'create' ? '新增触发器' : '修改触发器'}
|
||||
open={isTriggerEditModalOpen}
|
||||
onCancel={() => setIsTriggerEditModalOpen(false)}
|
||||
width={800}
|
||||
okText={triggerEditMode === 'create' ? '创建' : '保存'}
|
||||
cancelText="取消"
|
||||
confirmLoading={triggerExecuting}
|
||||
onOk={handleExecuteTriggerSql}
|
||||
>
|
||||
<div style={{ marginBottom: 8, color: '#888', fontSize: 12 }}>
|
||||
{triggerEditMode === 'edit' && selectedTrigger && (
|
||||
<span>修改触发器时会先删除原触发器,再创建新触发器。</span>
|
||||
)}
|
||||
</div>
|
||||
<div style={{ border: darkMode ? '1px solid #303030' : '1px solid #d9d9d9', borderRadius: 4 }}>
|
||||
<Editor
|
||||
height="350px"
|
||||
language="sql"
|
||||
theme={darkMode ? 'vs-dark' : 'light'}
|
||||
value={triggerEditSql}
|
||||
onChange={(val) => setTriggerEditSql(val || '')}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
lineNumbers: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
wordWrap: 'on',
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<p style={{ marginTop: 10, color: '#faad14' }}>请仔细检查 SQL 语句,执行后不可撤销。</p>
|
||||
</Modal>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
336
frontend/src/components/TriggerViewer.tsx
Normal file
336
frontend/src/components/TriggerViewer.tsx
Normal file
@@ -0,0 +1,336 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import Editor, { loader } from '@monaco-editor/react';
|
||||
import { Spin, Alert } from 'antd';
|
||||
import { TabData } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery } from '../../wailsjs/go/app/App';
|
||||
|
||||
interface TriggerViewerProps {
|
||||
tab: TabData;
|
||||
}
|
||||
|
||||
const TriggerViewer: React.FC<TriggerViewerProps> = ({ tab }) => {
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [triggerDefinition, setTriggerDefinition] = useState<string>('');
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
|
||||
// 初始化透明 Monaco Editor 主题
|
||||
useEffect(() => {
|
||||
loader.init().then(monaco => {
|
||||
monaco.editor.defineTheme('transparent-dark', {
|
||||
base: 'vs-dark',
|
||||
inherit: true,
|
||||
rules: [],
|
||||
colors: {
|
||||
'editor.background': '#00000000',
|
||||
'editor.lineHighlightBackground': '#ffffff10',
|
||||
'editorGutter.background': '#00000000',
|
||||
}
|
||||
});
|
||||
monaco.editor.defineTheme('transparent-light', {
|
||||
base: 'vs',
|
||||
inherit: true,
|
||||
rules: [],
|
||||
colors: {
|
||||
'editor.background': '#00000000',
|
||||
'editor.lineHighlightBackground': '#00000010',
|
||||
'editorGutter.background': '#00000000',
|
||||
}
|
||||
});
|
||||
});
|
||||
}, []);
|
||||
|
||||
const escapeSQLLiteral = (raw: string): string => String(raw || '').replace(/'/g, "''");
|
||||
const quoteSqlServerIdentifier = (raw: string): string => `[${String(raw || '').replace(/]/g, ']]')}]`;
|
||||
|
||||
const getMetadataDialect = (conn: any): string => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'custom') {
|
||||
return String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
}
|
||||
if (type === 'mariadb' || type === 'sphinx') return 'mysql';
|
||||
if (type === 'dameng') return 'dm';
|
||||
return type;
|
||||
};
|
||||
|
||||
const isSphinxConnection = (conn: any): boolean => {
|
||||
const type = String(conn?.config?.type || '').trim().toLowerCase();
|
||||
if (type === 'sphinx') return true;
|
||||
if (type !== 'custom') return false;
|
||||
const driver = String(conn?.config?.driver || '').trim().toLowerCase();
|
||||
return driver === 'sphinx' || driver === 'sphinxql';
|
||||
};
|
||||
|
||||
const buildShowTriggerQueries = (dialect: string, triggerName: string, dbName: string): string[] => {
|
||||
const safeTriggerName = escapeSQLLiteral(triggerName);
|
||||
const safeDbName = escapeSQLLiteral(dbName);
|
||||
switch (dialect) {
|
||||
case 'mysql':
|
||||
return [
|
||||
`SHOW CREATE TRIGGER \`${triggerName.replace(/`/g, '``')}\``,
|
||||
safeDbName
|
||||
? `SELECT ACTION_STATEMENT AS trigger_definition FROM information_schema.triggers WHERE trigger_schema = '${safeDbName}' AND trigger_name = '${safeTriggerName}' LIMIT 1`
|
||||
: '',
|
||||
safeDbName
|
||||
? `SHOW TRIGGERS FROM \`${dbName.replace(/`/g, '``')}\` LIKE '${safeTriggerName}'`
|
||||
: `SHOW TRIGGERS LIKE '${safeTriggerName}'`,
|
||||
].filter(Boolean);
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase':
|
||||
return [`SELECT pg_get_triggerdef(t.oid, true) AS trigger_definition
|
||||
FROM pg_trigger t
|
||||
JOIN pg_class c ON t.tgrelid = c.oid
|
||||
WHERE t.tgname = '${safeTriggerName}'
|
||||
AND NOT t.tgisinternal
|
||||
LIMIT 1`];
|
||||
case 'sqlserver': {
|
||||
return [`SELECT OBJECT_DEFINITION(OBJECT_ID('${safeTriggerName.replace(/'/g, "''")}')) AS trigger_definition`];
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm':
|
||||
if (!safeDbName) {
|
||||
return [`SELECT TRIGGER_BODY FROM USER_TRIGGERS WHERE TRIGGER_NAME = '${safeTriggerName.toUpperCase()}'`];
|
||||
}
|
||||
return [`SELECT TRIGGER_BODY FROM ALL_TRIGGERS WHERE OWNER = '${safeDbName.toUpperCase()}' AND TRIGGER_NAME = '${safeTriggerName.toUpperCase()}'`];
|
||||
case 'sqlite':
|
||||
return [`SELECT sql FROM sqlite_master WHERE type = 'trigger' AND name = '${safeTriggerName}'`];
|
||||
case 'tdengine':
|
||||
return [`-- TDengine 不支持触发器`];
|
||||
case 'mongodb':
|
||||
return [`-- MongoDB 不支持触发器`];
|
||||
default:
|
||||
return [`-- 暂不支持该数据库类型的触发器定义查看`];
|
||||
}
|
||||
};
|
||||
|
||||
const runQueryCandidates = async (
|
||||
config: Record<string, any>,
|
||||
dbName: string,
|
||||
queries: string[]
|
||||
): Promise<{ success: boolean; data: any[]; message?: string }> => {
|
||||
let lastMessage = '';
|
||||
let hasSuccessfulQuery = false;
|
||||
for (const query of queries) {
|
||||
const sql = String(query || '').trim();
|
||||
if (!sql) continue;
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, sql);
|
||||
if (!result.success || !Array.isArray(result.data)) {
|
||||
lastMessage = result.message || lastMessage;
|
||||
continue;
|
||||
}
|
||||
hasSuccessfulQuery = true;
|
||||
if (result.data.length > 0) {
|
||||
return { success: true, data: result.data };
|
||||
}
|
||||
} catch (error: any) {
|
||||
lastMessage = error?.message || String(error);
|
||||
}
|
||||
}
|
||||
if (hasSuccessfulQuery) {
|
||||
return { success: true, data: [] };
|
||||
}
|
||||
return { success: false, data: [], message: lastMessage };
|
||||
};
|
||||
|
||||
const getVersionHint = async (config: Record<string, any>, dbName: string): Promise<string> => {
|
||||
const candidates = [
|
||||
`SELECT VERSION() AS version`,
|
||||
`SHOW VARIABLES LIKE 'version'`,
|
||||
];
|
||||
for (const query of candidates) {
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, query);
|
||||
if (!result.success || !Array.isArray(result.data) || result.data.length === 0) {
|
||||
continue;
|
||||
}
|
||||
const row = result.data[0] as Record<string, any>;
|
||||
const version =
|
||||
row.version
|
||||
|| row.VERSION
|
||||
|| row.Value
|
||||
|| row.value
|
||||
|| Object.values(row)[1]
|
||||
|| Object.values(row)[0];
|
||||
const text = String(version || '').trim();
|
||||
if (text) return text;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return '';
|
||||
};
|
||||
|
||||
const extractTriggerDefinition = (dialect: string, data: any[]): string => {
|
||||
if (!data || data.length === 0) {
|
||||
return '-- 未找到触发器定义';
|
||||
}
|
||||
|
||||
const row = data[0];
|
||||
|
||||
switch (dialect) {
|
||||
case 'mysql': {
|
||||
// MySQL SHOW CREATE TRIGGER returns: Trigger, sql_mode, SQL Original Statement, ...
|
||||
const keys = Object.keys(row);
|
||||
if (row.trigger_definition || row.TRIGGER_DEFINITION) {
|
||||
return String(row.trigger_definition || row.TRIGGER_DEFINITION);
|
||||
}
|
||||
if (row.ACTION_STATEMENT || row.action_statement) {
|
||||
return String(row.ACTION_STATEMENT || row.action_statement);
|
||||
}
|
||||
const sqlKey = keys.find(k => k.toLowerCase().includes('statement') || k.toLowerCase() === 'sql original statement');
|
||||
if (sqlKey) return row[sqlKey];
|
||||
// Fallback: try to find any key containing CREATE TRIGGER
|
||||
for (const key of keys) {
|
||||
const val = String(row[key] || '');
|
||||
if (val.toUpperCase().includes('CREATE TRIGGER')) {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
case 'postgres':
|
||||
case 'kingbase':
|
||||
case 'highgo':
|
||||
case 'vastbase': {
|
||||
return row.trigger_definition || row.TRIGGER_DEFINITION || Object.values(row)[0] || '';
|
||||
}
|
||||
case 'sqlserver': {
|
||||
return row.trigger_definition || row.TRIGGER_DEFINITION || Object.values(row)[0] || '';
|
||||
}
|
||||
case 'oracle':
|
||||
case 'dm': {
|
||||
return row.trigger_body || row.TRIGGER_BODY || Object.values(row)[0] || '';
|
||||
}
|
||||
case 'sqlite': {
|
||||
return row.sql || row.SQL || Object.values(row)[0] || '';
|
||||
}
|
||||
default:
|
||||
return JSON.stringify(row, null, 2);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
const loadTriggerDefinition = async () => {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
setError('未找到数据库连接');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const triggerName = tab.triggerName || '';
|
||||
const dbName = tab.dbName || '';
|
||||
|
||||
if (!triggerName) {
|
||||
setError('触发器名称为空');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const dialect = getMetadataDialect(conn);
|
||||
const queries = buildShowTriggerQueries(dialect, triggerName, dbName);
|
||||
const sphinxLike = isSphinxConnection(conn) && dialect === 'mysql';
|
||||
|
||||
if (!queries.length || String(queries[0] || '').startsWith('--')) {
|
||||
setTriggerDefinition(String(queries[0] || '-- 暂不支持该数据库类型的触发器定义查看'));
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || '',
|
||||
database: conn.config.database || '',
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: '', port: 22, user: '', password: '', keyPath: '' }
|
||||
};
|
||||
|
||||
const result = await runQueryCandidates(config, dbName, queries);
|
||||
|
||||
if (result.success && Array.isArray(result.data) && result.data.length > 0) {
|
||||
const definition = extractTriggerDefinition(dialect, result.data);
|
||||
setTriggerDefinition(definition);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.success) {
|
||||
if (sphinxLike) {
|
||||
const version = await getVersionHint(config, dbName);
|
||||
const versionText = version ? `(版本: ${version})` : '';
|
||||
setTriggerDefinition(`-- 当前 Sphinx 实例${versionText}未返回触发器定义。\n-- 已执行多套兼容查询,可能是版本能力限制或对象类型不支持。`);
|
||||
return;
|
||||
}
|
||||
setTriggerDefinition('-- 未找到触发器定义');
|
||||
} else if (sphinxLike) {
|
||||
const version = await getVersionHint(config, dbName);
|
||||
const versionText = version ? `(版本: ${version})` : '';
|
||||
setTriggerDefinition(`-- 当前 Sphinx 实例${versionText}不支持触发器定义查询。\n-- 已自动尝试兼容语句,返回失败信息: ${result.message || 'unknown error'}`);
|
||||
} else {
|
||||
setError(result.message || '查询触发器定义失败');
|
||||
}
|
||||
} catch (e: any) {
|
||||
setError('查询触发器定义失败: ' + (e?.message || String(e)));
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
loadTriggerDefinition();
|
||||
}, [tab.connectionId, tab.dbName, tab.triggerName, connections]);
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
|
||||
<Spin tip="加载触发器定义..." />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div style={{ padding: 16 }}>
|
||||
<Alert type="error" message="加载失败" description={error} showIcon />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{ padding: '8px 16px', borderBottom: darkMode ? '1px solid #303030' : '1px solid #f0f0f0' }}>
|
||||
<strong>触发器: </strong>{tab.triggerName}
|
||||
{tab.dbName && <span style={{ marginLeft: 16, color: '#888' }}>数据库: {tab.dbName}</span>}
|
||||
</div>
|
||||
<div style={{ flex: 1, minHeight: 0 }}>
|
||||
<Editor
|
||||
height="100%"
|
||||
language="sql"
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={triggerDefinition}
|
||||
options={{
|
||||
readOnly: true,
|
||||
minimap: { enabled: false },
|
||||
fontSize: 14,
|
||||
lineNumbers: 'on',
|
||||
scrollBeyondLastLine: false,
|
||||
wordWrap: 'on',
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default TriggerViewer;
|
||||
7
frontend/src/global.d.ts
vendored
7
frontend/src/global.d.ts
vendored
@@ -2,6 +2,13 @@ export {};
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
go: any;
|
||||
runtime: {
|
||||
WindowMinimise: () => void;
|
||||
WindowToggleMaximise: () => void;
|
||||
Quit: () => void;
|
||||
BrowserOpenURL: (url: string) => void;
|
||||
};
|
||||
ipcRenderer: {
|
||||
send: (channel: string, ...args: any[]) => void;
|
||||
on: (channel: string, listener: (event: any, ...args: any[]) => void) => void;
|
||||
|
||||
@@ -3,6 +3,22 @@ import ReactDOM from 'react-dom/client'
|
||||
import App from './App'
|
||||
// import './index.css' // Optional global styles
|
||||
|
||||
// 全局配置 Monaco Editor 使用本地打包的文件,避免从 CDN (jsdelivr) 加载。
|
||||
// Windows WebView2 环境下访问外部 CDN 可能失败,导致编辑器一直显示 Loading。
|
||||
import { loader } from '@monaco-editor/react'
|
||||
import * as monaco from 'monaco-editor'
|
||||
loader.config({ monaco })
|
||||
|
||||
// 全局注册透明主题,避免每个 Editor 组件 beforeMount 中重复定义
|
||||
monaco.editor.defineTheme('transparent-dark', {
|
||||
base: 'vs-dark', inherit: true, rules: [],
|
||||
colors: { 'editor.background': '#00000000', 'editor.lineHighlightBackground': '#ffffff10', 'editorGutter.background': '#00000000' }
|
||||
})
|
||||
monaco.editor.defineTheme('transparent-light', {
|
||||
base: 'vs', inherit: true, rules: [],
|
||||
colors: { 'editor.background': '#00000000', 'editor.lineHighlightBackground': '#00000010', 'editorGutter.background': '#00000000' }
|
||||
})
|
||||
|
||||
ReactDOM.createRoot(document.getElementById('root')!).render(
|
||||
<React.StrictMode>
|
||||
<App />
|
||||
|
||||
@@ -1,6 +1,239 @@
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import { SavedConnection, TabData, SavedQuery } from './types';
|
||||
import { ConnectionConfig, SavedConnection, TabData, SavedQuery } from './types';
|
||||
|
||||
const DEFAULT_APPEARANCE = { opacity: 1.0, blur: 0 };
|
||||
const LEGACY_DEFAULT_OPACITY = 0.95;
|
||||
const OPACITY_EPSILON = 1e-6;
|
||||
const MAX_URI_LENGTH = 4096;
|
||||
const MAX_HOST_ENTRY_LENGTH = 512;
|
||||
const MAX_HOST_ENTRIES = 64;
|
||||
const DEFAULT_TIMEOUT_SECONDS = 30;
|
||||
const MAX_TIMEOUT_SECONDS = 3600;
|
||||
const DEFAULT_CONNECTION_TYPE = 'mysql';
|
||||
const SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'redis',
|
||||
'tdengine',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'kingbase',
|
||||
'sqlserver',
|
||||
'mongodb',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlite',
|
||||
'custom',
|
||||
]);
|
||||
|
||||
const getDefaultPortByType = (type: string): number => {
|
||||
switch (type) {
|
||||
case 'mysql':
|
||||
case 'mariadb':
|
||||
return 3306;
|
||||
case 'sphinx':
|
||||
return 9306;
|
||||
case 'postgres':
|
||||
case 'vastbase':
|
||||
return 5432;
|
||||
case 'redis':
|
||||
return 6379;
|
||||
case 'tdengine':
|
||||
return 6041;
|
||||
case 'oracle':
|
||||
return 1521;
|
||||
case 'dameng':
|
||||
return 5236;
|
||||
case 'kingbase':
|
||||
return 54321;
|
||||
case 'sqlserver':
|
||||
return 1433;
|
||||
case 'mongodb':
|
||||
return 27017;
|
||||
case 'highgo':
|
||||
return 5866;
|
||||
default:
|
||||
return 3306;
|
||||
}
|
||||
};
|
||||
|
||||
const toTrimmedString = (value: unknown, fallback = ''): string => {
|
||||
if (typeof value === 'string') {
|
||||
return value.trim();
|
||||
}
|
||||
if (typeof value === 'number' || typeof value === 'boolean') {
|
||||
return String(value).trim();
|
||||
}
|
||||
return fallback;
|
||||
};
|
||||
|
||||
const normalizePort = (value: unknown, fallbackPort: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackPort;
|
||||
const port = Math.trunc(parsed);
|
||||
if (port <= 0 || port > 65535) return fallbackPort;
|
||||
return port;
|
||||
};
|
||||
|
||||
const normalizeIntegerInRange = (value: unknown, fallbackValue: number, min: number, max: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackValue;
|
||||
const normalized = Math.trunc(parsed);
|
||||
if (normalized < min || normalized > max) return fallbackValue;
|
||||
return normalized;
|
||||
};
|
||||
|
||||
const isValidHostEntry = (entry: string): boolean => {
|
||||
if (!entry) return false;
|
||||
if (entry.length > MAX_HOST_ENTRY_LENGTH) return false;
|
||||
if (/[()\\/\s]/.test(entry)) return false;
|
||||
return true;
|
||||
};
|
||||
|
||||
const sanitizeStringArray = (value: unknown, maxLength = 256): string[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const seen = new Set<string>();
|
||||
const result: string[] = [];
|
||||
value.forEach((entry) => {
|
||||
const normalized = toTrimmedString(entry);
|
||||
if (!normalized || normalized.length > maxLength) return;
|
||||
if (seen.has(normalized)) return;
|
||||
seen.add(normalized);
|
||||
result.push(normalized);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeNumberArray = (value: unknown, min: number, max: number): number[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const seen = new Set<number>();
|
||||
const result: number[] = [];
|
||||
value.forEach((entry) => {
|
||||
const parsed = Number(entry);
|
||||
if (!Number.isFinite(parsed)) return;
|
||||
const num = Math.trunc(parsed);
|
||||
if (num < min || num > max) return;
|
||||
if (seen.has(num)) return;
|
||||
seen.add(num);
|
||||
result.push(num);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeAddressList = (value: unknown): string[] => {
|
||||
const all = sanitizeStringArray(value, MAX_HOST_ENTRY_LENGTH)
|
||||
.filter((entry) => isValidHostEntry(entry));
|
||||
return all.slice(0, MAX_HOST_ENTRIES);
|
||||
};
|
||||
|
||||
const normalizeConnectionType = (value: unknown): string => {
|
||||
const type = toTrimmedString(value).toLowerCase();
|
||||
return SUPPORTED_CONNECTION_TYPES.has(type) ? type : DEFAULT_CONNECTION_TYPE;
|
||||
};
|
||||
|
||||
const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const type = normalizeConnectionType(raw.type);
|
||||
const defaultPort = getDefaultPortByType(type);
|
||||
const savePassword = typeof raw.savePassword === 'boolean' ? raw.savePassword : true;
|
||||
const mongoSrv = !!raw.mongoSrv;
|
||||
|
||||
const sshRaw = (raw.ssh && typeof raw.ssh === 'object') ? raw.ssh as Record<string, unknown> : {};
|
||||
const ssh = {
|
||||
host: toTrimmedString(sshRaw.host),
|
||||
port: normalizePort(sshRaw.port, 22),
|
||||
user: toTrimmedString(sshRaw.user),
|
||||
password: toTrimmedString(sshRaw.password),
|
||||
keyPath: toTrimmedString(sshRaw.keyPath),
|
||||
};
|
||||
|
||||
const safeConfig: ConnectionConfig & Record<string, unknown> = {
|
||||
...raw,
|
||||
type,
|
||||
host: toTrimmedString(raw.host, 'localhost') || 'localhost',
|
||||
port: normalizePort(raw.port, defaultPort),
|
||||
user: toTrimmedString(raw.user),
|
||||
password: savePassword ? toTrimmedString(raw.password) : '',
|
||||
savePassword,
|
||||
database: toTrimmedString(raw.database),
|
||||
useSSH: !!raw.useSSH,
|
||||
ssh,
|
||||
uri: toTrimmedString(raw.uri).slice(0, MAX_URI_LENGTH),
|
||||
hosts: sanitizeAddressList(raw.hosts),
|
||||
topology: raw.topology === 'replica' ? 'replica' : 'single',
|
||||
mysqlReplicaUser: toTrimmedString(raw.mysqlReplicaUser),
|
||||
mysqlReplicaPassword: savePassword ? toTrimmedString(raw.mysqlReplicaPassword) : '',
|
||||
replicaSet: toTrimmedString(raw.replicaSet),
|
||||
authSource: toTrimmedString(raw.authSource),
|
||||
readPreference: toTrimmedString(raw.readPreference),
|
||||
mongoSrv,
|
||||
mongoAuthMechanism: toTrimmedString(raw.mongoAuthMechanism),
|
||||
mongoReplicaUser: toTrimmedString(raw.mongoReplicaUser),
|
||||
mongoReplicaPassword: savePassword ? toTrimmedString(raw.mongoReplicaPassword) : '',
|
||||
timeout: normalizeIntegerInRange(raw.timeout, DEFAULT_TIMEOUT_SECONDS, 1, MAX_TIMEOUT_SECONDS),
|
||||
};
|
||||
|
||||
if (type === 'redis') {
|
||||
safeConfig.redisDB = normalizeIntegerInRange(raw.redisDB, 0, 0, 15);
|
||||
}
|
||||
|
||||
if (type === 'custom') {
|
||||
safeConfig.driver = toTrimmedString(raw.driver);
|
||||
safeConfig.dsn = toTrimmedString(raw.dsn).slice(0, MAX_URI_LENGTH);
|
||||
}
|
||||
|
||||
return safeConfig;
|
||||
};
|
||||
|
||||
const sanitizeSavedConnection = (value: unknown, index: number): SavedConnection | null => {
|
||||
if (!value || typeof value !== 'object') return null;
|
||||
const raw = value as Record<string, unknown>;
|
||||
const config = sanitizeConnectionConfig(raw.config);
|
||||
const id = toTrimmedString(raw.id, `conn-${index + 1}`) || `conn-${index + 1}`;
|
||||
const fallbackName = config.host ? `${config.type}-${config.host}` : `连接-${index + 1}`;
|
||||
const name = toTrimmedString(raw.name, fallbackName) || fallbackName;
|
||||
const includeDatabases = sanitizeStringArray(raw.includeDatabases, 256);
|
||||
const includeRedisDatabases = sanitizeNumberArray(raw.includeRedisDatabases, 0, 15);
|
||||
|
||||
return {
|
||||
id,
|
||||
name,
|
||||
config,
|
||||
includeDatabases: includeDatabases.length > 0 ? includeDatabases : undefined,
|
||||
includeRedisDatabases: includeRedisDatabases.length > 0 ? includeRedisDatabases : undefined,
|
||||
};
|
||||
};
|
||||
|
||||
const sanitizeConnections = (value: unknown): SavedConnection[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: SavedConnection[] = [];
|
||||
const idSet = new Set<string>();
|
||||
|
||||
value.forEach((entry, index) => {
|
||||
const conn = sanitizeSavedConnection(entry, index);
|
||||
if (!conn) return;
|
||||
let nextId = conn.id;
|
||||
if (idSet.has(nextId)) {
|
||||
nextId = `${nextId}-${index + 1}`;
|
||||
}
|
||||
idSet.add(nextId);
|
||||
result.push({ ...conn, id: nextId });
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const isLegacyDefaultAppearance = (appearance: Partial<{ opacity: number; blur: number }> | undefined): boolean => {
|
||||
if (!appearance) {
|
||||
return true;
|
||||
}
|
||||
const opacity = typeof appearance.opacity === 'number' ? appearance.opacity : LEGACY_DEFAULT_OPACITY;
|
||||
const blur = typeof appearance.blur === 'number' ? appearance.blur : 0;
|
||||
return Math.abs(opacity - LEGACY_DEFAULT_OPACITY) < OPACITY_EPSILON && blur === 0;
|
||||
};
|
||||
|
||||
export interface SqlLog {
|
||||
id: string;
|
||||
@@ -19,29 +252,120 @@ interface AppState {
|
||||
activeTabId: string | null;
|
||||
activeContext: { connectionId: string; dbName: string } | null;
|
||||
savedQueries: SavedQuery[];
|
||||
darkMode: boolean;
|
||||
theme: 'light' | 'dark';
|
||||
appearance: { opacity: number; blur: number };
|
||||
sqlFormatOptions: { keywordCase: 'upper' | 'lower' };
|
||||
queryOptions: { maxRows: number };
|
||||
sqlLogs: SqlLog[];
|
||||
|
||||
tableAccessCount: Record<string, number>;
|
||||
tableSortPreference: Record<string, 'name' | 'frequency'>;
|
||||
|
||||
addConnection: (conn: SavedConnection) => void;
|
||||
updateConnection: (conn: SavedConnection) => void;
|
||||
removeConnection: (id: string) => void;
|
||||
|
||||
|
||||
addTab: (tab: TabData) => void;
|
||||
closeTab: (id: string) => void;
|
||||
closeOtherTabs: (id: string) => void;
|
||||
closeTabsToLeft: (id: string) => void;
|
||||
closeTabsToRight: (id: string) => void;
|
||||
closeTabsByConnection: (connectionId: string) => void;
|
||||
closeTabsByDatabase: (connectionId: string, dbName: string) => void;
|
||||
closeAllTabs: () => void;
|
||||
setActiveTab: (id: string) => void;
|
||||
setActiveContext: (context: { connectionId: string; dbName: string } | null) => void;
|
||||
|
||||
saveQuery: (query: SavedQuery) => void;
|
||||
deleteQuery: (id: string) => void;
|
||||
|
||||
toggleDarkMode: () => void;
|
||||
setTheme: (theme: 'light' | 'dark') => void;
|
||||
setAppearance: (appearance: Partial<{ opacity: number; blur: number }>) => void;
|
||||
setSqlFormatOptions: (options: { keywordCase: 'upper' | 'lower' }) => void;
|
||||
|
||||
setQueryOptions: (options: Partial<{ maxRows: number }>) => void;
|
||||
|
||||
addSqlLog: (log: SqlLog) => void;
|
||||
clearSqlLogs: () => void;
|
||||
|
||||
recordTableAccess: (connectionId: string, dbName: string, tableName: string) => void;
|
||||
setTableSortPreference: (connectionId: string, dbName: string, sortBy: 'name' | 'frequency') => void;
|
||||
}
|
||||
|
||||
const sanitizeSavedQueries = (value: unknown): SavedQuery[] => {
|
||||
if (!Array.isArray(value)) return [];
|
||||
const result: SavedQuery[] = [];
|
||||
value.forEach((entry, index) => {
|
||||
if (!entry || typeof entry !== 'object') return;
|
||||
const raw = entry as Record<string, unknown>;
|
||||
const id = toTrimmedString(raw.id, `query-${index + 1}`) || `query-${index + 1}`;
|
||||
const sql = toTrimmedString(raw.sql);
|
||||
const connectionId = toTrimmedString(raw.connectionId);
|
||||
const dbName = toTrimmedString(raw.dbName);
|
||||
if (!sql || !connectionId || !dbName) return;
|
||||
result.push({
|
||||
id,
|
||||
name: toTrimmedString(raw.name, `查询-${index + 1}`) || `查询-${index + 1}`,
|
||||
sql,
|
||||
connectionId,
|
||||
dbName,
|
||||
createdAt: Number.isFinite(Number(raw.createdAt)) ? Number(raw.createdAt) : Date.now(),
|
||||
});
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTheme = (value: unknown): 'light' | 'dark' => (value === 'dark' ? 'dark' : 'light');
|
||||
|
||||
const sanitizeSqlFormatOptions = (value: unknown): { keywordCase: 'upper' | 'lower' } => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
return { keywordCase: raw.keywordCase === 'lower' ? 'lower' : 'upper' };
|
||||
};
|
||||
|
||||
const sanitizeQueryOptions = (value: unknown): { maxRows: number } => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const maxRows = Number(raw.maxRows);
|
||||
if (!Number.isFinite(maxRows) || maxRows <= 0) {
|
||||
return { maxRows: 5000 };
|
||||
}
|
||||
return { maxRows: Math.min(50000, Math.trunc(maxRows)) };
|
||||
};
|
||||
|
||||
const sanitizeTableAccessCount = (value: unknown): Record<string, number> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, number> = {};
|
||||
Object.entries(raw).forEach(([key, count]) => {
|
||||
const parsed = Number(count);
|
||||
if (!Number.isFinite(parsed) || parsed < 0) return;
|
||||
result[key] = Math.trunc(parsed);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeTableSortPreference = (value: unknown): Record<string, 'name' | 'frequency'> => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const result: Record<string, 'name' | 'frequency'> = {};
|
||||
Object.entries(raw).forEach(([key, preference]) => {
|
||||
result[key] = preference === 'frequency' ? 'frequency' : 'name';
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const sanitizeAppearance = (
|
||||
appearance: Partial<{ opacity: number; blur: number }> | undefined,
|
||||
version: number
|
||||
): { opacity: number; blur: number } => {
|
||||
if (!appearance || typeof appearance !== 'object') {
|
||||
return { ...DEFAULT_APPEARANCE };
|
||||
}
|
||||
const nextAppearance = {
|
||||
opacity: typeof appearance.opacity === 'number' ? appearance.opacity : DEFAULT_APPEARANCE.opacity,
|
||||
blur: typeof appearance.blur === 'number' ? appearance.blur : DEFAULT_APPEARANCE.blur,
|
||||
};
|
||||
if (version < 2 && isLegacyDefaultAppearance(appearance)) {
|
||||
return { ...DEFAULT_APPEARANCE };
|
||||
}
|
||||
return nextAppearance;
|
||||
};
|
||||
|
||||
export const useStore = create<AppState>()(
|
||||
persist(
|
||||
(set) => ({
|
||||
@@ -50,13 +374,17 @@ export const useStore = create<AppState>()(
|
||||
activeTabId: null,
|
||||
activeContext: null,
|
||||
savedQueries: [],
|
||||
darkMode: false,
|
||||
theme: 'light',
|
||||
appearance: { ...DEFAULT_APPEARANCE },
|
||||
sqlFormatOptions: { keywordCase: 'upper' },
|
||||
queryOptions: { maxRows: 5000 },
|
||||
sqlLogs: [],
|
||||
tableAccessCount: {},
|
||||
tableSortPreference: {},
|
||||
|
||||
addConnection: (conn) => set((state) => ({ connections: [...state.connections, conn] })),
|
||||
updateConnection: (conn) => set((state) => ({
|
||||
connections: state.connections.map(c => c.id === conn.id ? conn : c)
|
||||
updateConnection: (conn) => set((state) => ({
|
||||
connections: state.connections.map(c => c.id === conn.id ? conn : c)
|
||||
})),
|
||||
removeConnection: (id) => set((state) => ({ connections: state.connections.filter(c => c.id !== id) })),
|
||||
|
||||
@@ -79,6 +407,69 @@ export const useStore = create<AppState>()(
|
||||
}
|
||||
return { tabs: newTabs, activeTabId: newActiveId };
|
||||
}),
|
||||
|
||||
closeOtherTabs: (id) => set((state) => {
|
||||
const keep = state.tabs.find(t => t.id === id);
|
||||
if (!keep) return state;
|
||||
return { tabs: [keep], activeTabId: id };
|
||||
}),
|
||||
|
||||
closeTabsToLeft: (id) => set((state) => {
|
||||
const index = state.tabs.findIndex(t => t.id === id);
|
||||
if (index === -1) return state;
|
||||
const newTabs = state.tabs.slice(index);
|
||||
const activeStillExists = state.activeTabId ? newTabs.some(t => t.id === state.activeTabId) : false;
|
||||
return { tabs: newTabs, activeTabId: activeStillExists ? state.activeTabId : id };
|
||||
}),
|
||||
|
||||
closeTabsToRight: (id) => set((state) => {
|
||||
const index = state.tabs.findIndex(t => t.id === id);
|
||||
if (index === -1) return state;
|
||||
const newTabs = state.tabs.slice(0, index + 1);
|
||||
const activeStillExists = state.activeTabId ? newTabs.some(t => t.id === state.activeTabId) : false;
|
||||
return { tabs: newTabs, activeTabId: activeStillExists ? state.activeTabId : id };
|
||||
}),
|
||||
|
||||
closeTabsByConnection: (connectionId) => set((state) => {
|
||||
const targetConnectionId = String(connectionId || '').trim();
|
||||
if (!targetConnectionId) return state;
|
||||
const newTabs = state.tabs.filter(t => String(t.connectionId || '').trim() !== targetConnectionId);
|
||||
const activeStillExists = state.activeTabId ? newTabs.some(t => t.id === state.activeTabId) : false;
|
||||
const nextActiveTabId = activeStillExists
|
||||
? state.activeTabId
|
||||
: (newTabs.length > 0 ? newTabs[newTabs.length - 1].id : null);
|
||||
const nextActiveContext = state.activeContext?.connectionId === targetConnectionId ? null : state.activeContext;
|
||||
return {
|
||||
tabs: newTabs,
|
||||
activeTabId: nextActiveTabId,
|
||||
activeContext: nextActiveContext,
|
||||
};
|
||||
}),
|
||||
|
||||
closeTabsByDatabase: (connectionId, dbName) => set((state) => {
|
||||
const targetConnectionId = String(connectionId || '').trim();
|
||||
const targetDbName = String(dbName || '').trim();
|
||||
if (!targetConnectionId || !targetDbName) return state;
|
||||
const newTabs = state.tabs.filter((tab) => {
|
||||
const sameConnection = String(tab.connectionId || '').trim() === targetConnectionId;
|
||||
const sameDb = String(tab.dbName || '').trim() === targetDbName;
|
||||
return !(sameConnection && sameDb);
|
||||
});
|
||||
const activeStillExists = state.activeTabId ? newTabs.some(t => t.id === state.activeTabId) : false;
|
||||
const nextActiveTabId = activeStillExists
|
||||
? state.activeTabId
|
||||
: (newTabs.length > 0 ? newTabs[newTabs.length - 1].id : null);
|
||||
const sameActiveContext = state.activeContext
|
||||
&& state.activeContext.connectionId === targetConnectionId
|
||||
&& state.activeContext.dbName === targetDbName;
|
||||
return {
|
||||
tabs: newTabs,
|
||||
activeTabId: nextActiveTabId,
|
||||
activeContext: sameActiveContext ? null : state.activeContext,
|
||||
};
|
||||
}),
|
||||
|
||||
closeAllTabs: () => set(() => ({ tabs: [], activeTabId: null })),
|
||||
|
||||
setActiveTab: (id) => set({ activeTabId: id }),
|
||||
setActiveContext: (context) => set({ activeContext: context }),
|
||||
@@ -94,15 +485,81 @@ export const useStore = create<AppState>()(
|
||||
|
||||
deleteQuery: (id) => set((state) => ({ savedQueries: state.savedQueries.filter(q => q.id !== id) })),
|
||||
|
||||
toggleDarkMode: () => set((state) => ({ darkMode: !state.darkMode })),
|
||||
setTheme: (theme) => set({ theme }),
|
||||
setAppearance: (appearance) => set((state) => ({ appearance: { ...state.appearance, ...appearance } })),
|
||||
setSqlFormatOptions: (options) => set({ sqlFormatOptions: options }),
|
||||
|
||||
setQueryOptions: (options) => set((state) => ({ queryOptions: { ...state.queryOptions, ...options } })),
|
||||
|
||||
addSqlLog: (log) => set((state) => ({ sqlLogs: [log, ...state.sqlLogs].slice(0, 1000) })), // Keep last 1000 logs
|
||||
clearSqlLogs: () => set({ sqlLogs: [] }),
|
||||
|
||||
recordTableAccess: (connectionId, dbName, tableName) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}-${tableName}`;
|
||||
const currentCount = state.tableAccessCount[key] || 0;
|
||||
return {
|
||||
tableAccessCount: {
|
||||
...state.tableAccessCount,
|
||||
[key]: currentCount + 1
|
||||
}
|
||||
};
|
||||
}),
|
||||
|
||||
setTableSortPreference: (connectionId, dbName, sortBy) => set((state) => {
|
||||
const key = `${connectionId}-${dbName}`;
|
||||
return {
|
||||
tableSortPreference: {
|
||||
...state.tableSortPreference,
|
||||
[key]: sortBy
|
||||
}
|
||||
};
|
||||
}),
|
||||
}),
|
||||
{
|
||||
name: 'lite-db-storage', // name of the item in the storage (must be unique)
|
||||
partialize: (state) => ({ connections: state.connections, savedQueries: state.savedQueries, darkMode: state.darkMode, sqlFormatOptions: state.sqlFormatOptions }), // Don't persist logs
|
||||
version: 3,
|
||||
migrate: (persistedState: unknown, version: number) => {
|
||||
if (!persistedState || typeof persistedState !== 'object') {
|
||||
return persistedState as AppState;
|
||||
}
|
||||
const state = persistedState as Partial<AppState>;
|
||||
const nextState: Partial<AppState> = { ...state };
|
||||
nextState.connections = sanitizeConnections(state.connections);
|
||||
nextState.savedQueries = sanitizeSavedQueries(state.savedQueries);
|
||||
nextState.theme = sanitizeTheme(state.theme);
|
||||
nextState.appearance = sanitizeAppearance(state.appearance, version);
|
||||
nextState.sqlFormatOptions = sanitizeSqlFormatOptions(state.sqlFormatOptions);
|
||||
nextState.queryOptions = sanitizeQueryOptions(state.queryOptions);
|
||||
nextState.tableAccessCount = sanitizeTableAccessCount(state.tableAccessCount);
|
||||
nextState.tableSortPreference = sanitizeTableSortPreference(state.tableSortPreference);
|
||||
return nextState as AppState;
|
||||
},
|
||||
merge: (persistedState, currentState) => {
|
||||
const state = (persistedState && typeof persistedState === 'object')
|
||||
? persistedState as Partial<AppState>
|
||||
: {};
|
||||
return {
|
||||
...currentState,
|
||||
...state,
|
||||
connections: sanitizeConnections(state.connections),
|
||||
savedQueries: sanitizeSavedQueries(state.savedQueries),
|
||||
theme: sanitizeTheme(state.theme),
|
||||
appearance: sanitizeAppearance(state.appearance, 3),
|
||||
sqlFormatOptions: sanitizeSqlFormatOptions(state.sqlFormatOptions),
|
||||
queryOptions: sanitizeQueryOptions(state.queryOptions),
|
||||
tableAccessCount: sanitizeTableAccessCount(state.tableAccessCount),
|
||||
tableSortPreference: sanitizeTableSortPreference(state.tableSortPreference),
|
||||
};
|
||||
},
|
||||
partialize: (state) => ({
|
||||
connections: state.connections,
|
||||
savedQueries: state.savedQueries,
|
||||
theme: state.theme,
|
||||
appearance: state.appearance,
|
||||
sqlFormatOptions: state.sqlFormatOptions,
|
||||
queryOptions: state.queryOptions,
|
||||
tableAccessCount: state.tableAccessCount,
|
||||
tableSortPreference: state.tableSortPreference
|
||||
}), // Don't persist logs
|
||||
}
|
||||
)
|
||||
);
|
||||
);
|
||||
|
||||
@@ -12,9 +12,35 @@ export interface ConnectionConfig {
|
||||
port: number;
|
||||
user: string;
|
||||
password?: string;
|
||||
savePassword?: boolean;
|
||||
database?: string;
|
||||
useSSH?: boolean;
|
||||
ssh?: SSHConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
redisDB?: number; // Redis database index (0-15)
|
||||
uri?: string; // Connection URI for copy/paste
|
||||
hosts?: string[]; // Multi-host addresses: host:port
|
||||
topology?: 'single' | 'replica';
|
||||
mysqlReplicaUser?: string;
|
||||
mysqlReplicaPassword?: string;
|
||||
replicaSet?: string;
|
||||
authSource?: string;
|
||||
readPreference?: string;
|
||||
mongoSrv?: boolean;
|
||||
mongoAuthMechanism?: string;
|
||||
mongoReplicaUser?: string;
|
||||
mongoReplicaPassword?: string;
|
||||
}
|
||||
|
||||
export interface MongoMemberInfo {
|
||||
host: string;
|
||||
role: string;
|
||||
state: string;
|
||||
stateCode?: number;
|
||||
healthy: boolean;
|
||||
isSelf?: boolean;
|
||||
}
|
||||
|
||||
export interface SavedConnection {
|
||||
@@ -22,6 +48,7 @@ export interface SavedConnection {
|
||||
name: string;
|
||||
config: ConnectionConfig;
|
||||
includeDatabases?: string[];
|
||||
includeRedisDatabases?: number[]; // Redis databases to show (0-15)
|
||||
}
|
||||
|
||||
export interface ColumnDefinition {
|
||||
@@ -60,13 +87,18 @@ export interface TriggerDefinition {
|
||||
export interface TabData {
|
||||
id: string;
|
||||
title: string;
|
||||
type: 'query' | 'table' | 'design';
|
||||
type: 'query' | 'table' | 'design' | 'redis-keys' | 'redis-command' | 'trigger' | 'view-def' | 'routine-def';
|
||||
connectionId: string;
|
||||
dbName?: string;
|
||||
tableName?: string;
|
||||
query?: string;
|
||||
initialTab?: string;
|
||||
readOnly?: boolean;
|
||||
redisDB?: number; // Redis database index for redis tabs
|
||||
triggerName?: string; // Trigger name for trigger tabs
|
||||
viewName?: string; // View name for view definition tabs
|
||||
routineName?: string; // Routine name for function/procedure definition tabs
|
||||
routineType?: string; // 'FUNCTION' or 'PROCEDURE'
|
||||
}
|
||||
|
||||
export interface DatabaseNode {
|
||||
@@ -85,3 +117,37 @@ export interface SavedQuery {
|
||||
dbName: string;
|
||||
createdAt: number;
|
||||
}
|
||||
|
||||
// Redis types
|
||||
export interface RedisKeyInfo {
|
||||
key: string;
|
||||
type: string;
|
||||
ttl: number;
|
||||
}
|
||||
|
||||
export interface RedisScanResult {
|
||||
keys: RedisKeyInfo[];
|
||||
cursor: number;
|
||||
}
|
||||
|
||||
export interface RedisValue {
|
||||
type: 'string' | 'hash' | 'list' | 'set' | 'zset' | 'stream';
|
||||
ttl: number;
|
||||
value: any;
|
||||
length: number;
|
||||
}
|
||||
|
||||
export interface RedisDBInfo {
|
||||
index: number;
|
||||
keys: number;
|
||||
}
|
||||
|
||||
export interface ZSetMember {
|
||||
member: string;
|
||||
score: number;
|
||||
}
|
||||
|
||||
export interface StreamEntry {
|
||||
id: string;
|
||||
fields: Record<string, string>;
|
||||
}
|
||||
|
||||
66
frontend/src/utils/appearance.ts
Normal file
66
frontend/src/utils/appearance.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
const DEFAULT_OPACITY = 1.0;
|
||||
const MIN_OPACITY = 0.1;
|
||||
const MAX_OPACITY = 1.0;
|
||||
|
||||
// 平台透明度映射因子:值越大,滑块变化越平滑(1.0 = 线性映射)
|
||||
const MAC_OPACITY_FACTOR = 0.60;
|
||||
const MAC_BLUR_FACTOR = 1.00;
|
||||
const WINDOWS_OPACITY_FACTOR = 0.70;
|
||||
const WINDOWS_BLUR_FACTOR = 1.00;
|
||||
|
||||
const clamp = (value: number, min: number, max: number) => Math.min(max, Math.max(min, value));
|
||||
|
||||
export const isMacLikePlatform = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
const platform = navigator.platform || '';
|
||||
const ua = navigator.userAgent || '';
|
||||
return /(Mac|iPhone|iPad|iPod)/i.test(`${platform} ${ua}`);
|
||||
};
|
||||
|
||||
export const isWindowsPlatform = (): boolean => {
|
||||
if (typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
const platform = navigator.platform || '';
|
||||
const ua = navigator.userAgent || '';
|
||||
return /(Win|Windows)/i.test(`${platform} ${ua}`);
|
||||
};
|
||||
|
||||
const getPlatformFactors = () => {
|
||||
if (isMacLikePlatform()) {
|
||||
return { opacity: MAC_OPACITY_FACTOR, blur: MAC_BLUR_FACTOR };
|
||||
}
|
||||
if (isWindowsPlatform()) {
|
||||
return { opacity: WINDOWS_OPACITY_FACTOR, blur: WINDOWS_BLUR_FACTOR };
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
export const normalizeOpacityForPlatform = (opacity: number | undefined): number => {
|
||||
const raw = clamp(opacity ?? DEFAULT_OPACITY, MIN_OPACITY, MAX_OPACITY);
|
||||
// 用户显式拉到 100%% 时,必须保持完全不透明,不能再被平台映射压低。
|
||||
if (raw >= MAX_OPACITY - 1e-6) {
|
||||
return MAX_OPACITY;
|
||||
}
|
||||
const factors = getPlatformFactors();
|
||||
if (!factors) {
|
||||
return raw;
|
||||
}
|
||||
|
||||
return clamp(MIN_OPACITY + (raw - MIN_OPACITY) * factors.opacity, MIN_OPACITY, MAX_OPACITY);
|
||||
};
|
||||
|
||||
export const normalizeBlurForPlatform = (blur: number | undefined): number => {
|
||||
const raw = Math.max(0, blur ?? 0);
|
||||
const factors = getPlatformFactors();
|
||||
if (!factors) {
|
||||
return raw;
|
||||
}
|
||||
return Math.round(raw * factors.blur);
|
||||
};
|
||||
|
||||
export const blurToFilter = (blur: number): string | undefined => {
|
||||
return blur > 0 ? `blur(${blur}px)` : undefined;
|
||||
};
|
||||
272
frontend/src/utils/sql.ts
Normal file
272
frontend/src/utils/sql.ts
Normal file
@@ -0,0 +1,272 @@
|
||||
export type FilterCondition = {
|
||||
id?: number;
|
||||
enabled?: boolean;
|
||||
column?: string;
|
||||
op?: string;
|
||||
value?: string;
|
||||
value2?: string;
|
||||
};
|
||||
|
||||
const normalizeIdentPart = (ident: string) => {
|
||||
let raw = (ident || '').trim();
|
||||
if (!raw) return raw;
|
||||
const first = raw[0];
|
||||
const last = raw[raw.length - 1];
|
||||
if ((first === '"' && last === '"') || (first === '`' && last === '`')) {
|
||||
raw = raw.slice(1, -1).trim();
|
||||
}
|
||||
raw = raw.replace(/["`]/g, '').trim();
|
||||
return raw;
|
||||
};
|
||||
|
||||
// 检查标识符是否需要引号(包含特殊字符或是保留字)
|
||||
const needsQuote = (ident: string): boolean => {
|
||||
if (!ident) return false;
|
||||
// 如果包含特殊字符(非字母、数字、下划线)则需要引号
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(ident)) return true;
|
||||
// PostgreSQL 会将未加引号的标识符折叠为小写,含大写字母时必须加引号
|
||||
if (/[A-Z]/.test(ident)) return true;
|
||||
// 常见 SQL 保留字列表(简化版)
|
||||
const reserved = ['select', 'from', 'where', 'table', 'index', 'user', 'order', 'group', 'by', 'limit', 'offset', 'and', 'or', 'not', 'null', 'true', 'false', 'key', 'primary', 'foreign', 'references', 'default', 'constraint', 'create', 'drop', 'alter', 'insert', 'update', 'delete', 'set', 'values', 'into', 'join', 'left', 'right', 'inner', 'outer', 'on', 'as', 'is', 'in', 'like', 'between', 'case', 'when', 'then', 'else', 'end', 'having', 'distinct', 'all', 'any', 'exists', 'union', 'except', 'intersect'];
|
||||
return reserved.includes(ident.toLowerCase());
|
||||
};
|
||||
|
||||
export const quoteIdentPart = (dbType: string, ident: string) => {
|
||||
const raw = normalizeIdentPart(ident);
|
||||
if (!raw) return raw;
|
||||
const dbTypeLower = (dbType || '').toLowerCase();
|
||||
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'sphinx' || dbTypeLower === 'tdengine') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
|
||||
// 对于 KingBase/PostgreSQL,只在必要时加引号
|
||||
if (dbTypeLower === 'kingbase' || dbTypeLower === 'postgres') {
|
||||
if (needsQuote(raw)) {
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
}
|
||||
// 不加引号,保持原样(数据库会自动转小写处理)
|
||||
return raw;
|
||||
}
|
||||
|
||||
// 其他数据库默认加双引号
|
||||
return `"${raw.replace(/"/g, '""')}"`;
|
||||
};
|
||||
|
||||
export const quoteQualifiedIdent = (dbType: string, ident: string) => {
|
||||
const raw = (ident || '').trim();
|
||||
if (!raw) return raw;
|
||||
const parts = raw.split('.').map(normalizeIdentPart).filter(Boolean);
|
||||
if (parts.length <= 1) return quoteIdentPart(dbType, raw);
|
||||
return parts.map(p => quoteIdentPart(dbType, p)).join('.');
|
||||
};
|
||||
|
||||
export const escapeLiteral = (val: string) => (val || '').replace(/'/g, "''");
|
||||
|
||||
type SortInfo = {
|
||||
columnKey?: string;
|
||||
order?: string;
|
||||
} | null | undefined;
|
||||
|
||||
// 为排序查询按库类型注入 sort_buffer 提升参数(仅影响当前语句)。
|
||||
// MySQL: 使用 Optimizer Hint `SET_VAR`。
|
||||
// MariaDB: 使用 `SET STATEMENT ... FOR` 包装当前查询。
|
||||
export const withSortBufferTuningSQL = (
|
||||
dbType: string,
|
||||
sql: string,
|
||||
sortBufferBytes: number,
|
||||
) => {
|
||||
const rawSql = String(sql || '');
|
||||
const trimmed = rawSql.trim();
|
||||
if (!trimmed) return rawSql;
|
||||
if (!/^select\b/i.test(trimmed)) return rawSql;
|
||||
|
||||
const normalizedType = String(dbType || '').trim().toLowerCase();
|
||||
const bytes = Math.max(256 * 1024, Math.floor(Number(sortBufferBytes) || 0));
|
||||
if (normalizedType === 'mysql') {
|
||||
return rawSql.replace(
|
||||
/^\s*select\b/i,
|
||||
(matched) => `${matched} /*+ SET_VAR(sort_buffer_size=${bytes}) */`,
|
||||
);
|
||||
}
|
||||
if (normalizedType === 'mariadb') {
|
||||
return `SET STATEMENT sort_buffer_size=${bytes} FOR ${rawSql}`;
|
||||
}
|
||||
return rawSql;
|
||||
};
|
||||
|
||||
export const buildOrderBySQL = (
|
||||
dbType: string,
|
||||
sortInfo: SortInfo,
|
||||
fallbackColumns: string[] = [],
|
||||
) => {
|
||||
const dbTypeLower = String(dbType || '').trim().toLowerCase();
|
||||
const sortColumn = normalizeIdentPart(String(sortInfo?.columnKey || ''));
|
||||
const sortOrder = String(sortInfo?.order || '');
|
||||
const direction = sortOrder === 'ascend' ? 'ASC' : sortOrder === 'descend' ? 'DESC' : '';
|
||||
if (sortColumn && direction) {
|
||||
return ` ORDER BY ${quoteIdentPart(dbType, sortColumn)} ${direction}`;
|
||||
}
|
||||
|
||||
// MySQL/MariaDB 大表在无显式排序需求时强制 ORDER BY(即使按主键)可能触发 filesort,
|
||||
// 导致 `Error 1038 (HY001): Out of sort memory`。
|
||||
// 因此仅在用户主动点击排序时下发 ORDER BY,默认分页查询不加兜底排序。
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const seen = new Set<string>();
|
||||
const stableColumns = (fallbackColumns || [])
|
||||
.map((col) => normalizeIdentPart(String(col || '')))
|
||||
.filter((col) => {
|
||||
if (!col) return false;
|
||||
const key = col.toLowerCase();
|
||||
if (seen.has(key)) return false;
|
||||
seen.add(key);
|
||||
return true;
|
||||
});
|
||||
if (stableColumns.length > 0) {
|
||||
const parts = stableColumns.map((col) => `${quoteIdentPart(dbType, col)} ASC`);
|
||||
return ` ORDER BY ${parts.join(', ')}`;
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
|
||||
export const parseListValues = (val: string) => {
|
||||
const raw = (val || '').trim();
|
||||
if (!raw) return [];
|
||||
return raw
|
||||
.split(/[\n,,]+/)
|
||||
.map(s => s.trim())
|
||||
.filter(Boolean);
|
||||
};
|
||||
|
||||
export const buildWhereSQL = (dbType: string, conditions: FilterCondition[]) => {
|
||||
const whereParts: string[] = [];
|
||||
|
||||
(conditions || []).forEach((cond) => {
|
||||
if (cond?.enabled === false) return;
|
||||
|
||||
const op = (cond?.op || '').trim();
|
||||
const column = (cond?.column || '').trim();
|
||||
const value = (cond?.value ?? '').toString();
|
||||
const value2 = (cond?.value2 ?? '').toString();
|
||||
|
||||
if (op === 'CUSTOM') {
|
||||
const expr = value.trim();
|
||||
if (expr) whereParts.push(`(${expr})`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!column) return;
|
||||
|
||||
const col = quoteIdentPart(dbType, column);
|
||||
|
||||
switch (op) {
|
||||
case 'IS_NULL':
|
||||
whereParts.push(`${col} IS NULL`);
|
||||
return;
|
||||
case 'IS_NOT_NULL':
|
||||
whereParts.push(`${col} IS NOT NULL`);
|
||||
return;
|
||||
case 'IS_EMPTY':
|
||||
// 兼容:空值通常理解为 NULL 或空字符串
|
||||
whereParts.push(`(${col} IS NULL OR ${col} = '')`);
|
||||
return;
|
||||
case 'IS_NOT_EMPTY':
|
||||
whereParts.push(`(${col} IS NOT NULL AND ${col} <> '')`);
|
||||
return;
|
||||
case 'BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_BETWEEN': {
|
||||
const v1 = value.trim();
|
||||
const v2 = value2.trim();
|
||||
if (!v1 || !v2) return;
|
||||
whereParts.push(`${col} NOT BETWEEN '${escapeLiteral(v1)}' AND '${escapeLiteral(v2)}'`);
|
||||
return;
|
||||
}
|
||||
case 'IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_IN': {
|
||||
const items = parseListValues(value);
|
||||
if (items.length === 0) return;
|
||||
const list = items.map(v => `'${escapeLiteral(v)}'`).join(', ');
|
||||
whereParts.push(`${col} NOT IN (${list})`);
|
||||
return;
|
||||
}
|
||||
case 'CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_CONTAINS': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_STARTS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
case 'ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case 'NOT_ENDS_WITH': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} NOT LIKE '%${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
case '=':
|
||||
case '!=':
|
||||
case '<':
|
||||
case '<=':
|
||||
case '>':
|
||||
case '>=': {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
// 兼容旧值:LIKE
|
||||
if (op.toUpperCase() === 'LIKE') {
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} LIKE '%${escapeLiteral(v)}%'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const v = value.trim();
|
||||
if (!v) return;
|
||||
whereParts.push(`${col} ${op} '${escapeLiteral(v)}'`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : '';
|
||||
};
|
||||
89
frontend/wailsjs/go/app/App.d.ts
vendored
89
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -2,9 +2,12 @@
|
||||
// This file is automatically generated. DO NOT EDIT
|
||||
import {connection} from '../models';
|
||||
import {sync} from '../models';
|
||||
import {redis} from '../models';
|
||||
|
||||
export function ApplyChanges(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:connection.ChangeSet):Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckForUpdates():Promise<connection.QueryResult>;
|
||||
|
||||
export function CreateDatabase(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
@@ -29,14 +32,44 @@ export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,a
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
|
||||
export function DataSyncAnalyze(arg1:sync.SyncConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSyncPreview(arg1:sync.SyncConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function DownloadUpdate():Promise<connection.QueryResult>;
|
||||
|
||||
export function DropDatabase(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DropFunction(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DropTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DropView(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportData(arg1:Array<Record<string, any>>,arg2:Array<string>,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportDatabaseSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:boolean):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string,arg5:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTablesDataSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTablesSQL(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>,arg4:boolean):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetAppInfo():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportConfigFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportData(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportDataWithProgress(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function InstallUpdateAndRestart():Promise<connection.QueryResult>;
|
||||
|
||||
export function MongoDiscoverMembers(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function MySQLConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function MySQLGetDatabases(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
@@ -49,4 +82,60 @@ export function MySQLShowCreateTable(arg1:connection.ConnectionConfig,arg2:strin
|
||||
|
||||
export function OpenSQLFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function PreviewImportFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisDeleteHashField(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisDeleteKeys(arg1:connection.ConnectionConfig,arg2:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisExecuteCommand(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisFlushDB(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisGetDatabases(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisGetServerInfo(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisGetValue(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisListPush(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisListSet(arg1:connection.ConnectionConfig,arg2:string,arg3:number,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisRenameKey(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:number,arg4:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSelectDB(arg1:connection.ConnectionConfig,arg2:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSetAdd(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSetHashField(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSetRemove(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSetString(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSetTTL(arg1:connection.ConnectionConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisStreamAdd(arg1:connection.ConnectionConfig,arg2:string,arg3:Record<string, string>,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisStreamDelete(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisTestConnection(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisZSetAdd(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<redis.ZSetMember>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisZSetRemove(arg1:connection.ConnectionConfig,arg2:string,arg3:Array<string>):Promise<connection.QueryResult>;
|
||||
|
||||
export function RenameDatabase(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RenameTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RenameView(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SetWindowTranslucency(arg1:number,arg2:number):Promise<void>;
|
||||
|
||||
export function TestConnection(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -6,6 +6,10 @@ export function ApplyChanges(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ApplyChanges'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function CheckForUpdates() {
|
||||
return window['go']['app']['App']['CheckForUpdates']();
|
||||
}
|
||||
|
||||
export function CreateDatabase(arg1, arg2) {
|
||||
return window['go']['app']['App']['CreateDatabase'](arg1, arg2);
|
||||
}
|
||||
@@ -54,14 +58,62 @@ export function DataSync(arg1) {
|
||||
return window['go']['app']['App']['DataSync'](arg1);
|
||||
}
|
||||
|
||||
export function DataSyncAnalyze(arg1) {
|
||||
return window['go']['app']['App']['DataSyncAnalyze'](arg1);
|
||||
}
|
||||
|
||||
export function DataSyncPreview(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DataSyncPreview'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DownloadUpdate() {
|
||||
return window['go']['app']['App']['DownloadUpdate']();
|
||||
}
|
||||
|
||||
export function DropDatabase(arg1, arg2) {
|
||||
return window['go']['app']['App']['DropDatabase'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function DropFunction(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DropFunction'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DropTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DropTable'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DropView(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DropView'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportData(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportData'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function ExportDatabaseSQL(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['ExportDatabaseSQL'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportQuery(arg1, arg2, arg3, arg4, arg5) {
|
||||
return window['go']['app']['App']['ExportQuery'](arg1, arg2, arg3, arg4, arg5);
|
||||
}
|
||||
|
||||
export function ExportTable(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportTable'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function ExportTablesDataSQL(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['ExportTablesDataSQL'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportTablesSQL(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportTablesSQL'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function GetAppInfo() {
|
||||
return window['go']['app']['App']['GetAppInfo']();
|
||||
}
|
||||
|
||||
export function ImportConfigFile() {
|
||||
return window['go']['app']['App']['ImportConfigFile']();
|
||||
}
|
||||
@@ -70,6 +122,18 @@ export function ImportData(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['ImportData'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ImportDataWithProgress(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ImportDataWithProgress'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function InstallUpdateAndRestart() {
|
||||
return window['go']['app']['App']['InstallUpdateAndRestart']();
|
||||
}
|
||||
|
||||
export function MongoDiscoverMembers(arg1) {
|
||||
return window['go']['app']['App']['MongoDiscoverMembers'](arg1);
|
||||
}
|
||||
|
||||
export function MySQLConnect(arg1) {
|
||||
return window['go']['app']['App']['MySQLConnect'](arg1);
|
||||
}
|
||||
@@ -94,6 +158,118 @@ export function OpenSQLFile() {
|
||||
return window['go']['app']['App']['OpenSQLFile']();
|
||||
}
|
||||
|
||||
export function PreviewImportFile(arg1) {
|
||||
return window['go']['app']['App']['PreviewImportFile'](arg1);
|
||||
}
|
||||
|
||||
export function RedisConnect(arg1) {
|
||||
return window['go']['app']['App']['RedisConnect'](arg1);
|
||||
}
|
||||
|
||||
export function RedisDeleteHashField(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisDeleteHashField'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisDeleteKeys(arg1, arg2) {
|
||||
return window['go']['app']['App']['RedisDeleteKeys'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RedisExecuteCommand(arg1, arg2) {
|
||||
return window['go']['app']['App']['RedisExecuteCommand'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RedisFlushDB(arg1) {
|
||||
return window['go']['app']['App']['RedisFlushDB'](arg1);
|
||||
}
|
||||
|
||||
export function RedisGetDatabases(arg1) {
|
||||
return window['go']['app']['App']['RedisGetDatabases'](arg1);
|
||||
}
|
||||
|
||||
export function RedisGetServerInfo(arg1) {
|
||||
return window['go']['app']['App']['RedisGetServerInfo'](arg1);
|
||||
}
|
||||
|
||||
export function RedisGetValue(arg1, arg2) {
|
||||
return window['go']['app']['App']['RedisGetValue'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RedisListPush(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisListPush'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisListSet(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RedisListSet'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RedisRenameKey(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisRenameKey'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisScanKeys(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RedisScanKeys'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RedisSelectDB(arg1, arg2) {
|
||||
return window['go']['app']['App']['RedisSelectDB'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function RedisSetAdd(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisSetAdd'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisSetHashField(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RedisSetHashField'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RedisSetRemove(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisSetRemove'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisSetString(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RedisSetString'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RedisSetTTL(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisSetTTL'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisStreamAdd(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RedisStreamAdd'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RedisStreamDelete(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisStreamDelete'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisTestConnection(arg1) {
|
||||
return window['go']['app']['App']['RedisTestConnection'](arg1);
|
||||
}
|
||||
|
||||
export function RedisZSetAdd(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisZSetAdd'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RedisZSetRemove(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RedisZSetRemove'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RenameDatabase(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['RenameDatabase'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function RenameTable(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RenameTable'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function RenameView(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['RenameView'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function SetWindowTranslucency(arg1, arg2) {
|
||||
return window['go']['app']['App']['SetWindowTranslucency'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function TestConnection(arg1) {
|
||||
return window['go']['app']['App']['TestConnection'](arg1);
|
||||
}
|
||||
|
||||
@@ -74,12 +74,26 @@ export namespace connection {
|
||||
port: number;
|
||||
user: string;
|
||||
password: string;
|
||||
savePassword?: boolean;
|
||||
database: string;
|
||||
useSSH: boolean;
|
||||
ssh: SSHConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
redisDB?: number;
|
||||
uri?: string;
|
||||
hosts?: string[];
|
||||
topology?: string;
|
||||
mysqlReplicaUser?: string;
|
||||
mysqlReplicaPassword?: string;
|
||||
replicaSet?: string;
|
||||
authSource?: string;
|
||||
readPreference?: string;
|
||||
mongoSrv?: boolean;
|
||||
mongoAuthMechanism?: string;
|
||||
mongoReplicaUser?: string;
|
||||
mongoReplicaPassword?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new ConnectionConfig(source);
|
||||
@@ -92,12 +106,26 @@ export namespace connection {
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
this.savePassword = source["savePassword"];
|
||||
this.database = source["database"];
|
||||
this.useSSH = source["useSSH"];
|
||||
this.ssh = this.convertValues(source["ssh"], SSHConfig);
|
||||
this.driver = source["driver"];
|
||||
this.dsn = source["dsn"];
|
||||
this.timeout = source["timeout"];
|
||||
this.redisDB = source["redisDB"];
|
||||
this.uri = source["uri"];
|
||||
this.hosts = source["hosts"];
|
||||
this.topology = source["topology"];
|
||||
this.mysqlReplicaUser = source["mysqlReplicaUser"];
|
||||
this.mysqlReplicaPassword = source["mysqlReplicaPassword"];
|
||||
this.replicaSet = source["replicaSet"];
|
||||
this.authSource = source["authSource"];
|
||||
this.readPreference = source["readPreference"];
|
||||
this.mongoSrv = source["mongoSrv"];
|
||||
this.mongoAuthMechanism = source["mongoAuthMechanism"];
|
||||
this.mongoReplicaUser = source["mongoReplicaUser"];
|
||||
this.mongoReplicaPassword = source["mongoReplicaPassword"];
|
||||
}
|
||||
|
||||
convertValues(a: any, classs: any, asMap: boolean = false): any {
|
||||
@@ -140,13 +168,58 @@ export namespace connection {
|
||||
|
||||
}
|
||||
|
||||
export namespace redis {
|
||||
|
||||
export class ZSetMember {
|
||||
member: string;
|
||||
score: number;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new ZSetMember(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.member = source["member"];
|
||||
this.score = source["score"];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export namespace sync {
|
||||
|
||||
export class TableOptions {
|
||||
insert?: boolean;
|
||||
update?: boolean;
|
||||
delete?: boolean;
|
||||
selectedInsertPks?: string[];
|
||||
selectedUpdatePks?: string[];
|
||||
selectedDeletePks?: string[];
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new TableOptions(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.insert = source["insert"];
|
||||
this.update = source["update"];
|
||||
this.delete = source["delete"];
|
||||
this.selectedInsertPks = source["selectedInsertPks"];
|
||||
this.selectedUpdatePks = source["selectedUpdatePks"];
|
||||
this.selectedDeletePks = source["selectedDeletePks"];
|
||||
}
|
||||
}
|
||||
export class SyncConfig {
|
||||
sourceConfig: connection.ConnectionConfig;
|
||||
targetConfig: connection.ConnectionConfig;
|
||||
tables: string[];
|
||||
content?: string;
|
||||
mode: string;
|
||||
jobId?: string;
|
||||
autoAddColumns?: boolean;
|
||||
tableOptions?: Record<string, TableOptions>;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new SyncConfig(source);
|
||||
@@ -157,7 +230,11 @@ export namespace sync {
|
||||
this.sourceConfig = this.convertValues(source["sourceConfig"], connection.ConnectionConfig);
|
||||
this.targetConfig = this.convertValues(source["targetConfig"], connection.ConnectionConfig);
|
||||
this.tables = source["tables"];
|
||||
this.content = source["content"];
|
||||
this.mode = source["mode"];
|
||||
this.jobId = source["jobId"];
|
||||
this.autoAddColumns = source["autoAddColumns"];
|
||||
this.tableOptions = this.convertValues(source["tableOptions"], TableOptions, true);
|
||||
}
|
||||
|
||||
convertValues(a: any, classs: any, asMap: boolean = false): any {
|
||||
|
||||
30
go.mod
30
go.mod
@@ -6,23 +6,37 @@ require (
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3
|
||||
gitee.com/chunanyong/dm v1.8.22
|
||||
github.com/go-sql-driver/mysql v1.9.3
|
||||
github.com/highgo/pq-sm3 v0.0.0
|
||||
github.com/lib/pq v1.11.1
|
||||
github.com/microsoft/go-mssqldb v1.9.6
|
||||
github.com/redis/go-redis/v9 v9.17.3
|
||||
github.com/sijms/go-ora/v2 v2.9.0
|
||||
github.com/taosdata/driver-go/v3 v3.7.8
|
||||
github.com/wailsapp/wails/v2 v2.11.0
|
||||
github.com/xuri/excelize/v2 v2.10.0
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
golang.org/x/text v0.33.0
|
||||
modernc.org/sqlite v1.44.3
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/bep/debounce v1.2.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/godbus/dbus/v5 v5.1.0 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
|
||||
github.com/golang-sql/sqlexp v0.1.0 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.17.6 // indirect
|
||||
github.com/labstack/echo/v4 v4.13.3 // indirect
|
||||
github.com/labstack/gommon v0.4.2 // indirect
|
||||
github.com/leaanthony/go-ansi-parser v1.6.1 // indirect
|
||||
@@ -31,22 +45,36 @@ require (
|
||||
github.com/leaanthony/u v1.1.1 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/richardlehane/mscfb v1.0.4 // indirect
|
||||
github.com/richardlehane/msoleps v1.0.4 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/samber/lo v1.49.1 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/tiendc/go-deepcopy v1.7.1 // indirect
|
||||
github.com/tkrajina/go-reflector v0.5.8 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/valyala/fasttemplate v1.2.2 // indirect
|
||||
github.com/wailsapp/go-webview2 v1.0.22 // indirect
|
||||
github.com/wailsapp/mimetype v1.4.1 // indirect
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||
github.com/xdg-go/scram v1.2.0 // indirect
|
||||
github.com/xdg-go/stringprep v1.0.4 // indirect
|
||||
github.com/xuri/efp v0.0.1 // indirect
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 // indirect
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
|
||||
golang.org/x/net v0.48.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
golang.org/x/text v0.33.0 // indirect
|
||||
modernc.org/libc v1.67.6 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
)
|
||||
|
||||
replace github.com/highgo/pq-sm3 => ./third_party/highgo-pq
|
||||
|
||||
111
go.sum
111
go.sum
@@ -4,10 +4,31 @@ gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3 h1:QjslQNaH5Nuap5i4ni
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3/go.mod h1:7lH5A1jzCXD9Nl16DzaBUOfDAT8NPrDmZwKu1p5wf94=
|
||||
gitee.com/chunanyong/dm v1.8.22 h1:H7fsrnUIvEA0jlDWew7vwELry1ff+tLMIu2Fk2cIBSg=
|
||||
gitee.com/chunanyong/dm v1.8.22/go.mod h1:EPRJnuPFgbyOFgJ0TRYCTGzhq+ZT4wdyaj/GW/LLcNg=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1 h1:Wgf5rZba3YZqeTNJPtvqZoBu1sBN/L4sry+u2U3Y75w=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.3.1/go.mod h1:xxCBG/f/4Vbmh2XQJBsOmNdxWUY5j/s27jujKPbQf14=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuoEKg+gImo7pvkiQEFAc8ocibADgXeiLAxWhWmkI=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY=
|
||||
github.com/bep/debounce v1.2.1/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
|
||||
@@ -16,19 +37,37 @@ github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
|
||||
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
|
||||
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e h1:Q3+PugElBCf4PFpxhErSzU3/PY5sFL5Z6rfv4AbGAck=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI=
|
||||
github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/labstack/echo/v4 v4.13.3 h1:pwhpCPrTl5qry5HRdM5FwdXnhXSLSY+WE+YQSeCaafY=
|
||||
github.com/labstack/echo/v4 v4.13.3/go.mod h1:o90YNEeQWjDozo584l7AwhJMHN0bOC4tAfg+Xox9q5g=
|
||||
github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
|
||||
@@ -53,6 +92,12 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/microsoft/go-mssqldb v1.9.6 h1:1MNQg5UiSsokiPz3++K2KPx4moKrwIqly1wv+RyCKTw=
|
||||
github.com/microsoft/go-mssqldb v1.9.6/go.mod h1:yYMPDufyoF2vVuVCUGtZARr06DKFIhMrluTcgWlXpr4=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
@@ -61,17 +106,38 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/redis/go-redis/v9 v9.17.3 h1:fN29NdNrE17KttK5Ndf20buqfDZwGNgoUr9qjl1DQx4=
|
||||
github.com/redis/go-redis/v9 v9.17.3/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/richardlehane/mscfb v1.0.4 h1:WULscsljNPConisD5hR0+OyZjwK46Pfyr6mPu5ZawpM=
|
||||
github.com/richardlehane/mscfb v1.0.4/go.mod h1:YzVpcZg9czvAuhk9T+a3avCpcFPMUWm7gK3DypaEsUk=
|
||||
github.com/richardlehane/msoleps v1.0.1/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
|
||||
github.com/richardlehane/msoleps v1.0.4 h1:WuESlvhX3gH2IHcd8UqyCuFY5yiq/GR/yqaSM/9/g00=
|
||||
github.com/richardlehane/msoleps v1.0.4/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
|
||||
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
|
||||
github.com/sijms/go-ora/v2 v2.9.0/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYoCqhR2dU=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8 h1:N2H6HLLZH2ve2ipcoFgG9BJS+yW0XksqNYwEdSmHaJk=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8/go.mod h1:gSxBEPOueMg0rTmMO1Ug6aeD7AwGdDGvUtLrsDTTpYc=
|
||||
github.com/tiendc/go-deepcopy v1.7.1 h1:LnubftI6nYaaMOcaz0LphzwraqN8jiWTwm416sitff4=
|
||||
github.com/tiendc/go-deepcopy v1.7.1/go.mod h1:4bKjNC2r7boYOkD2IOuZpYjmlDdzjbpTRyCx+goBCJQ=
|
||||
github.com/tkrajina/go-reflector v0.5.8 h1:yPADHrwmUbMq4RGEyaOUpz2H90sRsETNVpjzo3DLVQQ=
|
||||
github.com/tkrajina/go-reflector v0.5.8/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
@@ -84,35 +150,76 @@ github.com/wailsapp/mimetype v1.4.1 h1:pQN9ycO7uo4vsUUuPeHEYoUkLVkaRntMnHJxVwYhw
|
||||
github.com/wailsapp/mimetype v1.4.1/go.mod h1:9aV5k31bBOv5z6u+QP8TltzvNGJPmNJD4XlAL3U+j3o=
|
||||
github.com/wailsapp/wails/v2 v2.11.0 h1:seLacV8pqupq32IjS4Y7V8ucab0WZwtK6VvUVxSBtqQ=
|
||||
github.com/wailsapp/wails/v2 v2.11.0/go.mod h1:jrf0ZaM6+GBc1wRmXsM8cIvzlg0karYin3erahI4+0k=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.2.0 h1:bYKF2AEwG5rqd1BumT4gAnvwU/M9nBp2pTSxeZw7Wvs=
|
||||
github.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=
|
||||
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
||||
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
||||
github.com/xuri/efp v0.0.1 h1:fws5Rv3myXyYni8uwj2qKjVaRP30PdjeYe2Y6FDsCL8=
|
||||
github.com/xuri/efp v0.0.1/go.mod h1:ybY/Jr0T0GTCnYjKqmdwxyxn2BQf2RcQIIvex5QldPI=
|
||||
github.com/xuri/excelize/v2 v2.10.0 h1:8aKsP7JD39iKLc6dH5Tw3dgV3sPRh8uRVXu/fMstfW4=
|
||||
github.com/xuri/excelize/v2 v2.10.0/go.mod h1:SC5TzhQkaOsTWpANfm+7bJCldzcnU/jrhqkTi/iBHBU=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 h1:+C0TIdyyYmzadGaL/HBLbf3WdLgC29pgyhTjAT/0nuE=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
|
||||
golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ=
|
||||
golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
||||
|
||||
@@ -10,23 +10,33 @@ import (
|
||||
"net"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
)
|
||||
|
||||
const dbCachePingInterval = 30 * time.Second
|
||||
|
||||
type cachedDatabase struct {
|
||||
inst db.Database
|
||||
lastPing time.Time
|
||||
}
|
||||
|
||||
// App struct
|
||||
type App struct {
|
||||
ctx context.Context
|
||||
dbCache map[string]db.Database // Cache for DB connections
|
||||
mu sync.Mutex // Mutex for cache access
|
||||
ctx context.Context
|
||||
dbCache map[string]cachedDatabase // Cache for DB connections
|
||||
mu sync.RWMutex // Mutex for cache access
|
||||
updateMu sync.Mutex
|
||||
updateState updateState
|
||||
}
|
||||
|
||||
// NewApp creates a new App application struct
|
||||
func NewApp() *App {
|
||||
return &App{
|
||||
dbCache: make(map[string]db.Database),
|
||||
dbCache: make(map[string]cachedDatabase),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,19 +45,29 @@ func NewApp() *App {
|
||||
func (a *App) Startup(ctx context.Context) {
|
||||
a.ctx = ctx
|
||||
logger.Init()
|
||||
applyMacWindowTranslucencyFix()
|
||||
logger.Infof("应用启动完成")
|
||||
}
|
||||
|
||||
// SetWindowTranslucency 动态调整 macOS 窗口透明度。
|
||||
// 前端在加载用户外观设置后、以及用户修改外观时调用此方法。
|
||||
// opacity=1.0 且 blur=0 时窗口标记为 opaque,GPU 不再持续计算窗口背后的模糊合成。
|
||||
func (a *App) SetWindowTranslucency(opacity float64, blur float64) {
|
||||
setMacWindowTranslucency(opacity, blur)
|
||||
}
|
||||
|
||||
// Shutdown is called when the app terminates
|
||||
func (a *App) Shutdown(ctx context.Context) {
|
||||
logger.Infof("应用开始关闭,准备释放资源")
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
for _, dbInst := range a.dbCache {
|
||||
if err := dbInst.Close(); err != nil {
|
||||
if err := dbInst.inst.Close(); err != nil {
|
||||
logger.Error(err, "关闭数据库连接失败")
|
||||
}
|
||||
}
|
||||
// Close all Redis connections
|
||||
CloseAllRedisClients()
|
||||
logger.Infof("资源释放完成,应用已关闭")
|
||||
logger.Close()
|
||||
}
|
||||
@@ -90,10 +110,11 @@ type withLogHint struct {
|
||||
}
|
||||
|
||||
func (e withLogHint) Error() string {
|
||||
message := normalizeErrorMessage(e.err)
|
||||
if strings.TrimSpace(e.logPath) == "" {
|
||||
return e.err.Error()
|
||||
return message
|
||||
}
|
||||
return fmt.Sprintf("%s(详细日志:%s)", e.err.Error(), e.logPath)
|
||||
return fmt.Sprintf("%s(详细日志:%s)", message, e.logPath)
|
||||
}
|
||||
|
||||
func (e withLogHint) Unwrap() error {
|
||||
@@ -115,6 +136,33 @@ func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
b.WriteString(fmt.Sprintf("类型=%s 地址=%s:%d 数据库=%s 用户=%s 超时=%ds",
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds))
|
||||
|
||||
if len(config.Hosts) > 0 {
|
||||
b.WriteString(fmt.Sprintf(" 节点数=%d", len(config.Hosts)))
|
||||
}
|
||||
if strings.TrimSpace(config.Topology) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 拓扑=%s", strings.TrimSpace(config.Topology)))
|
||||
}
|
||||
if strings.TrimSpace(config.URI) != "" {
|
||||
b.WriteString(fmt.Sprintf(" URI=已配置(长度=%d)", len(config.URI)))
|
||||
}
|
||||
if strings.TrimSpace(config.MySQLReplicaUser) != "" {
|
||||
b.WriteString(" MySQL从库凭据=已配置")
|
||||
}
|
||||
if strings.EqualFold(strings.TrimSpace(config.Type), "mongodb") {
|
||||
if strings.TrimSpace(config.MongoReplicaUser) != "" {
|
||||
b.WriteString(" Mongo从库凭据=已配置")
|
||||
}
|
||||
if strings.TrimSpace(config.ReplicaSet) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 副本集=%s", strings.TrimSpace(config.ReplicaSet)))
|
||||
}
|
||||
if strings.TrimSpace(config.ReadPreference) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 读偏好=%s", strings.TrimSpace(config.ReadPreference)))
|
||||
}
|
||||
if strings.TrimSpace(config.AuthSource) != "" {
|
||||
b.WriteString(fmt.Sprintf(" 认证库=%s", strings.TrimSpace(config.AuthSource)))
|
||||
}
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(fmt.Sprintf(" SSH=%s:%d 用户=%s", config.SSH.Host, config.SSH.Port, config.SSH.User))
|
||||
}
|
||||
@@ -134,35 +182,63 @@ func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func (a *App) getDatabaseForcePing(config connection.ConnectionConfig) (db.Database, error) {
|
||||
return a.getDatabaseWithPing(config, true)
|
||||
}
|
||||
|
||||
// Helper: Get or create a database connection
|
||||
func (a *App) getDatabase(config connection.ConnectionConfig) (db.Database, error) {
|
||||
return a.getDatabaseWithPing(config, false)
|
||||
}
|
||||
|
||||
func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing bool) (db.Database, error) {
|
||||
key := getCacheKey(config)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
if config.UseSSH && config.Type != "mysql" {
|
||||
logger.Warnf("当前仅 MySQL 支持内置 SSH 直连,其他类型请使用本地端口转发:%s", formatConnSummary(config))
|
||||
}
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
a.mu.RLock()
|
||||
entry, ok := a.dbCache[key]
|
||||
a.mu.RUnlock()
|
||||
if ok {
|
||||
needPing := forcePing
|
||||
if !needPing {
|
||||
lastPing := entry.lastPing
|
||||
if lastPing.IsZero() || time.Since(lastPing) >= dbCachePingInterval {
|
||||
needPing = true
|
||||
}
|
||||
}
|
||||
|
||||
if dbInst, ok := a.dbCache[key]; ok {
|
||||
logger.Infof("命中连接缓存,开始检测可用性:缓存Key=%s", shortKey)
|
||||
if err := dbInst.Ping(); err == nil {
|
||||
logger.Infof("缓存连接可用:缓存Key=%s", shortKey)
|
||||
return dbInst, nil
|
||||
if !needPing {
|
||||
return entry.inst, nil
|
||||
}
|
||||
|
||||
if err := entry.inst.Ping(); err == nil {
|
||||
// Update lastPing (best effort)
|
||||
a.mu.Lock()
|
||||
if cur, exists := a.dbCache[key]; exists && cur.inst == entry.inst {
|
||||
cur.lastPing = time.Now()
|
||||
a.dbCache[key] = cur
|
||||
}
|
||||
a.mu.Unlock()
|
||||
return entry.inst, nil
|
||||
} else {
|
||||
logger.Error(err, "缓存连接不可用,准备重建:缓存Key=%s", shortKey)
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
}
|
||||
if err := dbInst.Close(); err != nil {
|
||||
logger.Error(err, "关闭失效缓存连接失败:缓存Key=%s", shortKey)
|
||||
|
||||
// Ping failed: remove cached instance (best effort)
|
||||
a.mu.Lock()
|
||||
if cur, exists := a.dbCache[key]; exists && cur.inst == entry.inst {
|
||||
if err := cur.inst.Close(); err != nil {
|
||||
logger.Error(err, "关闭失效缓存连接失败:缓存Key=%s", shortKey)
|
||||
}
|
||||
delete(a.dbCache, key)
|
||||
}
|
||||
delete(a.dbCache, key)
|
||||
a.mu.Unlock()
|
||||
}
|
||||
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(config.Type)
|
||||
if err != nil {
|
||||
@@ -176,7 +252,18 @@ func (a *App) getDatabase(config connection.ConnectionConfig) (db.Database, erro
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
a.dbCache[key] = dbInst
|
||||
now := time.Now()
|
||||
|
||||
a.mu.Lock()
|
||||
if existing, exists := a.dbCache[key]; exists && existing.inst != nil {
|
||||
a.mu.Unlock()
|
||||
// Prefer existing cached connection to avoid cache racing duplicates.
|
||||
_ = dbInst.Close()
|
||||
return existing.inst, nil
|
||||
}
|
||||
a.dbCache[key] = cachedDatabase{inst: dbInst, lastPing: now}
|
||||
a.mu.Unlock()
|
||||
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
58
internal/app/db_context.go
Normal file
58
internal/app/db_context.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func normalizeRunConfig(config connection.ConnectionConfig, dbName string) connection.ConnectionConfig {
|
||||
runConfig := config
|
||||
name := strings.TrimSpace(dbName)
|
||||
if name == "" {
|
||||
return runConfig
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "mysql", "mariadb", "sphinx", "postgres", "kingbase", "highgo", "vastbase", "sqlserver", "mongodb", "tdengine":
|
||||
// 这些类型的 dbName 表示"数据库",需要写入连接配置以选择目标库。
|
||||
runConfig.Database = name
|
||||
case "dameng":
|
||||
// 达梦使用 schema 参数,沿用现有行为:dbName 表示 schema。
|
||||
runConfig.Database = name
|
||||
default:
|
||||
// oracle: dbName 表示 schema/owner,不能覆盖 config.Database(服务名)
|
||||
// sqlite: 无需设置 Database
|
||||
// custom: 语义不明确,避免污染缓存 key
|
||||
}
|
||||
|
||||
return runConfig
|
||||
}
|
||||
|
||||
func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string, tableName string) (string, string) {
|
||||
rawTable := strings.TrimSpace(tableName)
|
||||
rawDB := strings.TrimSpace(dbName)
|
||||
if rawTable == "" {
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
if schema != "" && table != "" {
|
||||
return schema, table
|
||||
}
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
// PG/金仓/瀚高/海量:dbName 在 UI 里是"数据库",schema 需从 tableName 或使用默认 public。
|
||||
return "public", rawTable
|
||||
case "sqlserver":
|
||||
// SQL Server:dbName 表示数据库,schema 默认 dbo
|
||||
return "dbo", rawTable
|
||||
default:
|
||||
// MySQL:dbName 表示数据库;Oracle/达梦:dbName 表示 schema/owner。
|
||||
return rawDB, rawTable
|
||||
}
|
||||
}
|
||||
100
internal/app/error_text.go
Normal file
100
internal/app/error_text.go
Normal file
@@ -0,0 +1,100 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/text/encoding/simplifiedchinese"
|
||||
"golang.org/x/text/transform"
|
||||
)
|
||||
|
||||
func normalizeErrorMessage(err error) string {
|
||||
if err == nil {
|
||||
return ""
|
||||
}
|
||||
return normalizeMixedEncodingText(err.Error())
|
||||
}
|
||||
|
||||
func normalizeMixedEncodingText(text string) string {
|
||||
if text == "" {
|
||||
return text
|
||||
}
|
||||
|
||||
raw := []byte(text)
|
||||
output := make([]byte, 0, len(raw)+16)
|
||||
suspect := make([]byte, 0, 16)
|
||||
|
||||
flushSuspect := func() {
|
||||
if len(suspect) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
fallback := strings.ToValidUTF8(string(suspect), "<22>")
|
||||
decoded, _, err := transform.Bytes(simplifiedchinese.GB18030.NewDecoder(), suspect)
|
||||
if err == nil && utf8.Valid(decoded) {
|
||||
candidate := string(decoded)
|
||||
if scoreDecodedText(candidate) > scoreDecodedText(fallback) {
|
||||
output = append(output, []byte(candidate)...)
|
||||
} else {
|
||||
output = append(output, []byte(fallback)...)
|
||||
}
|
||||
} else {
|
||||
output = append(output, []byte(fallback)...)
|
||||
}
|
||||
|
||||
suspect = suspect[:0]
|
||||
}
|
||||
|
||||
for len(raw) > 0 {
|
||||
r, size := utf8.DecodeRune(raw)
|
||||
if r == utf8.RuneError && size == 1 {
|
||||
suspect = append(suspect, raw[0])
|
||||
raw = raw[1:]
|
||||
continue
|
||||
}
|
||||
|
||||
if isLikelyMojibakeRune(r) {
|
||||
suspect = append(suspect, raw[:size]...)
|
||||
} else {
|
||||
flushSuspect()
|
||||
output = append(output, raw[:size]...)
|
||||
}
|
||||
raw = raw[size:]
|
||||
}
|
||||
|
||||
flushSuspect()
|
||||
return string(output)
|
||||
}
|
||||
|
||||
// isLikelyMojibakeRune reports whether r commonly appears when GBK/GB18030
// bytes are misread as Latin-1/UTF-8: the replacement rune, Latin-1
// supplement / Latin extended letters (U+00C0..U+02FF), or characters from
// the Hebrew, Arabic, Cyrillic, or Greek scripts.
func isLikelyMojibakeRune(r rune) bool {
	switch {
	case r == utf8.RuneError:
		return true
	case r >= 0x00C0 && r <= 0x02FF:
		return true
	default:
		return unicode.In(r, unicode.Hebrew, unicode.Arabic, unicode.Cyrillic, unicode.Greek)
	}
}
|
||||
|
||||
func scoreDecodedText(text string) int {
|
||||
score := 0
|
||||
for _, r := range text {
|
||||
switch {
|
||||
case r == '<27>':
|
||||
score -= 6
|
||||
case unicode.Is(unicode.Han, r):
|
||||
score += 4
|
||||
case isLikelyMojibakeRune(r):
|
||||
score -= 3
|
||||
case unicode.IsPrint(r):
|
||||
score += 1
|
||||
default:
|
||||
score -= 2
|
||||
}
|
||||
}
|
||||
return score
|
||||
}
|
||||
25
internal/app/error_text_test.go
Normal file
25
internal/app/error_text_test.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package app
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestNormalizeMixedEncodingText_GBKErrorMessage(t *testing.T) {
|
||||
raw := []byte("pq: ")
|
||||
raw = append(raw, 0xD3, 0xC3, 0xBB, 0xA7) // 用户
|
||||
raw = append(raw, []byte(` "root" Password `)...)
|
||||
raw = append(raw, 0xC8, 0xCF, 0xD6, 0xA4, 0xCA, 0xA7, 0xB0, 0xDC) // 认证失败
|
||||
raw = append(raw, []byte(" (28P01)")...)
|
||||
|
||||
got := normalizeMixedEncodingText(string(raw))
|
||||
want := `pq: 用户 "root" Password 认证失败 (28P01)`
|
||||
if got != want {
|
||||
t.Fatalf("normalizeMixedEncodingText() mismatch\nwant: %q\ngot: %q", want, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeMixedEncodingText_KeepUTF8(t *testing.T) {
|
||||
input := `连接建立后验证失败:pq: password authentication failed for user "root"`
|
||||
got := normalizeMixedEncodingText(input)
|
||||
if got != input {
|
||||
t.Fatalf("expected unchanged utf8 text, got: %q", got)
|
||||
}
|
||||
}
|
||||
@@ -1,41 +1,79 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
)
|
||||
|
||||
// Generic DB Methods
|
||||
|
||||
func (a *App) DBConnect(config connection.ConnectionConfig) connection.QueryResult {
|
||||
// getDatabase checks cache and Pings. If valid, reuses. If not, connects.
|
||||
_, err := a.getDatabase(config)
|
||||
// 连接测试需要强制 ping,避免缓存命中但连接已失效时误判成功。
|
||||
_, err := a.getDatabaseForcePing(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBConnect 连接失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
|
||||
logger.Infof("DBConnect 连接成功:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
func (a *App) TestConnection(config connection.ConnectionConfig) connection.QueryResult {
|
||||
_, err := a.getDatabase(config)
|
||||
_, err := a.getDatabaseForcePing(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "TestConnection 连接测试失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
|
||||
logger.Infof("TestConnection 连接测试成功:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
func (a *App) MongoDiscoverMembers(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "mongodb"
|
||||
|
||||
dbInst, err := a.getDatabaseForcePing(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "MongoDiscoverMembers 获取连接失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
discoverable, ok := dbInst.(interface {
|
||||
DiscoverMembers() (string, []connection.MongoMemberInfo, error)
|
||||
})
|
||||
if !ok {
|
||||
return connection.QueryResult{Success: false, Message: "当前 MongoDB 驱动不支持成员发现"}
|
||||
}
|
||||
|
||||
replicaSet, members, err := discoverable.DiscoverMembers()
|
||||
if err != nil {
|
||||
logger.Error(err, "MongoDiscoverMembers 执行失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
data := map[string]interface{}{
|
||||
"replicaSet": replicaSet,
|
||||
"members": members,
|
||||
}
|
||||
|
||||
logger.Infof("MongoDiscoverMembers 成功:%s 成员数=%d 副本集=%s", formatConnSummary(config), len(members), replicaSet)
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: fmt.Sprintf("发现 %d 个成员", len(members)),
|
||||
Data: data,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
runConfig.Database = ""
|
||||
runConfig.Database = ""
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -44,9 +82,16 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
|
||||
escapedDbName := strings.ReplaceAll(dbName, "`", "``")
|
||||
query := fmt.Sprintf("CREATE DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci", escapedDbName)
|
||||
if runConfig.Type == "postgres" {
|
||||
dbType := strings.ToLower(strings.TrimSpace(runConfig.Type))
|
||||
if dbType == "postgres" || dbType == "kingbase" || dbType == "highgo" || dbType == "vastbase" {
|
||||
escapedDbName = strings.ReplaceAll(dbName, `"`, `""`)
|
||||
query = fmt.Sprintf("CREATE DATABASE \"%s\"", escapedDbName)
|
||||
} else if dbType == "tdengine" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "mariadb" {
|
||||
// MariaDB uses same syntax as MySQL
|
||||
} else if dbType == "sphinx" {
|
||||
return connection.QueryResult{Success: false, Message: "Sphinx 暂不支持创建数据库"}
|
||||
}
|
||||
|
||||
_, err = dbInst.Exec(query)
|
||||
@@ -57,6 +102,232 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: true, Message: "Database created successfully"}
|
||||
}
|
||||
|
||||
func resolveDDLDBType(config connection.ConnectionConfig) string {
|
||||
dbType := strings.ToLower(strings.TrimSpace(config.Type))
|
||||
if dbType != "custom" {
|
||||
return dbType
|
||||
}
|
||||
|
||||
driver := strings.ToLower(strings.TrimSpace(config.Driver))
|
||||
switch driver {
|
||||
case "postgresql":
|
||||
return "postgres"
|
||||
case "dm":
|
||||
return "dameng"
|
||||
case "sqlite3":
|
||||
return "sqlite"
|
||||
case "sphinxql":
|
||||
return "sphinx"
|
||||
default:
|
||||
return driver
|
||||
}
|
||||
}
|
||||
|
||||
// normalizeSchemaAndTableByType splits tableName into (schema, table) for an
// already-resolved dialect name: an explicit "schema.table" qualifier wins,
// PG-family dialects default to the "public" schema, and every other dialect
// uses dbName as the schema.
func normalizeSchemaAndTableByType(dbType string, dbName string, tableName string) (string, string) {
	table := strings.TrimSpace(tableName)
	db := strings.TrimSpace(dbName)
	if table == "" {
		return db, table
	}

	// Honor an explicit qualifier such as "sales.orders".
	if dot := strings.Index(table, "."); dot >= 0 {
		prefix := strings.TrimSpace(table[:dot])
		suffix := strings.TrimSpace(table[dot+1:])
		if prefix != "" && suffix != "" {
			return prefix, suffix
		}
	}

	if dbType == "postgres" || dbType == "kingbase" || dbType == "highgo" || dbType == "vastbase" {
		return "public", table
	}
	return db, table
}
|
||||
|
||||
func quoteTableIdentByType(dbType string, schema string, table string) string {
|
||||
s := strings.TrimSpace(schema)
|
||||
t := strings.TrimSpace(table)
|
||||
if s == "" {
|
||||
return quoteIdentByType(dbType, t)
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", quoteIdentByType(dbType, s), quoteIdentByType(dbType, t))
|
||||
}
|
||||
|
||||
func buildRunConfigForDDL(config connection.ConnectionConfig, dbType string, dbName string) connection.ConnectionConfig {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
if strings.EqualFold(strings.TrimSpace(config.Type), "custom") {
|
||||
// custom 连接的 dbName 语义依赖 driver,尽量在常见驱动上对齐内置类型行为。
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx", "postgres", "kingbase", "vastbase", "dameng":
|
||||
if strings.TrimSpace(dbName) != "" {
|
||||
runConfig.Database = strings.TrimSpace(dbName)
|
||||
}
|
||||
}
|
||||
}
|
||||
return runConfig
|
||||
}
|
||||
|
||||
func (a *App) RenameDatabase(config connection.ConnectionConfig, oldName string, newName string) connection.QueryResult {
|
||||
oldName = strings.TrimSpace(oldName)
|
||||
newName = strings.TrimSpace(newName)
|
||||
if oldName == "" || newName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "数据库名称不能为空"}
|
||||
}
|
||||
if strings.EqualFold(oldName, newName) {
|
||||
return connection.QueryResult{Success: false, Message: "新旧数据库名称不能相同"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx":
|
||||
return connection.QueryResult{Success: false, Message: "MySQL/MariaDB/Sphinx 不支持直接重命名数据库,请新建库后迁移数据"}
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
if strings.EqualFold(strings.TrimSpace(config.Database), oldName) {
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再重命名"}
|
||||
}
|
||||
runConfig := config
|
||||
if strings.TrimSpace(runConfig.Database) == "" {
|
||||
runConfig.Database = "postgres"
|
||||
}
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
sql := fmt.Sprintf("ALTER DATABASE %s RENAME TO %s", quoteIdentByType(dbType, oldName), quoteIdentByType(dbType, newName))
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "数据库重命名成功"}
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名数据库", dbType)}
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) DropDatabase(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
dbName = strings.TrimSpace(dbName)
|
||||
if dbName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "数据库名称不能为空"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
var (
|
||||
runConfig connection.ConnectionConfig
|
||||
sql string
|
||||
)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "tdengine":
|
||||
runConfig = config
|
||||
runConfig.Database = ""
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
if strings.EqualFold(strings.TrimSpace(config.Database), dbName) {
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再删除"}
|
||||
}
|
||||
runConfig = config
|
||||
if strings.TrimSpace(runConfig.Database) == "" {
|
||||
runConfig.Database = "postgres"
|
||||
}
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除数据库", dbType)}
|
||||
}
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "数据库删除成功"}
|
||||
}
|
||||
|
||||
func (a *App) RenameTable(config connection.ConnectionConfig, dbName string, oldTableName string, newTableName string) connection.QueryResult {
|
||||
oldTableName = strings.TrimSpace(oldTableName)
|
||||
newTableName = strings.TrimSpace(newTableName)
|
||||
if oldTableName == "" || newTableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "表名不能为空"}
|
||||
}
|
||||
if strings.EqualFold(oldTableName, newTableName) {
|
||||
return connection.QueryResult{Success: false, Message: "新旧表名不能相同"}
|
||||
}
|
||||
if strings.Contains(newTableName, ".") {
|
||||
return connection.QueryResult{Success: false, Message: "新表名不能包含 schema 或数据库前缀"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx", "postgres", "kingbase", "sqlite", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名表", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureOldTableName := normalizeSchemaAndTableByType(dbType, dbName, oldTableName)
|
||||
if pureOldTableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "旧表名不能为空"}
|
||||
}
|
||||
oldQualifiedTable := quoteTableIdentByType(dbType, schemaName, pureOldTableName)
|
||||
newTableQuoted := quoteIdentByType(dbType, newTableName)
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx":
|
||||
newQualifiedTable := quoteTableIdentByType(dbType, schemaName, newTableName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualifiedTable, newQualifiedTable)
|
||||
case "sqlserver":
|
||||
// SQL Server 使用 sp_rename,参数为 'schema.oldname', 'newname'
|
||||
oldFullName := schemaName + "." + pureOldTableName
|
||||
escapedOld := strings.ReplaceAll(oldFullName, "'", "''")
|
||||
escapedNew := strings.ReplaceAll(newTableName, "'", "''")
|
||||
sql = fmt.Sprintf("EXEC sp_rename '%s', '%s'", escapedOld, escapedNew)
|
||||
default:
|
||||
sql = fmt.Sprintf("ALTER TABLE %s RENAME TO %s", oldQualifiedTable, newTableQuoted)
|
||||
}
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "表重命名成功"}
|
||||
}
|
||||
|
||||
func (a *App) DropTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
tableName = strings.TrimSpace(tableName)
|
||||
if tableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "表名不能为空"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx", "postgres", "kingbase", "sqlite", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "tdengine":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除表", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureTableName := normalizeSchemaAndTableByType(dbType, dbName, tableName)
|
||||
if pureTableName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "表名不能为空"}
|
||||
}
|
||||
qualifiedTable := quoteTableIdentByType(dbType, schemaName, pureTableName)
|
||||
sql := fmt.Sprintf("DROP TABLE %s", qualifiedTable)
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "表删除成功"}
|
||||
}
|
||||
|
||||
func (a *App) MySQLConnect(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "mysql"
|
||||
return a.DBConnect(config)
|
||||
@@ -83,10 +354,7 @@ func (a *App) MySQLShowCreateTable(config connection.ConnectionConfig, dbName st
|
||||
}
|
||||
|
||||
func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -94,16 +362,44 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
query = sanitizeSQLForPgLike(runConfig.Type, query)
|
||||
timeoutSeconds := runConfig.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
|
||||
defer cancel()
|
||||
|
||||
lowerQuery := strings.TrimSpace(strings.ToLower(query))
|
||||
if strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain") {
|
||||
data, columns, err := dbInst.Query(query)
|
||||
isReadQuery := strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain")
|
||||
// MongoDB JSON 命令中的 find/count/aggregate 也属于读查询
|
||||
if !isReadQuery && strings.ToLower(strings.TrimSpace(runConfig.Type)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
|
||||
isReadQuery = true
|
||||
}
|
||||
if isReadQuery {
|
||||
var data []map[string]interface{}
|
||||
var columns []string
|
||||
if q, ok := dbInst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
data, columns, err = q.QueryContext(ctx, query)
|
||||
} else {
|
||||
data, columns, err = dbInst.Query(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns}
|
||||
} else {
|
||||
affected, err := dbInst.Exec(query)
|
||||
var affected int64
|
||||
if e, ok := dbInst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
affected, err = e.ExecContext(ctx, query)
|
||||
} else {
|
||||
affected, err = dbInst.Exec(query)
|
||||
}
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -133,20 +429,17 @@ func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.Quer
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
|
||||
var resData []map[string]string
|
||||
for _, name := range dbs {
|
||||
resData = append(resData, map[string]string{"Database": name})
|
||||
}
|
||||
|
||||
|
||||
return connection.QueryResult{Success: true, Data: resData}
|
||||
}
|
||||
|
||||
func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -169,10 +462,7 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -180,7 +470,8 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
sqlStr, err := dbInst.GetCreateStatement(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
sqlStr, err := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBShowCreateTable 获取建表语句失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -190,17 +481,15 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
}
|
||||
|
||||
func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
columns, err := dbInst.GetColumns(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
columns, err := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -209,17 +498,15 @@ func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, ta
|
||||
}
|
||||
|
||||
func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
indexes, err := dbInst.GetIndexes(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
indexes, err := dbInst.GetIndexes(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -228,17 +515,15 @@ func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, ta
|
||||
}
|
||||
|
||||
func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
fks, err := dbInst.GetForeignKeys(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
fks, err := dbInst.GetForeignKeys(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -247,17 +532,15 @@ func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string
|
||||
}
|
||||
|
||||
func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
triggers, err := dbInst.GetTriggers(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
triggers, err := dbInst.GetTriggers(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -265,12 +548,128 @@ func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, t
|
||||
return connection.QueryResult{Success: true, Data: triggers}
|
||||
}
|
||||
|
||||
func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
func (a *App) DropView(config connection.ConnectionConfig, dbName string, viewName string) connection.QueryResult {
|
||||
viewName = strings.TrimSpace(viewName)
|
||||
if viewName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "视图名称不能为空"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx", "postgres", "kingbase", "sqlite", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除视图", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureViewName := normalizeSchemaAndTableByType(dbType, dbName, viewName)
|
||||
if pureViewName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "视图名称不能为空"}
|
||||
}
|
||||
qualifiedView := quoteTableIdentByType(dbType, schemaName, pureViewName)
|
||||
sql := fmt.Sprintf("DROP VIEW %s", qualifiedView)
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "视图删除成功"}
|
||||
}
|
||||
|
||||
func (a *App) DropFunction(config connection.ConnectionConfig, dbName string, routineName string, routineType string) connection.QueryResult {
|
||||
routineName = strings.TrimSpace(routineName)
|
||||
routineType = strings.TrimSpace(strings.ToUpper(routineType))
|
||||
if routineName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "函数/存储过程名称不能为空"}
|
||||
}
|
||||
if routineType != "FUNCTION" && routineType != "PROCEDURE" {
|
||||
routineType = "FUNCTION"
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx", "postgres", "kingbase", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除函数/存储过程", dbType)}
|
||||
}
|
||||
|
||||
schemaName, pureName := normalizeSchemaAndTableByType(dbType, dbName, routineName)
|
||||
if pureName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "函数/存储过程名称不能为空"}
|
||||
}
|
||||
qualifiedName := quoteTableIdentByType(dbType, schemaName, pureName)
|
||||
sql := fmt.Sprintf("DROP %s %s", routineType, qualifiedName)
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
label := "函数"
|
||||
if routineType == "PROCEDURE" {
|
||||
label = "存储过程"
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: fmt.Sprintf("%s删除成功", label)}
|
||||
}
|
||||
|
||||
func (a *App) RenameView(config connection.ConnectionConfig, dbName string, oldName string, newName string) connection.QueryResult {
|
||||
oldName = strings.TrimSpace(oldName)
|
||||
newName = strings.TrimSpace(newName)
|
||||
if oldName == "" || newName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "视图名称不能为空"}
|
||||
}
|
||||
if strings.EqualFold(oldName, newName) {
|
||||
return connection.QueryResult{Success: false, Message: "新旧视图名称不能相同"}
|
||||
}
|
||||
if strings.Contains(newName, ".") {
|
||||
return connection.QueryResult{Success: false, Message: "新视图名不能包含 schema 或数据库前缀"}
|
||||
}
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
schemaName, pureOldName := normalizeSchemaAndTableByType(dbType, dbName, oldName)
|
||||
if pureOldName == "" {
|
||||
return connection.QueryResult{Success: false, Message: "旧视图名不能为空"}
|
||||
}
|
||||
oldQualified := quoteTableIdentByType(dbType, schemaName, pureOldName)
|
||||
newQuoted := quoteIdentByType(dbType, newName)
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "sphinx":
|
||||
newQualified := quoteTableIdentByType(dbType, schemaName, newName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualified, newQualified)
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
sql = fmt.Sprintf("ALTER VIEW %s RENAME TO %s", oldQualified, newQuoted)
|
||||
case "sqlserver":
|
||||
oldFullName := schemaName + "." + pureOldName
|
||||
escapedOld := strings.ReplaceAll(oldFullName, "'", "''")
|
||||
escapedNew := strings.ReplaceAll(newName, "'", "''")
|
||||
sql = fmt.Sprintf("EXEC sp_rename '%s', '%s'", escapedOld, escapedNew)
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名视图", dbType)}
|
||||
}
|
||||
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if _, err := dbInst.Exec(sql); err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "视图重命名成功"}
|
||||
}
|
||||
|
||||
func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
515
internal/app/methods_redis.go
Normal file
515
internal/app/methods_redis.go
Normal file
@@ -0,0 +1,515 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/redis"
|
||||
)
|
||||
|
||||
// Redis client cache
|
||||
var (
|
||||
redisCache = make(map[string]redis.RedisClient)
|
||||
redisCacheMu sync.Mutex
|
||||
)
|
||||
|
||||
// getRedisClient gets or creates a Redis client from cache
|
||||
func (a *App) getRedisClient(config connection.ConnectionConfig) (redis.RedisClient, error) {
|
||||
key := getRedisClientCacheKey(config)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
logger.Infof("获取 Redis 连接:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
|
||||
redisCacheMu.Lock()
|
||||
defer redisCacheMu.Unlock()
|
||||
|
||||
if client, ok := redisCache[key]; ok {
|
||||
logger.Infof("命中 Redis 连接缓存,开始检测可用性:缓存Key=%s", shortKey)
|
||||
if err := client.Ping(); err == nil {
|
||||
logger.Infof("缓存 Redis 连接可用:缓存Key=%s", shortKey)
|
||||
return client, nil
|
||||
} else {
|
||||
logger.Error(err, "缓存 Redis 连接不可用,准备重建:缓存Key=%s", shortKey)
|
||||
}
|
||||
client.Close()
|
||||
delete(redisCache, key)
|
||||
}
|
||||
|
||||
logger.Infof("创建 Redis 客户端实例:缓存Key=%s", shortKey)
|
||||
client := redis.NewRedisClient()
|
||||
if err := client.Connect(config); err != nil {
|
||||
logger.Error(err, "Redis 连接失败:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
redisCache[key] = client
|
||||
logger.Infof("Redis 连接成功并写入缓存:%s 缓存Key=%s", formatRedisConnSummary(config), shortKey)
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func getRedisClientCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
}
|
||||
b, _ := json.Marshal(config)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func formatRedisConnSummary(config connection.ConnectionConfig) string {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString("类型=redis 地址=")
|
||||
b.WriteString(config.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.Port + '0')))
|
||||
b.WriteString(" DB=")
|
||||
b.WriteString(string(rune(config.RedisDB + '0')))
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(" SSH=")
|
||||
b.WriteString(config.SSH.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.SSH.Port + '0')))
|
||||
b.WriteString(" 用户=")
|
||||
b.WriteString(config.SSH.User)
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// RedisConnect tests a Redis connection
|
||||
func (a *App) RedisConnect(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
_, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisConnect 连接失败:%s", formatRedisConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
logger.Infof("RedisConnect 连接成功:%s", formatRedisConnSummary(config))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
// RedisTestConnection tests a Redis connection (alias for RedisConnect)
|
||||
func (a *App) RedisTestConnection(config connection.ConnectionConfig) connection.QueryResult {
|
||||
return a.RedisConnect(config)
|
||||
}
|
||||
|
||||
// RedisScanKeys scans keys matching a pattern
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor uint64, count int64) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, cursor, count)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisScanKeys 扫描失败:pattern=%s", pattern)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: result}
|
||||
}
|
||||
|
||||
// RedisGetValue gets the value of a key
|
||||
func (a *App) RedisGetValue(config connection.ConnectionConfig, key string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
value, err := client.GetValue(key)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisGetValue 获取失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: value}
|
||||
}
|
||||
|
||||
// RedisSetString sets a string value
|
||||
func (a *App) RedisSetString(config connection.ConnectionConfig, key, value string, ttl int64) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.SetString(key, value, ttl); err != nil {
|
||||
logger.Error(err, "RedisSetString 设置失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "设置成功"}
|
||||
}
|
||||
|
||||
// RedisSetHashField sets a field in a hash
|
||||
func (a *App) RedisSetHashField(config connection.ConnectionConfig, key, field, value string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.SetHashField(key, field, value); err != nil {
|
||||
logger.Error(err, "RedisSetHashField 设置失败:key=%s field=%s", key, field)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "设置成功"}
|
||||
}
|
||||
|
||||
// RedisDeleteKeys deletes one or more keys
|
||||
func (a *App) RedisDeleteKeys(config connection.ConnectionConfig, keys []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
deleted, err := client.DeleteKeys(keys)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisDeleteKeys 删除失败:keys=%v", keys)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"deleted": deleted}}
|
||||
}
|
||||
|
||||
// RedisSetTTL sets the TTL of a key
|
||||
func (a *App) RedisSetTTL(config connection.ConnectionConfig, key string, ttl int64) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.SetTTL(key, ttl); err != nil {
|
||||
logger.Error(err, "RedisSetTTL 设置失败:key=%s ttl=%d", key, ttl)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "设置成功"}
|
||||
}
|
||||
|
||||
// RedisExecuteCommand executes a raw Redis command
|
||||
func (a *App) RedisExecuteCommand(config connection.ConnectionConfig, command string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
// Parse command string into args
|
||||
args := parseRedisCommand(command)
|
||||
if len(args) == 0 {
|
||||
return connection.QueryResult{Success: false, Message: "命令不能为空"}
|
||||
}
|
||||
|
||||
result, err := client.ExecuteCommand(args)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisExecuteCommand 执行失败:command=%s", command)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: result}
|
||||
}
|
||||
|
||||
// parseRedisCommand parses a Redis command string into arguments
|
||||
func parseRedisCommand(command string) []string {
|
||||
command = strings.TrimSpace(command)
|
||||
if command == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
var args []string
|
||||
var current strings.Builder
|
||||
inQuote := false
|
||||
quoteChar := rune(0)
|
||||
|
||||
for _, ch := range command {
|
||||
if inQuote {
|
||||
if ch == quoteChar {
|
||||
inQuote = false
|
||||
args = append(args, current.String())
|
||||
current.Reset()
|
||||
} else {
|
||||
current.WriteRune(ch)
|
||||
}
|
||||
} else {
|
||||
if ch == '"' || ch == '\'' {
|
||||
inQuote = true
|
||||
quoteChar = ch
|
||||
} else if ch == ' ' || ch == '\t' {
|
||||
if current.Len() > 0 {
|
||||
args = append(args, current.String())
|
||||
current.Reset()
|
||||
}
|
||||
} else {
|
||||
current.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if current.Len() > 0 {
|
||||
args = append(args, current.String())
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
// RedisGetServerInfo returns server information
|
||||
func (a *App) RedisGetServerInfo(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
info, err := client.GetServerInfo()
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisGetServerInfo 获取失败")
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: info}
|
||||
}
|
||||
|
||||
// RedisGetDatabases returns information about all databases
|
||||
func (a *App) RedisGetDatabases(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
dbs, err := client.GetDatabases()
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisGetDatabases 获取失败")
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: dbs}
|
||||
}
|
||||
|
||||
// RedisSelectDB selects a database
|
||||
func (a *App) RedisSelectDB(config connection.ConnectionConfig, dbIndex int) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
config.RedisDB = dbIndex
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.SelectDB(dbIndex); err != nil {
|
||||
logger.Error(err, "RedisSelectDB 切换失败:db=%d", dbIndex)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "切换成功"}
|
||||
}
|
||||
|
||||
// RedisRenameKey renames a key
|
||||
func (a *App) RedisRenameKey(config connection.ConnectionConfig, oldKey, newKey string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.RenameKey(oldKey, newKey); err != nil {
|
||||
logger.Error(err, "RedisRenameKey 重命名失败:%s -> %s", oldKey, newKey)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "重命名成功"}
|
||||
}
|
||||
|
||||
// RedisDeleteHashField deletes fields from a hash
|
||||
func (a *App) RedisDeleteHashField(config connection.ConnectionConfig, key string, fields []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.DeleteHashField(key, fields...); err != nil {
|
||||
logger.Error(err, "RedisDeleteHashField 删除失败:key=%s fields=%v", key, fields)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "删除成功"}
|
||||
}
|
||||
|
||||
// RedisListPush pushes values to a list
|
||||
func (a *App) RedisListPush(config connection.ConnectionConfig, key string, values []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.ListPush(key, values...); err != nil {
|
||||
logger.Error(err, "RedisListPush 添加失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "添加成功"}
|
||||
}
|
||||
|
||||
// RedisListSet sets a value at an index in a list
|
||||
func (a *App) RedisListSet(config connection.ConnectionConfig, key string, index int64, value string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.ListSet(key, index, value); err != nil {
|
||||
logger.Error(err, "RedisListSet 设置失败:key=%s index=%d", key, index)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "设置成功"}
|
||||
}
|
||||
|
||||
// RedisSetAdd adds members to a set
|
||||
func (a *App) RedisSetAdd(config connection.ConnectionConfig, key string, members []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.SetAdd(key, members...); err != nil {
|
||||
logger.Error(err, "RedisSetAdd 添加失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "添加成功"}
|
||||
}
|
||||
|
||||
// RedisSetRemove removes members from a set
|
||||
func (a *App) RedisSetRemove(config connection.ConnectionConfig, key string, members []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.SetRemove(key, members...); err != nil {
|
||||
logger.Error(err, "RedisSetRemove 删除失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "删除成功"}
|
||||
}
|
||||
|
||||
// RedisZSetAdd adds members to a sorted set
|
||||
func (a *App) RedisZSetAdd(config connection.ConnectionConfig, key string, members []redis.ZSetMember) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.ZSetAdd(key, members...); err != nil {
|
||||
logger.Error(err, "RedisZSetAdd 添加失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "添加成功"}
|
||||
}
|
||||
|
||||
// RedisZSetRemove removes members from a sorted set
|
||||
func (a *App) RedisZSetRemove(config connection.ConnectionConfig, key string, members []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.ZSetRemove(key, members...); err != nil {
|
||||
logger.Error(err, "RedisZSetRemove 删除失败:key=%s", key)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "删除成功"}
|
||||
}
|
||||
|
||||
// RedisStreamAdd adds an entry to a stream
|
||||
func (a *App) RedisStreamAdd(config connection.ConnectionConfig, key string, fields map[string]string, id string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
newID, err := client.StreamAdd(key, fields, id)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisStreamAdd 添加失败:key=%s id=%s", key, id)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "添加成功", Data: map[string]string{"id": newID}}
|
||||
}
|
||||
|
||||
// RedisStreamDelete deletes stream entries by IDs
|
||||
func (a *App) RedisStreamDelete(config connection.ConnectionConfig, key string, ids []string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
deleted, err := client.StreamDelete(key, ids...)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisStreamDelete 删除失败:key=%s ids=%v", key, ids)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "删除成功", Data: map[string]int64{"deleted": deleted}}
|
||||
}
|
||||
|
||||
// RedisFlushDB flushes the current database
|
||||
func (a *App) RedisFlushDB(config connection.ConnectionConfig) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
if err := client.FlushDB(); err != nil {
|
||||
logger.Error(err, "RedisFlushDB 清空失败")
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "清空成功"}
|
||||
}
|
||||
|
||||
// CloseAllRedisClients closes all cached Redis clients (called on shutdown)
|
||||
func CloseAllRedisClients() {
|
||||
redisCacheMu.Lock()
|
||||
defer redisCacheMu.Unlock()
|
||||
|
||||
for key, client := range redisCache {
|
||||
if client != nil {
|
||||
client.Close()
|
||||
logger.Infof("已关闭 Redis 连接:%s", key[:12])
|
||||
}
|
||||
}
|
||||
redisCache = make(map[string]redis.RedisClient)
|
||||
}
|
||||
@@ -1,11 +1,99 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/sync"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
)
|
||||
|
||||
// DataSync executes a data synchronization task
|
||||
func (a *App) DataSync(config sync.SyncConfig) sync.SyncResult {
|
||||
engine := sync.NewSyncEngine()
|
||||
return engine.RunSync(config)
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("sync-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
reporter := sync.Reporter{
|
||||
OnLog: func(event sync.SyncLogEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncLog, event)
|
||||
},
|
||||
OnProgress: func(event sync.SyncProgressEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncProgress, event)
|
||||
},
|
||||
}
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncStart, map[string]any{
|
||||
"jobId": jobID,
|
||||
"total": len(config.Tables),
|
||||
})
|
||||
|
||||
engine := sync.NewSyncEngine(reporter)
|
||||
res := engine.RunSync(config)
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncDone, map[string]any{
|
||||
"jobId": jobID,
|
||||
"result": res,
|
||||
})
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// DataSyncAnalyze analyzes differences between source and target for the given tables (dry-run).
|
||||
func (a *App) DataSyncAnalyze(config sync.SyncConfig) connection.QueryResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("analyze-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
reporter := sync.Reporter{
|
||||
OnLog: func(event sync.SyncLogEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncLog, event)
|
||||
},
|
||||
OnProgress: func(event sync.SyncProgressEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncProgress, event)
|
||||
},
|
||||
}
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncStart, map[string]any{
|
||||
"jobId": jobID,
|
||||
"total": len(config.Tables),
|
||||
"type": "analyze",
|
||||
})
|
||||
|
||||
engine := sync.NewSyncEngine(reporter)
|
||||
res := engine.Analyze(config)
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncDone, map[string]any{
|
||||
"jobId": jobID,
|
||||
"result": res,
|
||||
"type": "analyze",
|
||||
})
|
||||
|
||||
if !res.Success {
|
||||
return connection.QueryResult{Success: false, Message: res.Message, Data: res}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: res.Message, Data: res}
|
||||
}
|
||||
|
||||
// DataSyncPreview returns a limited preview of diff rows for one table.
|
||||
func (a *App) DataSyncPreview(config sync.SyncConfig, tableName string, limit int) connection.QueryResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("preview-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
engine := sync.NewSyncEngine(sync.Reporter{})
|
||||
preview, err := engine.Preview(config, tableName, limit)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "OK", Data: preview}
|
||||
}
|
||||
|
||||
972
internal/app/methods_update.go
Normal file
972
internal/app/methods_update.go
Normal file
@@ -0,0 +1,972 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
stdRuntime "runtime"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
|
||||
wailsRuntime "github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
)
|
||||
|
||||
const (
|
||||
updateRepo = "Syngnat/GoNavi"
|
||||
updateAPIURL = "https://api.github.com/repos/" + updateRepo + "/releases/latest"
|
||||
updateChecksumAsset = "SHA256SUMS"
|
||||
updateDownloadProgressEvent = "update:download-progress"
|
||||
)
|
||||
|
||||
type updateState struct {
|
||||
lastCheck *UpdateInfo
|
||||
downloading bool
|
||||
staged *stagedUpdate
|
||||
}
|
||||
|
||||
type UpdateInfo struct {
|
||||
HasUpdate bool `json:"hasUpdate"`
|
||||
CurrentVersion string `json:"currentVersion"`
|
||||
LatestVersion string `json:"latestVersion"`
|
||||
ReleaseName string `json:"releaseName"`
|
||||
ReleaseNotesURL string `json:"releaseNotesUrl"`
|
||||
AssetName string `json:"assetName"`
|
||||
AssetURL string `json:"assetUrl"`
|
||||
AssetSize int64 `json:"assetSize"`
|
||||
SHA256 string `json:"sha256"`
|
||||
}
|
||||
|
||||
type AppInfo struct {
|
||||
Version string `json:"version"`
|
||||
Author string `json:"author"`
|
||||
RepoURL string `json:"repoUrl,omitempty"`
|
||||
IssueURL string `json:"issueUrl,omitempty"`
|
||||
ReleaseURL string `json:"releaseUrl,omitempty"`
|
||||
BuildTime string `json:"buildTime,omitempty"`
|
||||
}
|
||||
|
||||
type updateDownloadResult struct {
|
||||
Info UpdateInfo `json:"info"`
|
||||
DownloadPath string `json:"downloadPath,omitempty"`
|
||||
InstallLogPath string `json:"installLogPath,omitempty"`
|
||||
InstallTarget string `json:"installTarget,omitempty"`
|
||||
Platform string `json:"platform"`
|
||||
AutoRelaunch bool `json:"autoRelaunch"`
|
||||
}
|
||||
|
||||
type updateDownloadProgressPayload struct {
|
||||
Status string `json:"status"`
|
||||
Percent float64 `json:"percent"`
|
||||
Downloaded int64 `json:"downloaded"`
|
||||
Total int64 `json:"total"`
|
||||
Message string `json:"message,omitempty"`
|
||||
}
|
||||
|
||||
type stagedUpdate struct {
|
||||
Version string
|
||||
AssetName string
|
||||
FilePath string
|
||||
StagedDir string
|
||||
InstallLogPath string
|
||||
}
|
||||
|
||||
type githubRelease struct {
|
||||
TagName string `json:"tag_name"`
|
||||
Name string `json:"name"`
|
||||
HTMLURL string `json:"html_url"`
|
||||
Prerelease bool `json:"prerelease"`
|
||||
Assets []githubAsset `json:"assets"`
|
||||
}
|
||||
|
||||
type githubAsset struct {
|
||||
Name string `json:"name"`
|
||||
BrowserDownloadURL string `json:"browser_download_url"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
|
||||
func (a *App) CheckForUpdates() connection.QueryResult {
|
||||
info, err := fetchLatestUpdateInfo()
|
||||
if err != nil {
|
||||
logger.Error(err, "检查更新失败")
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
a.updateMu.Lock()
|
||||
a.updateState.lastCheck = &info
|
||||
a.updateMu.Unlock()
|
||||
|
||||
msg := "已是最新版本"
|
||||
if info.HasUpdate {
|
||||
msg = fmt.Sprintf("发现新版本:%s", info.LatestVersion)
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: msg, Data: info}
|
||||
}
|
||||
|
||||
func (a *App) GetAppInfo() connection.QueryResult {
|
||||
info := AppInfo{
|
||||
Version: getCurrentVersion(),
|
||||
Author: getCurrentAuthor(),
|
||||
RepoURL: "https://github.com/" + updateRepo,
|
||||
IssueURL: "https://github.com/" + updateRepo + "/issues",
|
||||
ReleaseURL: "https://github.com/" + updateRepo + "/releases",
|
||||
BuildTime: strings.TrimSpace(AppBuildTime),
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "OK", Data: info}
|
||||
}
|
||||
|
||||
func (a *App) DownloadUpdate() connection.QueryResult {
|
||||
a.updateMu.Lock()
|
||||
if a.updateState.downloading {
|
||||
a.updateMu.Unlock()
|
||||
return connection.QueryResult{Success: false, Message: "更新包正在下载中,请稍后重试"}
|
||||
}
|
||||
info := a.updateState.lastCheck
|
||||
if info == nil {
|
||||
a.updateMu.Unlock()
|
||||
return connection.QueryResult{Success: false, Message: "请先检查更新"}
|
||||
}
|
||||
if !info.HasUpdate {
|
||||
a.updateMu.Unlock()
|
||||
return connection.QueryResult{Success: false, Message: "当前已是最新版本"}
|
||||
}
|
||||
if info.AssetURL == "" || info.AssetName == "" {
|
||||
a.updateMu.Unlock()
|
||||
return connection.QueryResult{Success: false, Message: "未找到可用的更新包"}
|
||||
}
|
||||
staged := a.updateState.staged
|
||||
if staged != nil && staged.Version == info.LatestVersion {
|
||||
a.updateMu.Unlock()
|
||||
return connection.QueryResult{Success: true, Message: "更新包已下载完成", Data: buildUpdateDownloadResult(*info, staged)}
|
||||
}
|
||||
a.updateState.downloading = true
|
||||
a.updateMu.Unlock()
|
||||
|
||||
a.emitUpdateDownloadProgress("start", 0, info.AssetSize, "")
|
||||
result := a.downloadAndStageUpdate(*info)
|
||||
|
||||
a.updateMu.Lock()
|
||||
a.updateState.downloading = false
|
||||
a.updateMu.Unlock()
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (a *App) InstallUpdateAndRestart() connection.QueryResult {
|
||||
a.updateMu.Lock()
|
||||
staged := a.updateState.staged
|
||||
if staged != nil && strings.TrimSpace(staged.InstallLogPath) == "" {
|
||||
staged.InstallLogPath = buildUpdateInstallLogPath(filepath.Dir(staged.FilePath))
|
||||
}
|
||||
a.updateMu.Unlock()
|
||||
if staged == nil {
|
||||
return connection.QueryResult{Success: false, Message: "未找到已下载的更新包"}
|
||||
}
|
||||
|
||||
if err := launchUpdateScript(staged); err != nil {
|
||||
logger.Error(err, "启动更新脚本失败")
|
||||
msg := err.Error()
|
||||
if staged.InstallLogPath != "" {
|
||||
msg = fmt.Sprintf("%s(更新日志:%s)", msg, staged.InstallLogPath)
|
||||
}
|
||||
return connection.QueryResult{
|
||||
Success: false,
|
||||
Message: msg,
|
||||
Data: map[string]any{
|
||||
"logPath": staged.InstallLogPath,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
go func() {
|
||||
time.Sleep(300 * time.Millisecond)
|
||||
wailsRuntime.Quit(a.ctx)
|
||||
// 兜底退出,避免某些平台/窗口状态下 Quit 未真正结束进程,导致更新脚本一直等待。
|
||||
time.Sleep(2 * time.Second)
|
||||
os.Exit(0)
|
||||
}()
|
||||
|
||||
msg := "更新已开始安装"
|
||||
if staged.InstallLogPath != "" {
|
||||
msg = fmt.Sprintf("更新已开始安装,日志路径:%s", staged.InstallLogPath)
|
||||
}
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: msg,
|
||||
Data: map[string]any{
|
||||
"logPath": staged.InstallLogPath,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// downloadAndStageUpdate downloads the release asset described by info into a
// version-named staging directory under the update workspace, verifies its
// SHA-256 checksum against info.SHA256, and records the result as the pending
// staged update on the App. Progress and error events are emitted via
// emitUpdateDownloadProgress throughout.
func (a *App) downloadAndStageUpdate(info UpdateInfo) connection.QueryResult {
	workspaceDir := strings.TrimSpace(resolveUpdateWorkspaceDir())
	if workspaceDir == "" {
		a.emitUpdateDownloadProgress("error", 0, info.AssetSize, "无法确定当前应用目录")
		return connection.QueryResult{Success: false, Message: "无法确定当前应用目录,无法下载更新"}
	}
	if err := os.MkdirAll(workspaceDir, 0o755); err != nil {
		errMsg := fmt.Sprintf("无法访问应用目录:%s", workspaceDir)
		a.emitUpdateDownloadProgress("error", 0, info.AssetSize, errMsg)
		return connection.QueryResult{Success: false, Message: errMsg}
	}

	// Name the staging directory after OS and version for easier identification
	// and debugging.
	stagedDir := filepath.Join(workspaceDir, fmt.Sprintf(".gonavi-update-%s-%s", stdRuntime.GOOS, info.LatestVersion))
	// Clean up leftovers from a previously failed download. On Windows the
	// files may be temporarily locked by antivirus/indexing services, so retry
	// with a linearly growing backoff.
	for retry := 0; retry < 5; retry++ {
		err := os.RemoveAll(stagedDir)
		if err == nil {
			break
		}
		if retry < 4 {
			time.Sleep(time.Duration(retry+1) * 500 * time.Millisecond)
		} else {
			// Still failing on the final attempt: switch to a unique,
			// timestamped directory name to sidestep the conflict entirely.
			stagedDir = filepath.Join(workspaceDir, fmt.Sprintf(".gonavi-update-%s-%s-%d", stdRuntime.GOOS, info.LatestVersion, time.Now().UnixNano()))
		}
	}
	if err := os.MkdirAll(stagedDir, 0o755); err != nil {
		errMsg := fmt.Sprintf("无法在应用目录创建更新工作目录:%s", stagedDir)
		a.emitUpdateDownloadProgress("error", 0, info.AssetSize, errMsg)
		return connection.QueryResult{Success: false, Message: errMsg}
	}

	// Download into the staging directory so the running executable is never
	// overwritten directly.
	assetPath := filepath.Join(stagedDir, info.AssetName)
	actualHash, err := downloadFileWithHash(info.AssetURL, assetPath, func(downloaded, total int64) {
		reportTotal := total
		if reportTotal <= 0 {
			// Server did not report Content-Length; fall back to the size
			// advertised by the release metadata.
			reportTotal = info.AssetSize
		}
		a.emitUpdateDownloadProgress("downloading", downloaded, reportTotal, "")
	})
	if err != nil {
		_ = os.Remove(assetPath)
		_ = os.RemoveAll(stagedDir)
		a.emitUpdateDownloadProgress("error", 0, info.AssetSize, err.Error())
		return connection.QueryResult{Success: false, Message: err.Error()}
	}

	// A checksum is mandatory: refuse to stage an unverifiable package.
	if info.SHA256 == "" {
		_ = os.Remove(assetPath)
		_ = os.RemoveAll(stagedDir)
		a.emitUpdateDownloadProgress("error", 0, info.AssetSize, "缺少更新包校验值(SHA256SUMS)")
		return connection.QueryResult{Success: false, Message: "缺少更新包校验值(SHA256SUMS)"}
	}
	if !strings.EqualFold(info.SHA256, actualHash) {
		_ = os.Remove(assetPath)
		_ = os.RemoveAll(stagedDir)
		a.emitUpdateDownloadProgress("error", 0, info.AssetSize, "更新包校验失败,请重试")
		return connection.QueryResult{Success: false, Message: "更新包校验失败,请重试"}
	}

	staged := &stagedUpdate{
		Version:        info.LatestVersion,
		AssetName:      info.AssetName,
		FilePath:       assetPath,
		StagedDir:      stagedDir,
		InstallLogPath: buildUpdateInstallLogPath(workspaceDir),
	}
	// Publish the staged update under the mutex so concurrent readers observe
	// a consistent value.
	a.updateMu.Lock()
	a.updateState.staged = staged
	a.updateMu.Unlock()

	a.emitUpdateDownloadProgress("done", info.AssetSize, info.AssetSize, "")
	return connection.QueryResult{Success: true, Message: "更新包下载完成", Data: buildUpdateDownloadResult(info, staged)}
}
|
||||
|
||||
// fetchLatestUpdateInfo queries the latest GitHub release and assembles an
// UpdateInfo for the current platform: version-comparison result, asset
// name/URL/size, and the expected SHA-256 taken from the release checksum
// asset. It returns an error when the release, the platform asset, or its
// checksum cannot be resolved.
func fetchLatestUpdateInfo() (UpdateInfo, error) {
	release, err := fetchLatestRelease()
	if err != nil {
		return UpdateInfo{}, err
	}

	currentVersion := getCurrentVersion()
	latestVersion := normalizeVersion(release.TagName)
	if latestVersion == "" {
		return UpdateInfo{}, errors.New("无法解析最新版本号")
	}

	assetName, err := expectedAssetName(stdRuntime.GOOS, stdRuntime.GOARCH)
	if err != nil {
		return UpdateInfo{}, err
	}
	asset, err := findReleaseAsset(release.Assets, assetName)
	if err != nil {
		return UpdateInfo{}, err
	}

	hashMap, err := fetchReleaseSHA256(release.Assets)
	if err != nil {
		return UpdateInfo{}, err
	}
	sha256Value := strings.TrimSpace(hashMap[assetName])
	if sha256Value == "" {
		return UpdateInfo{}, errors.New("SHA256SUMS 未包含当前平台更新包")
	}

	// hasUpdate is true only when the remote version is strictly newer.
	hasUpdate := compareVersion(currentVersion, latestVersion) < 0

	return UpdateInfo{
		HasUpdate:       hasUpdate,
		CurrentVersion:  currentVersion,
		LatestVersion:   latestVersion,
		ReleaseName:     release.Name,
		ReleaseNotesURL: release.HTMLURL,
		AssetName:       asset.Name,
		AssetURL:        asset.BrowserDownloadURL,
		AssetSize:       asset.Size,
		SHA256:          sha256Value,
	}, nil
}
|
||||
|
||||
func getCurrentAuthor() string {
|
||||
if env := strings.TrimSpace(os.Getenv("GONAVI_AUTHOR")); env != "" {
|
||||
return env
|
||||
}
|
||||
parts := strings.Split(updateRepo, "/")
|
||||
if len(parts) > 0 {
|
||||
return parts[0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// fetchLatestRelease fetches the latest-release document from the GitHub API
// endpoint updateAPIURL and decodes it into a githubRelease. The whole
// request is bounded by a 15-second client timeout.
func fetchLatestRelease() (*githubRelease, error) {
	client := &http.Client{Timeout: 15 * time.Second}
	req, err := http.NewRequest(http.MethodGet, updateAPIURL, nil)
	if err != nil {
		return nil, err
	}
	// GitHub rejects requests without a User-Agent; Accept selects the JSON API.
	req.Header.Set("User-Agent", "GoNavi-Updater")
	req.Header.Set("Accept", "application/vnd.github+json")

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("检查更新失败:HTTP %d", resp.StatusCode)
	}

	var release githubRelease
	if err := json.NewDecoder(resp.Body).Decode(&release); err != nil {
		return nil, err
	}
	return &release, nil
}
|
||||
|
||||
// expectedAssetName maps a GOOS/GOARCH pair to the release asset file name
// published for that platform. It returns an error for any combination that
// has no published asset (e.g. linux/arm64).
func expectedAssetName(goos, goarch string) (string, error) {
	supported := map[string]string{
		"windows/amd64": "GoNavi-windows-amd64.exe",
		"windows/arm64": "GoNavi-windows-arm64.exe",
		"darwin/amd64":  "GoNavi-mac-amd64.dmg",
		"darwin/arm64":  "GoNavi-mac-arm64.dmg",
		"linux/amd64":   "GoNavi-linux-amd64.tar.gz",
	}
	if name, ok := supported[goos+"/"+goarch]; ok {
		return name, nil
	}
	return "", fmt.Errorf("当前平台暂不支持在线更新:%s/%s", goos, goarch)
}
|
||||
|
||||
func findReleaseAsset(assets []githubAsset, name string) (*githubAsset, error) {
|
||||
for _, asset := range assets {
|
||||
if asset.Name == name {
|
||||
return &asset, nil
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("未找到更新包:%s", name)
|
||||
}
|
||||
|
||||
// fetchReleaseSHA256 locates the checksum asset in the release (exact match
// on updateChecksumAsset, or any asset whose name contains "sha256sums"),
// downloads it, and returns the parsed file-name -> hex-hash map.
func fetchReleaseSHA256(assets []githubAsset) (map[string]string, error) {
	var checksumURL string
	for _, asset := range assets {
		if strings.EqualFold(asset.Name, updateChecksumAsset) || strings.Contains(strings.ToLower(asset.Name), "sha256sums") {
			checksumURL = asset.BrowserDownloadURL
			break
		}
	}
	if checksumURL == "" {
		return nil, errors.New("Release 未提供 SHA256SUMS")
	}

	client := &http.Client{Timeout: 15 * time.Second}
	req, err := http.NewRequest(http.MethodGet, checksumURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", "GoNavi-Updater")
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("下载 SHA256SUMS 失败:HTTP %d", resp.StatusCode)
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	return parseSHA256Sums(string(body)), nil
}
|
||||
|
||||
// parseSHA256Sums parses sha256sum-style output ("<hash>  <name>" per line,
// with an optional "*" binary marker or "./" prefix before the name) into a
// name -> hash map. Blank and malformed lines are skipped.
func parseSHA256Sums(content string) map[string]string {
	result := make(map[string]string)
	for _, line := range strings.Split(content, "\n") {
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		fields := strings.Fields(line)
		if len(fields) < 2 {
			continue
		}
		hash := fields[0]
		// The file name is everything after the hash, not just the last
		// whitespace-separated field — otherwise names containing spaces
		// (e.g. "my app.tar.gz") would be truncated to their final word.
		name := strings.TrimSpace(strings.TrimPrefix(line, hash))
		name = strings.TrimPrefix(name, "*")
		name = strings.TrimPrefix(name, "./")
		if name == "" {
			continue
		}
		result[name] = hash
	}
	return result
}
|
||||
|
||||
// downloadProgressWriter is an io.Writer that counts bytes written through it
// and forwards progress callbacks, throttled to at most one call per
// emitEvery interval (plus a final call once written reaches total).
type downloadProgressWriter struct {
	total      int64                         // expected total bytes; <= 0 when unknown
	written    int64                         // bytes written so far
	lastEmit   time.Time                     // time of the most recent onProgress call
	emitEvery  time.Duration                 // minimum interval between progress callbacks
	onProgress func(downloaded, total int64) // optional progress callback; may be nil
}
|
||||
|
||||
func (w *downloadProgressWriter) Write(p []byte) (int, error) {
|
||||
n := len(p)
|
||||
if n == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
w.written += int64(n)
|
||||
if w.onProgress == nil {
|
||||
return n, nil
|
||||
}
|
||||
now := time.Now()
|
||||
if w.lastEmit.IsZero() || now.Sub(w.lastEmit) >= w.emitEvery || (w.total > 0 && w.written >= w.total) {
|
||||
w.lastEmit = now
|
||||
w.onProgress(w.written, w.total)
|
||||
}
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// downloadFileWithHash downloads url into filePath while computing the
// SHA-256 of the streamed bytes, returning the hex-encoded digest.
// onProgress (optional) is called with (downloaded, total) where total is the
// response Content-Length (may be <= 0 when unknown); intermediate callbacks
// are throttled by downloadProgressWriter. The whole request is bounded by a
// 10-minute client timeout.
func downloadFileWithHash(url, filePath string, onProgress func(downloaded, total int64)) (string, error) {
	client := &http.Client{Timeout: 10 * time.Minute}
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("User-Agent", "GoNavi-Updater")

	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("下载更新包失败:HTTP %d", resp.StatusCode)
	}

	// On Windows a stale target file may be held open by antivirus/indexing
	// services; remove it first and retry creation with linear backoff.
	_ = os.Remove(filePath)
	var out *os.File
	for retry := 0; retry < 5; retry++ {
		out, err = os.Create(filePath)
		if err == nil {
			break
		}
		if retry < 4 {
			time.Sleep(time.Duration(retry+1) * 500 * time.Millisecond)
		}
	}
	if err != nil {
		return "", fmt.Errorf("更新下载失败,文件被占用:%w", err)
	}

	hasher := sha256.New()
	total := resp.ContentLength
	progressWriter := &downloadProgressWriter{
		total:      total,
		emitEvery:  120 * time.Millisecond,
		onProgress: onProgress,
	}
	// Stream simultaneously to the file, the hasher, and the progress counter.
	writers := []io.Writer{out, hasher, progressWriter}
	if onProgress != nil {
		// Initial 0% callback so the UI shows activity immediately.
		onProgress(0, total)
	}
	if _, err := io.Copy(io.MultiWriter(writers...), resp.Body); err != nil {
		out.Close()
		return "", err
	}
	if onProgress != nil {
		// Final callback with the exact byte count (throttling may have
		// suppressed the last intermediate one).
		onProgress(progressWriter.written, total)
	}

	// Explicit Sync + Close so data reaches disk and the handle is released
	// before the caller verifies or moves the file.
	if err := out.Sync(); err != nil {
		out.Close()
		return "", err
	}
	if err := out.Close(); err != nil {
		return "", err
	}

	return hex.EncodeToString(hasher.Sum(nil)), nil
}
|
||||
|
||||
// buildUpdateDownloadResult assembles the payload returned to the frontend
// after a successful download: release info, current platform, the resolved
// install target, and — when a staged update exists — the downloaded file and
// install-log paths.
func buildUpdateDownloadResult(info UpdateInfo, staged *stagedUpdate) updateDownloadResult {
	result := updateDownloadResult{
		Info:          info,
		Platform:      stdRuntime.GOOS,
		InstallTarget: resolveUpdateInstallTarget(),
		AutoRelaunch:  true,
	}
	if staged != nil {
		result.DownloadPath = staged.FilePath
		result.InstallLogPath = staged.InstallLogPath
	}
	return result
}
|
||||
|
||||
func buildUpdateInstallLogPath(baseDir string) string {
|
||||
platform := stdRuntime.GOOS
|
||||
if platform == "darwin" {
|
||||
platform = "macos"
|
||||
}
|
||||
logDir := strings.TrimSpace(baseDir)
|
||||
if logDir == "" {
|
||||
logDir = os.TempDir()
|
||||
}
|
||||
return filepath.Join(logDir, fmt.Sprintf("gonavi-update-%s-%d.log", platform, time.Now().UnixNano()))
|
||||
}
|
||||
|
||||
func resolveUpdateWorkspaceDir() string {
|
||||
// 使用系统临时目录作为更新工作区,避免以下问题:
|
||||
// 1. Windows: exe 所在目录可能被杀毒软件/索引服务锁定,或缺少写权限(如 Program Files)
|
||||
// 2. macOS: /Applications 需要管理员权限才能写入
|
||||
// 3. 运行中的 exe 文件锁与 staging 文件冲突
|
||||
dir := filepath.Join(os.TempDir(), "gonavi-updates")
|
||||
_ = os.MkdirAll(dir, 0o755)
|
||||
return dir
|
||||
}
|
||||
|
||||
func resolveUpdateInstallTarget() string {
|
||||
exePath, err := os.Executable()
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
exePath, _ = filepath.EvalSymlinks(exePath)
|
||||
if stdRuntime.GOOS == "darwin" {
|
||||
return resolveMacUpdateTarget(exePath)
|
||||
}
|
||||
return exePath
|
||||
}
|
||||
|
||||
// emitUpdateDownloadProgress publishes a download-progress event to the
// frontend. Percent is derived from downloaded/total when total is known
// (capped at 100) and forced to 100 for the terminal "done" status. It is a
// no-op before the app context is available (a.ctx == nil).
func (a *App) emitUpdateDownloadProgress(status string, downloaded, total int64, message string) {
	if a.ctx == nil {
		return
	}
	payload := updateDownloadProgressPayload{
		Status:     status,
		Percent:    0,
		Downloaded: downloaded,
		Total:      total,
		Message:    strings.TrimSpace(message),
	}
	if total > 0 {
		payload.Percent = math.Min(100, (float64(downloaded)/float64(total))*100)
	}
	if status == "done" && payload.Percent < 100 {
		payload.Percent = 100
	}
	wailsRuntime.EventsEmit(a.ctx, updateDownloadProgressEvent, payload)
}
|
||||
|
||||
func launchUpdateScript(staged *stagedUpdate) error {
|
||||
exePath, err := os.Executable()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
exePath, _ = filepath.EvalSymlinks(exePath)
|
||||
pid := os.Getpid()
|
||||
|
||||
switch stdRuntime.GOOS {
|
||||
case "windows":
|
||||
return launchWindowsUpdate(staged, exePath, pid)
|
||||
case "darwin":
|
||||
return launchMacUpdate(staged, exePath, pid)
|
||||
case "linux":
|
||||
return launchLinuxUpdate(staged, exePath, pid)
|
||||
default:
|
||||
return fmt.Errorf("当前平台暂不支持更新安装:%s", stdRuntime.GOOS)
|
||||
}
|
||||
}
|
||||
|
||||
// launchWindowsUpdate writes the Windows update batch script into the staging
// directory and launches it detached via `cmd /C start` so it keeps running
// after this process exits. It also ensures staged.InstallLogPath is set.
func launchWindowsUpdate(staged *stagedUpdate, targetExe string, pid int) error {
	scriptPath := filepath.Join(staged.StagedDir, "update.cmd")
	logPath := strings.TrimSpace(staged.InstallLogPath)
	if logPath == "" {
		// Fall back to a log file next to the downloaded asset.
		logPath = buildUpdateInstallLogPath(filepath.Dir(staged.FilePath))
		staged.InstallLogPath = logPath
	}
	content := buildWindowsScript(staged.FilePath, targetExe, staged.StagedDir, logPath, pid)
	if err := os.WriteFile(scriptPath, []byte(content), 0o644); err != nil {
		return err
	}

	logger.Infof("启动 Windows 更新脚本:target=%s script=%s log=%s", targetExe, scriptPath, logPath)
	// `start` detaches the script into its own console so it survives app exit.
	cmd := exec.Command("cmd", "/C", "start", "", scriptPath)
	return cmd.Start()
}
|
||||
|
||||
// launchMacUpdate prepares a DMG mount point and the macOS update shell
// script inside the staging directory, then starts the script so it can
// replace the target .app bundle after this process exits. It also ensures
// staged.InstallLogPath is set.
func launchMacUpdate(staged *stagedUpdate, targetExe string, pid int) error {
	targetApp := resolveMacUpdateTarget(targetExe)
	mountDir := filepath.Join(staged.StagedDir, "mnt")
	if err := os.MkdirAll(mountDir, 0o755); err != nil {
		return err
	}
	logPath := strings.TrimSpace(staged.InstallLogPath)
	if logPath == "" {
		// Fall back to a log file next to the downloaded asset.
		logPath = buildUpdateInstallLogPath(filepath.Dir(staged.FilePath))
		staged.InstallLogPath = logPath
	}

	scriptPath := filepath.Join(staged.StagedDir, "update.sh")
	content := buildMacScript(staged.FilePath, targetApp, staged.StagedDir, mountDir, logPath, pid)
	if err := os.WriteFile(scriptPath, []byte(content), 0o755); err != nil {
		return err
	}

	cmd := exec.Command("/bin/bash", scriptPath)
	logger.Infof("启动 macOS 更新脚本:target=%s script=%s log=%s", targetApp, scriptPath, logPath)
	return cmd.Start()
}
|
||||
|
||||
// launchLinuxUpdate writes the Linux update shell script into the staging
// directory and starts it; the script waits for pid to exit before replacing
// targetExe with the extracted binary.
func launchLinuxUpdate(staged *stagedUpdate, targetExe string, pid int) error {
	scriptPath := filepath.Join(staged.StagedDir, "update.sh")
	content := buildLinuxScript(staged.FilePath, targetExe, staged.StagedDir, pid)
	if err := os.WriteFile(scriptPath, []byte(content), 0o755); err != nil {
		return err
	}

	cmd := exec.Command("/bin/sh", scriptPath)
	return cmd.Start()
}
|
||||
|
||||
func buildWindowsScript(source, target, stagedDir, logPath string, pid int) string {
|
||||
return fmt.Sprintf(`@echo off
|
||||
setlocal EnableExtensions EnableDelayedExpansion
|
||||
set "SOURCE=%s"
|
||||
set "TARGET=%s"
|
||||
set "STAGED=%s"
|
||||
set "LOG_FILE=%s"
|
||||
set PID=%d
|
||||
|
||||
call :log updater started
|
||||
if not exist "%%SOURCE%%" (
|
||||
call :log source file not found: %%SOURCE%%
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
:waitloop
|
||||
tasklist /FI "PID eq %%PID%%" | find "%%PID%%" >nul
|
||||
if %%ERRORLEVEL%%==0 (
|
||||
timeout /t 1 /nobreak >nul
|
||||
goto waitloop
|
||||
)
|
||||
call :log host process exited
|
||||
|
||||
set /a RETRY=0
|
||||
:move_retry
|
||||
move /Y "%%SOURCE%%" "%%TARGET%%" >> "%%LOG_FILE%%" 2>&1
|
||||
if %%ERRORLEVEL%%==0 goto move_done
|
||||
|
||||
copy /Y "%%SOURCE%%" "%%TARGET%%" >> "%%LOG_FILE%%" 2>&1
|
||||
if %%ERRORLEVEL%%==0 goto move_done
|
||||
|
||||
set /a RETRY+=1
|
||||
if !RETRY! LSS 20 (
|
||||
timeout /t 1 /nobreak >nul
|
||||
goto move_retry
|
||||
)
|
||||
|
||||
call :log replace failed after retries (portable mode, no elevation): check directory write permission or file lock
|
||||
exit /b 1
|
||||
|
||||
:move_done
|
||||
start "" "%%TARGET%%" >> "%%LOG_FILE%%" 2>&1
|
||||
if %%ERRORLEVEL%% NEQ 0 (
|
||||
call :log cmd start failed, trying powershell Start-Process
|
||||
powershell -NoProfile -ExecutionPolicy Bypass -Command "Start-Process -FilePath '%%TARGET%%'" >> "%%LOG_FILE%%" 2>&1
|
||||
if %%ERRORLEVEL%% NEQ 0 (
|
||||
call :log relaunch failed
|
||||
exit /b 1
|
||||
)
|
||||
)
|
||||
rmdir /S /Q "%%STAGED%%" >> "%%LOG_FILE%%" 2>&1
|
||||
call :log update finished
|
||||
exit /b 0
|
||||
|
||||
:log
|
||||
echo [%%date%% %%time%%] %%*>>"%%LOG_FILE%%"
|
||||
exit /b 0
|
||||
`, source, target, stagedDir, logPath, pid)
|
||||
}
|
||||
|
||||
// buildMacScript renders the bash script that performs the macOS self-update:
// wait for the host process (pid) to exit, attach the downloaded DMG at
// mountDir, replace targetApp with the .app found inside the image (direct
// ditto+mv first, osascript with administrator privileges as fallback),
// verify the installed binary, detach/clean up, and relaunch the app. All
// steps append to logPath.
func buildMacScript(dmgPath, targetApp, stagedDir, mountDir, logPath string, pid int) string {
	return fmt.Sprintf(`#!/bin/bash
set -euo pipefail
PID=%d
DMG="%s"
TARGET_APP="%s"
STAGED="%s"
MOUNT_DIR="%s"
LOG_FILE="%s"
TMP_APP="${TARGET_APP}.new"
BACKUP_APP="${TARGET_APP}.backup"
APP_BIN_NAME=$(basename "$TARGET_APP" .app)
APP_BIN_REL="Contents/MacOS/$APP_BIN_NAME"

log() {
echo "[$(date '+%%Y-%%m-%%d %%H:%%M:%%S')] $*" >> "$LOG_FILE"
}

run_admin_replace() {
/usr/bin/osascript <<'APPLESCRIPT' "$APP_SRC" "$TARGET_APP" "$TMP_APP" "$BACKUP_APP" "$APP_BIN_REL" "$LOG_FILE"
on run argv
set srcPath to item 1 of argv
set dstPath to item 2 of argv
set tmpPath to item 3 of argv
set bakPath to item 4 of argv
set binRel to item 5 of argv
set logPath to item 6 of argv
set cmd to "set -eu; " & ¬
"rm -rf " & quoted form of tmpPath & " " & quoted form of bakPath & "; " & ¬
"/usr/bin/ditto " & quoted form of srcPath & " " & quoted form of tmpPath & "; " & ¬
"if [ ! -x " & quoted form of (tmpPath & "/" & binRel) & " ]; then echo 'tmp app binary missing' >> " & quoted form of logPath & "; exit 1; fi; " & ¬
"xattr -rd com.apple.quarantine " & quoted form of tmpPath & " >> " & quoted form of logPath & " 2>&1 || true; " & ¬
"if [ -d " & quoted form of dstPath & " ]; then mv " & quoted form of dstPath & " " & quoted form of bakPath & "; fi; " & ¬
"mv " & quoted form of tmpPath & " " & quoted form of dstPath & "; " & ¬
"rm -rf " & quoted form of bakPath & "; " & ¬
"xattr -rd com.apple.quarantine " & quoted form of dstPath & " >> " & quoted form of logPath & " 2>&1 || true"
do shell script cmd with administrator privileges
end run
APPLESCRIPT
}

replace_app_direct() {
rm -rf "$TMP_APP" "$BACKUP_APP" >>"$LOG_FILE" 2>&1 || true
/usr/bin/ditto "$APP_SRC" "$TMP_APP" >>"$LOG_FILE" 2>&1
if [ ! -x "$TMP_APP/$APP_BIN_REL" ]; then
log "tmp app binary missing: $TMP_APP/$APP_BIN_REL"
return 1
fi
xattr -rd com.apple.quarantine "$TMP_APP" >>"$LOG_FILE" 2>&1 || true
if [ -d "$TARGET_APP" ]; then
mv "$TARGET_APP" "$BACKUP_APP" >>"$LOG_FILE" 2>&1
fi
if ! mv "$TMP_APP" "$TARGET_APP" >>"$LOG_FILE" 2>&1; then
log "move new app failed, trying rollback"
rm -rf "$TARGET_APP" >>"$LOG_FILE" 2>&1 || true
if [ -d "$BACKUP_APP" ]; then
mv "$BACKUP_APP" "$TARGET_APP" >>"$LOG_FILE" 2>&1 || true
fi
return 1
fi
rm -rf "$BACKUP_APP" >>"$LOG_FILE" 2>&1 || true
xattr -rd com.apple.quarantine "$TARGET_APP" >>"$LOG_FILE" 2>&1 || true
return 0
}

relaunch_app() {
if /usr/bin/open -n "$TARGET_APP" >>"$LOG_FILE" 2>&1; then
return 0
fi
log "open -n failed, trying binary launch"
"$TARGET_APP/$APP_BIN_REL" >>"$LOG_FILE" 2>&1 &
return 0
}

log "updater started"
while kill -0 $PID 2>/dev/null; do
sleep 1
done
log "host process exited"
hdiutil attach "$DMG" -nobrowse -quiet -mountpoint "$MOUNT_DIR" >>"$LOG_FILE" 2>&1
APP_SRC=$(ls "$MOUNT_DIR"/*.app 2>/dev/null | head -n 1 || true)
if [ -z "$APP_SRC" ]; then
log "no .app found inside dmg"
hdiutil detach "$MOUNT_DIR" -quiet >>"$LOG_FILE" 2>&1 || true
exit 1
fi

log "install target: $TARGET_APP"
if ! replace_app_direct; then
log "direct replace failed, trying admin replace"
run_admin_replace >>"$LOG_FILE" 2>&1
fi

if [ ! -x "$TARGET_APP/$APP_BIN_REL" ]; then
log "target app binary missing after replace: $TARGET_APP/$APP_BIN_REL"
hdiutil detach "$MOUNT_DIR" -quiet >>"$LOG_FILE" 2>&1 || true
exit 1
fi

hdiutil detach "$MOUNT_DIR" -quiet >>"$LOG_FILE" 2>&1 || true
rm -rf "$MOUNT_DIR" "$DMG" "$STAGED" >>"$LOG_FILE" 2>&1 || true
relaunch_app
log "relaunch requested"
`, pid, dmgPath, targetApp, stagedDir, mountDir, logPath)
}
|
||||
|
||||
// buildLinuxScript renders the shell script that performs the Linux
// self-update: wait for the host process (pid) to exit, extract the tarball,
// locate the GoNavi binary inside it, overwrite targetExe, clean up the
// staging files, and relaunch the target in the background.
func buildLinuxScript(tarPath, targetExe, stagedDir string, pid int) string {
	return fmt.Sprintf(`#!/bin/bash
set -e
PID=%d
ARCHIVE="%s"
TARGET="%s"
STAGED="%s"
while kill -0 $PID 2>/dev/null; do
sleep 1
done
TMPDIR=$(mktemp -d)
tar -xzf "$ARCHIVE" -C "$TMPDIR"
NEWBIN="$TMPDIR/GoNavi"
if [ ! -f "$NEWBIN" ]; then
NEWBIN=$(find "$TMPDIR" -type f -name "GoNavi" | head -n 1)
fi
if [ -z "$NEWBIN" ] || [ ! -f "$NEWBIN" ]; then
exit 1
fi
cp -f "$NEWBIN" "$TARGET"
chmod +x "$TARGET"
rm -rf "$TMPDIR" "$ARCHIVE" "$STAGED"
"$TARGET" &
`, pid, tarPath, targetExe, stagedDir)
}
|
||||
|
||||
// detectMacAppPath walks exePath's components from the end and returns the
// path up to and including the innermost ".app" component, or "" when the
// path contains no .app bundle. The result is always absolute.
func detectMacAppPath(exePath string) string {
	sep := string(filepath.Separator)
	parts := strings.Split(exePath, sep)
	for i := len(parts) - 1; i >= 0; i-- {
		if !strings.HasSuffix(parts[i], ".app") {
			continue
		}
		// filepath.Join drops the empty leading component of an absolute
		// path, so re-prepend the separator when needed.
		appPath := filepath.Join(parts[:i+1]...)
		if filepath.IsAbs(appPath) {
			return appPath
		}
		return sep + appPath
	}
	return ""
}
|
||||
|
||||
// resolveMacUpdateTarget maps an executable path to the .app bundle the
// updater should replace. It falls back to /Applications/GoNavi.app when no
// bundle can be derived from the path, or when the app is running from a
// Gatekeeper App Translocation mount (not a stable install location).
func resolveMacUpdateTarget(exePath string) string {
	targetApp := detectMacAppPath(exePath)
	if targetApp == "" {
		return "/Applications/GoNavi.app"
	}
	targetApp = filepath.Clean(targetApp)
	// App Translocation paths are randomized read-only mounts and cannot be
	// used for a stable in-place update; fall back to /Applications.
	if strings.Contains(targetApp, string(filepath.Separator)+"AppTranslocation"+string(filepath.Separator)) {
		logger.Warnf("检测到 AppTranslocation 运行路径,更新目标回退至 /Applications/GoNavi.app:%s", targetApp)
		return "/Applications/GoNavi.app"
	}
	return targetApp
}
|
||||
|
||||
// normalizeVersion strips surrounding whitespace and a single leading "v"
// (e.g. " v1.2.3 " -> "1.2.3").
func normalizeVersion(version string) string {
	return strings.TrimPrefix(strings.TrimSpace(version), "v")
}
|
||||
|
||||
func compareVersion(current, latest string) int {
|
||||
current = normalizeVersion(current)
|
||||
latest = normalizeVersion(latest)
|
||||
if current == "" {
|
||||
return -1
|
||||
}
|
||||
if current == latest {
|
||||
return 0
|
||||
}
|
||||
|
||||
curParts := splitVersionParts(current)
|
||||
latParts := splitVersionParts(latest)
|
||||
max := len(curParts)
|
||||
if len(latParts) > max {
|
||||
max = len(latParts)
|
||||
}
|
||||
for i := 0; i < max; i++ {
|
||||
cur := 0
|
||||
lat := 0
|
||||
if i < len(curParts) {
|
||||
cur = curParts[i]
|
||||
}
|
||||
if i < len(latParts) {
|
||||
lat = latParts[i]
|
||||
}
|
||||
if cur < lat {
|
||||
return -1
|
||||
}
|
||||
if cur > lat {
|
||||
return 1
|
||||
}
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// splitVersionParts splits a dotted version string into numeric components.
// Each component keeps only its leading decimal digits ("3-rc" -> 3); an
// empty or non-numeric component becomes 0.
func splitVersionParts(version string) []int {
	segments := strings.Split(version, ".")
	nums := make([]int, 0, len(segments))
	for _, seg := range segments {
		seg = strings.TrimSpace(seg)
		value := 0
		for _, r := range seg {
			if r < '0' || r > '9' {
				break
			}
			value = value*10 + int(r-'0')
		}
		nums = append(nums, value)
	}
	return nums
}
|
||||
236
internal/app/sql_sanitize.go
Normal file
236
internal/app/sql_sanitize.go
Normal file
@@ -0,0 +1,236 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
func sanitizeSQLForPgLike(dbType string, query string) string {
|
||||
switch strings.ToLower(strings.TrimSpace(dbType)) {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
// 有些情况下会出现多层重复引用(例如 """"schema"""" 或 ""schema"""),单次修复不一定收敛。
|
||||
// 这里做有限次数的迭代,直到输出不再变化。
|
||||
out := query
|
||||
for i := 0; i < 3; i++ {
|
||||
fixed := fixBrokenDoubleDoubleQuotedIdent(out)
|
||||
if fixed == out {
|
||||
break
|
||||
}
|
||||
out = fixed
|
||||
}
|
||||
return out
|
||||
default:
|
||||
return query
|
||||
}
|
||||
}
|
||||
|
||||
// fixBrokenDoubleDoubleQuotedIdent fixes accidental identifiers like:
|
||||
//
|
||||
// SELECT * FROM ""schema"".""table""
|
||||
//
|
||||
// which can be produced when a quoted identifier gets wrapped by quotes again.
|
||||
//
|
||||
// It is intentionally conservative:
|
||||
// - only runs outside strings/comments/dollar-quoted blocks
|
||||
// - does not touch valid escaped-quote sequences inside quoted identifiers (e.g. "a""b")
|
||||
func fixBrokenDoubleDoubleQuotedIdent(query string) string {
	// Fast path: nothing to repair unless adjacent double quotes exist.
	if !strings.Contains(query, `""`) {
		return query
	}

	var b strings.Builder
	b.Grow(len(query))

	// Lexer state — at most one of these contexts is active at a time.
	inSingle := false       // inside a '...' string literal
	inDoubleIdent := false  // inside a "..." quoted identifier
	inLineComment := false  // inside a -- line comment
	inBlockComment := false // inside a /* ... */ block comment
	dollarTag := ""         // non-empty while inside a $tag$...$tag$ block

	for i := 0; i < len(query); i++ {
		ch := query[i]
		next := byte(0)
		if i+1 < len(query) {
			next = query[i+1]
		}

		if inLineComment {
			b.WriteByte(ch)
			if ch == '\n' {
				inLineComment = false
			}
			continue
		}
		if inBlockComment {
			b.WriteByte(ch)
			if ch == '*' && next == '/' {
				b.WriteByte('/')
				i++
				inBlockComment = false
			}
			continue
		}
		if dollarTag != "" {
			// Inside a dollar-quoted block: copy verbatim until the matching tag.
			if strings.HasPrefix(query[i:], dollarTag) {
				b.WriteString(dollarTag)
				i += len(dollarTag) - 1
				dollarTag = ""
				continue
			}
			b.WriteByte(ch)
			continue
		}
		if inSingle {
			b.WriteByte(ch)
			if ch == '\'' {
				// '' is an escaped single quote, not a terminator.
				if next == '\'' {
					b.WriteByte('\'')
					i++
					continue
				}
				inSingle = false
			}
			continue
		}
		if inDoubleIdent {
			b.WriteByte(ch)
			if ch == '"' {
				// "" inside a quoted identifier is an escaped quote, not a terminator.
				if next == '"' {
					b.WriteByte('"')
					i++
					continue
				}
				inDoubleIdent = false
			}
			continue
		}

		// --- Outside of all string/comment blocks ---
		if ch == '-' && next == '-' {
			b.WriteByte(ch)
			b.WriteByte('-')
			i++
			inLineComment = true
			continue
		}
		if ch == '/' && next == '*' {
			b.WriteByte(ch)
			b.WriteByte('*')
			i++
			inBlockComment = true
			continue
		}
		if ch == '\'' {
			b.WriteByte(ch)
			inSingle = true
			continue
		}
		if ch == '$' {
			if tag := parseDollarTag(query[i:]); tag != "" {
				b.WriteString(tag)
				i += len(tag) - 1
				dollarTag = tag
				continue
			}
		}

		if ch == '"' {
			// Fix: ""ident"" -> "ident" (only when it looks like a plain identifier)
			// Also handle variants like ""ident""" / """"ident"""" (extra quotes at either side).
			if next == '"' {
				if replacement, advance, ok := tryFixDoubleDoubleQuotedIdent(query, i); ok {
					b.WriteString(replacement)
					i = advance - 1
					continue
				}
			}

			b.WriteByte(ch)
			inDoubleIdent = true
			continue
		}

		b.WriteByte(ch)
	}

	return b.String()
}
|
||||
|
||||
// tryFixDoubleDoubleQuotedIdent attempts to collapse a broken
// multi-double-quoted identifier beginning at query[start] into a single
// properly quoted identifier. On success it returns the replacement text,
// the index just past the consumed span, and ok=true.
func tryFixDoubleDoubleQuotedIdent(query string, start int) (replacement string, advance int, ok bool) {
	// start points at the first quote of a broken identifier, usually like:
	// ""ident"" / ""ident""" / """"ident""""
	if start < 0 || start+1 >= len(query) {
		return "", 0, false
	}
	if query[start] != '"' || query[start+1] != '"' {
		return "", 0, false
	}
	// start must be the beginning of the quote run; a preceding quote means
	// the run was already considered at an earlier index.
	if start > 0 && query[start-1] == '"' {
		return "", 0, false
	}

	// Measure the leading run of consecutive quotes.
	runLen := 0
	for start+runLen < len(query) && query[start+runLen] == '"' {
		runLen++
	}
	if runLen < 2 || runLen%2 == 1 {
		// Odd run (e.g. """...) can be a valid quoted identifier with escaped quotes.
		return "", 0, false
	}

	// Scan the candidate identifier content until a closing quote run (>= 2).
	contentStart := start + runLen
	j := contentStart
	for j < len(query) {
		if query[j] == '"' {
			endRunLen := 0
			for j+endRunLen < len(query) && query[j+endRunLen] == '"' {
				endRunLen++
			}
			if endRunLen >= 2 {
				content := strings.TrimSpace(query[contentStart:j])
				if looksLikeIdentifierContent(content) {
					// Collapse the whole span to one properly quoted identifier.
					return `"` + content + `"`, j + endRunLen, true
				}
				return "", 0, false
			}
		}
		// Fast abort: identifier-like content should not span lines.
		if query[j] == '\n' || query[j] == '\r' {
			break
		}
		j++
	}
	return "", 0, false
}
|
||||
|
||||
// looksLikeIdentifierContent reports whether s is plausible SQL identifier
// content: non-blank and composed solely of letters, digits, '_', '$', or '-'.
func looksLikeIdentifierContent(s string) bool {
	if strings.TrimSpace(s) == "" {
		return false
	}
	for _, r := range s {
		allowed := r == '_' || r == '$' || r == '-' || unicode.IsLetter(r) || unicode.IsDigit(r)
		if !allowed {
			return false
		}
	}
	return true
}
|
||||
|
||||
// parseDollarTag returns the leading dollar-quote opener of s — "$tag$" where
// tag is [A-Za-z0-9_]* (possibly empty, i.e. "$$") — or "" when s does not
// start with a valid opener.
func parseDollarTag(s string) string {
	if len(s) < 2 || s[0] != '$' {
		return ""
	}
	for i := 1; i < len(s); i++ {
		switch c := s[i]; {
		case c == '$':
			return s[:i+1]
		case c >= 'a' && c <= 'z', c >= 'A' && c <= 'Z', c >= '0' && c <= '9', c == '_':
			// valid tag character — keep scanning
		default:
			return ""
		}
	}
	return ""
}
|
||||
55
internal/app/sql_sanitize_test.go
Normal file
55
internal/app/sql_sanitize_test.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package app
|
||||
|
||||
import "testing"
|
||||
|
||||
// Verifies that doubled quotes around both schema and table are collapsed
// into single-quoted identifiers for a PG-compatible database type.
func TestSanitizeSQLForPgLike_FixesBrokenDoubleDoubleQuotes(t *testing.T) {
	in := `SELECT * FROM ""ldf_server"".""t_user"" LIMIT 1`
	out := sanitizeSQLForPgLike("kingbase", in)
	want := `SELECT * FROM "ldf_server"."t_user" LIMIT 1`
	if out != want {
		t.Fatalf("unexpected sanitize output:\nIN: %s\nOUT: %s\nWANT: %s", in, out, want)
	}
}
|
||||
|
||||
// Verifies that an asymmetric quote run (""ident""") still collapses to a
// single properly quoted identifier.
func TestSanitizeSQLForPgLike_FixesBrokenDoubleDoubleQuotes_WithExtraQuotes(t *testing.T) {
	in := `SELECT * FROM ""ldf_server""".""t_user"" LIMIT 1`
	out := sanitizeSQLForPgLike("kingbase", in)
	want := `SELECT * FROM "ldf_server"."t_user" LIMIT 1`
	if out != want {
		t.Fatalf("unexpected sanitize output:\nIN: %s\nOUT: %s\nWANT: %s", in, out, want)
	}
}
|
||||
|
||||
// Verifies that quadruple-quoted identifiers ("""" on each side) are
// collapsed, exercising the iterative fix-up passes.
func TestSanitizeSQLForPgLike_FixesBrokenDoubleDoubleQuotes_WithQuadQuotes(t *testing.T) {
	in := `SELECT * FROM """"ldf_server"""".""t_user"" LIMIT 1`
	out := sanitizeSQLForPgLike("kingbase", in)
	want := `SELECT * FROM "ldf_server"."t_user" LIMIT 1`
	if out != want {
		t.Fatalf("unexpected sanitize output:\nIN: %s\nOUT: %s\nWANT: %s", in, out, want)
	}
}
|
||||
|
||||
// Verifies that legitimate escaped quotes inside a quoted identifier
// (e.g. "a""b") are left untouched.
func TestSanitizeSQLForPgLike_DoesNotTouchEscapedQuotesInsideIdentifier(t *testing.T) {
	in := `SELECT "a""b" FROM "t""x"`
	out := sanitizeSQLForPgLike("postgres", in)
	if out != in {
		t.Fatalf("should keep valid escaped quotes inside identifier:\nIN: %s\nOUT: %s", in, out)
	}
}
|
||||
|
||||
func TestSanitizeSQLForPgLike_DoesNotTouchDollarQuotedStrings(t *testing.T) {
|
||||
in := "SELECT $$\"\"ldf_server\"\"$$, \"\"ldf_server\"\""
|
||||
out := sanitizeSQLForPgLike("postgres", in)
|
||||
want := "SELECT $$\"\"ldf_server\"\"$$, \"ldf_server\""
|
||||
if out != want {
|
||||
t.Fatalf("unexpected sanitize output for dollar quoted string:\nIN: %s\nOUT: %s\nWANT: %s", in, out, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSanitizeSQLForPgLike_DoesNotModifyOtherDBTypes(t *testing.T) {
|
||||
in := `SELECT * FROM ""ldf_server""`
|
||||
out := sanitizeSQLForPgLike("mysql", in)
|
||||
if out != in {
|
||||
t.Fatalf("non-PG-like db should not be sanitized:\nIN: %s\nOUT: %s", in, out)
|
||||
}
|
||||
}
|
||||
53
internal/app/version.go
Normal file
53
internal/app/version.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var AppVersion = "0.0.0"
|
||||
var AppBuildTime = ""
|
||||
|
||||
func getCurrentVersion() string {
|
||||
version := strings.TrimSpace(AppVersion)
|
||||
if version == "" || version == "0.0.0" {
|
||||
if env := strings.TrimSpace(os.Getenv("GONAVI_VERSION")); env != "" {
|
||||
version = env
|
||||
} else if pkgVersion, err := readPackageVersion(); err == nil && pkgVersion != "" {
|
||||
version = pkgVersion
|
||||
}
|
||||
}
|
||||
return normalizeVersion(version)
|
||||
}
|
||||
|
||||
func readPackageVersion() (string, error) {
|
||||
paths := []string{
|
||||
filepath.Join("frontend", "package.json"),
|
||||
}
|
||||
exe, err := os.Executable()
|
||||
if err == nil {
|
||||
base := filepath.Dir(exe)
|
||||
paths = append(paths, filepath.Join(base, "frontend", "package.json"))
|
||||
paths = append(paths, filepath.Join(base, "..", "frontend", "package.json"))
|
||||
}
|
||||
|
||||
for _, p := range paths {
|
||||
data, err := os.ReadFile(p)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
var payload struct {
|
||||
Version string `json:"version"`
|
||||
}
|
||||
if err := json.Unmarshal(data, &payload); err != nil {
|
||||
continue
|
||||
}
|
||||
if strings.TrimSpace(payload.Version) != "" {
|
||||
return strings.TrimSpace(payload.Version), nil
|
||||
}
|
||||
}
|
||||
|
||||
return "", os.ErrNotExist
|
||||
}
|
||||
119
internal/app/window_translucency_darwin.go
Normal file
119
internal/app/window_translucency_darwin.go
Normal file
@@ -0,0 +1,119 @@
|
||||
//go:build darwin
|
||||
|
||||
package app
|
||||
|
||||
/*
|
||||
#cgo CFLAGS: -x objective-c -fblocks
|
||||
#cgo LDFLAGS: -framework Cocoa
|
||||
#import <Cocoa/Cocoa.h>
|
||||
#import <dispatch/dispatch.h>
|
||||
|
||||
static void gonaviTuneWindowTranslucency(NSWindow *window) {
|
||||
if (window == nil) {
|
||||
return;
|
||||
}
|
||||
CGFloat cornerRadius = 14.0;
|
||||
|
||||
[window setOpaque:NO];
|
||||
[window setBackgroundColor:[NSColor clearColor]];
|
||||
[window setHasShadow:YES];
|
||||
[window setMovableByWindowBackground:YES];
|
||||
|
||||
NSView *contentView = [window contentView];
|
||||
if (contentView == nil) {
|
||||
return;
|
||||
}
|
||||
|
||||
[contentView setWantsLayer:YES];
|
||||
[[contentView layer] setBackgroundColor:[[NSColor clearColor] CGColor]];
|
||||
[[contentView layer] setCornerRadius:cornerRadius];
|
||||
[[contentView layer] setMasksToBounds:YES];
|
||||
|
||||
NSVisualEffectView *effectView = nil;
|
||||
for (NSView *subview in [contentView subviews]) {
|
||||
if ([subview isKindOfClass:[NSVisualEffectView class]]) {
|
||||
effectView = (NSVisualEffectView *)subview;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (effectView == nil) {
|
||||
effectView = [[NSVisualEffectView alloc] initWithFrame:[contentView bounds]];
|
||||
[effectView setAutoresizingMask:NSViewWidthSizable | NSViewHeightSizable];
|
||||
[contentView addSubview:effectView positioned:NSWindowBelow relativeTo:nil];
|
||||
[effectView release];
|
||||
}
|
||||
|
||||
[effectView setMaterial:NSVisualEffectMaterialHUDWindow];
|
||||
[effectView setBlendingMode:NSVisualEffectBlendingModeBehindWindow];
|
||||
[effectView setState:NSVisualEffectStateActive];
|
||||
// 默认 alpha=0(不可见),由前端根据用户外观设置动态启用
|
||||
[effectView setAlphaValue:0.0];
|
||||
[effectView setWantsLayer:YES];
|
||||
[[effectView layer] setCornerRadius:cornerRadius];
|
||||
[[effectView layer] setMasksToBounds:YES];
|
||||
}
|
||||
|
||||
static void gonaviApplyWindowTranslucencyFix() {
|
||||
// 启动时应用窗口透明度修复,减少重试次数以降低启动期 GPU 负载
|
||||
for (int i = 0; i < 8; i++) {
|
||||
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(i * 250 * NSEC_PER_MSEC)), dispatch_get_main_queue(), ^{
|
||||
for (NSWindow *window in [NSApp windows]) {
|
||||
gonaviTuneWindowTranslucency(window);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 动态设置 NSVisualEffectView 的透明度和窗口不透明标志。
|
||||
// alpha <= 0 时窗口标记为 opaque,GPU 不再持续计算窗口背后的模糊效果。
|
||||
static void gonaviSetEffectViewAlpha(double alpha) {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
for (NSWindow *window in [NSApp windows]) {
|
||||
NSView *contentView = [window contentView];
|
||||
if (contentView == nil) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (NSView *subview in [contentView subviews]) {
|
||||
if ([subview isKindOfClass:[NSVisualEffectView class]]) {
|
||||
NSVisualEffectView *effectView = (NSVisualEffectView *)subview;
|
||||
[effectView setAlphaValue:alpha];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (alpha <= 0.01) {
|
||||
[window setOpaque:YES];
|
||||
} else {
|
||||
[window setOpaque:NO];
|
||||
[window setBackgroundColor:[NSColor clearColor]];
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
|
||||
func applyMacWindowTranslucencyFix() {
|
||||
C.gonaviApplyWindowTranslucencyFix()
|
||||
}
|
||||
|
||||
// setMacWindowTranslucency 根据用户外观设置动态调整 macOS 窗口透明度。
|
||||
// opacity=1.0 且 blur=0 时关闭 NSVisualEffectView(alpha=0),窗口标记为 opaque,
|
||||
// GPU 不再持续计算窗口背后的模糊合成,显著降低 CPU/GPU 温度。
|
||||
func setMacWindowTranslucency(opacity float64, blur float64) {
|
||||
if opacity >= 0.999 && blur <= 0 {
|
||||
C.gonaviSetEffectViewAlpha(C.double(0.0))
|
||||
} else {
|
||||
// 半透明模式:NSVisualEffectView alpha 根据透明度动态映射
|
||||
alpha := (1.0 - opacity) * 1.2
|
||||
if alpha < 0.3 {
|
||||
alpha = 0.3
|
||||
}
|
||||
if alpha > 0.85 {
|
||||
alpha = 0.85
|
||||
}
|
||||
C.gonaviSetEffectViewAlpha(C.double(alpha))
|
||||
}
|
||||
}
|
||||
7
internal/app/window_translucency_stub.go
Normal file
7
internal/app/window_translucency_stub.go
Normal file
@@ -0,0 +1,7 @@
|
||||
//go:build !darwin
|
||||
|
||||
package app
|
||||
|
||||
func applyMacWindowTranslucencyFix() {}
|
||||
|
||||
func setMacWindowTranslucency(opacity float64, blur float64) {}
|
||||
@@ -11,17 +11,31 @@ type SSHConfig struct {
|
||||
|
||||
// ConnectionConfig holds database connection details including SSH
|
||||
type ConnectionConfig struct {
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
}
|
||||
|
||||
// QueryResult is the standard response format for Wails methods
|
||||
@@ -88,3 +102,12 @@ type ChangeSet struct {
|
||||
Updates []UpdateRow `json:"updates"`
|
||||
Deletes []map[string]interface{} `json:"deletes"`
|
||||
}
|
||||
|
||||
type MongoMemberInfo struct {
|
||||
Host string `json:"host"`
|
||||
Role string `json:"role"`
|
||||
State string `json:"state"`
|
||||
StateCode int `json:"stateCode,omitempty"`
|
||||
Healthy bool `json:"healthy"`
|
||||
IsSelf bool `json:"isSelf,omitempty"`
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
@@ -57,6 +58,20 @@ func (c *CustomDB) Ping() error {
|
||||
return c.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (c *CustomDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := c.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (c *CustomDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
@@ -67,41 +82,18 @@ func (c *CustomDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
columns, err := rows.Columns()
|
||||
func (c *CustomDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := c.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (c *CustomDB) Exec(query string) (int64, error) {
|
||||
@@ -136,13 +128,22 @@ func (c *CustomDB) GetTables(dbName string) ([]string, error) {
|
||||
query = fmt.Sprintf("SHOW TABLES FROM `%s`", dbName)
|
||||
}
|
||||
} else if c.driver == "postgres" || c.driver == "kingbase" {
|
||||
if dbName != "" && dbName != "public" {
|
||||
query = fmt.Sprintf("SELECT table_name FROM information_schema.tables WHERE table_schema = '%s'", dbName)
|
||||
query = `
|
||||
SELECT table_schema AS schemaname, table_name AS tablename
|
||||
FROM information_schema.tables
|
||||
WHERE table_type = 'BASE TABLE'
|
||||
AND table_schema NOT IN ('pg_catalog', 'information_schema')`
|
||||
if dbName != "" {
|
||||
query += fmt.Sprintf(" AND table_schema = '%s'", dbName)
|
||||
}
|
||||
query += " ORDER BY table_schema, table_name"
|
||||
} else if c.driver == "sqlite" {
|
||||
query = "SELECT name FROM sqlite_master WHERE type='table'"
|
||||
} else if c.driver == "oracle" || c.driver == "dm" {
|
||||
query = "SELECT table_name FROM user_tables"
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback generic execution
|
||||
@@ -153,6 +154,18 @@ func (c *CustomDB) GetTables(dbName string) ([]string, error) {
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if schema, okSchema := row["schemaname"]; okSchema {
|
||||
if name, okName := row["tablename"]; okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
|
||||
continue
|
||||
}
|
||||
}
|
||||
if owner, okOwner := row["OWNER"]; okOwner {
|
||||
if name, okName := row["TABLE_NAME"]; okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", owner, name))
|
||||
continue
|
||||
}
|
||||
}
|
||||
// iterate keys to find likely column
|
||||
for k, v := range row {
|
||||
if strings.Contains(strings.ToLower(k), "name") || strings.Contains(strings.ToLower(k), "table") {
|
||||
@@ -235,7 +248,141 @@ func (c *CustomDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDe
|
||||
}
|
||||
|
||||
func (c *CustomDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
return fmt.Errorf("read-only mode for custom")
|
||||
if c.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := c.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
driver := strings.ToLower(strings.TrimSpace(c.driver))
|
||||
isMySQL := strings.Contains(driver, "mysql")
|
||||
isPostgres := strings.Contains(driver, "postgres") || strings.Contains(driver, "kingbase") || strings.Contains(driver, "pg")
|
||||
isOracle := strings.Contains(driver, "oracle") || strings.Contains(driver, "ora") || strings.Contains(driver, "dm") || strings.Contains(driver, "dameng")
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
if isMySQL {
|
||||
n = strings.Trim(n, "`")
|
||||
n = strings.ReplaceAll(n, "`", "``")
|
||||
if n == "" {
|
||||
return "``"
|
||||
}
|
||||
return "`" + n + "`"
|
||||
}
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
placeholder := func(idx int) string {
|
||||
if isPostgres {
|
||||
return fmt.Sprintf("$%d", idx)
|
||||
}
|
||||
if isOracle {
|
||||
return fmt.Sprintf(":%d", idx)
|
||||
}
|
||||
// MySQL / SQLite / default
|
||||
return "?"
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = %s", quoteIdent(k), placeholder(idx)))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = %s", quoteIdent(k), placeholder(idx)))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = %s", quoteIdent(k), placeholder(idx)))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
placeholders = append(placeholders, placeholder(idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (c *CustomDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
@@ -10,6 +11,7 @@ import (
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
@@ -19,6 +21,7 @@ import (
|
||||
type DamengDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder // Store SSH tunnel forwarder
|
||||
}
|
||||
|
||||
func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
@@ -26,16 +29,6 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// or dm://user:password@host:port
|
||||
|
||||
address := net.JoinHostPort(config.Host, strconv.Itoa(config.Port))
|
||||
if config.UseSSH {
|
||||
// SSH logic similar to others, assumes port forwarding
|
||||
_, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
// DM driver likely uses standard net.Dial, so we might need a local listener
|
||||
// or assume port forwarding is handled externally or implicitly via "tcp" override if driver allows.
|
||||
// Similar to Oracle, we skip complex custom dialer injection for now.
|
||||
}
|
||||
}
|
||||
|
||||
escapedPassword := url.PathEscape(config.Password)
|
||||
q := url.Values{}
|
||||
if config.Database != "" {
|
||||
@@ -55,7 +48,42 @@ func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := d.getDSN(config)
|
||||
var dsn string
|
||||
var err error
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
logger.Infof("达梦数据库使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
d.forwarder = forwarder
|
||||
|
||||
// Parse local address
|
||||
host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
// Create a modified config pointing to local forwarder
|
||||
localConfig := config
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = d.getDSN(localConfig)
|
||||
logger.Infof("达梦数据库通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = d.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
@@ -69,6 +97,15 @@ func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
|
||||
func (d *DamengDB) Close() error {
|
||||
// Close SSH forwarder first if exists
|
||||
if d.forwarder != nil {
|
||||
if err := d.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭达梦数据库 SSH 端口转发失败:%v", err)
|
||||
}
|
||||
d.forwarder = nil
|
||||
}
|
||||
|
||||
// Then close database connection
|
||||
if d.conn != nil {
|
||||
return d.conn.Close()
|
||||
}
|
||||
@@ -88,6 +125,20 @@ func (d *DamengDB) Ping() error {
|
||||
return d.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (d *DamengDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := d.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (d *DamengDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
@@ -98,41 +149,18 @@ func (d *DamengDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
columns, err := rows.Columns()
|
||||
func (d *DamengDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if d.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := d.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (d *DamengDB) Exec(query string) (int64, error) {
|
||||
@@ -166,7 +194,7 @@ func (d *DamengDB) GetDatabases() ([]string, error) {
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetTables(dbName string) ([]string, error) {
|
||||
query := fmt.Sprintf("SELECT table_name FROM all_tables WHERE owner = '%s'", strings.ToUpper(dbName))
|
||||
query := fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
|
||||
if dbName == "" {
|
||||
query = "SELECT table_name FROM user_tables"
|
||||
}
|
||||
@@ -178,6 +206,14 @@ func (d *DamengDB) GetTables(dbName string) ([]string, error) {
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if dbName != "" {
|
||||
if owner, okOwner := row["OWNER"]; okOwner {
|
||||
if name, okName := row["TABLE_NAME"]; okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", owner, name))
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
if val, ok := row["TABLE_NAME"]; ok {
|
||||
tables = append(tables, fmt.Sprintf("%v", val))
|
||||
}
|
||||
@@ -337,7 +373,117 @@ func (d *DamengDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDe
|
||||
}
|
||||
|
||||
func (d *DamengDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
return fmt.Errorf("read-only mode implemented for Dameng so far")
|
||||
if d.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := d.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = :%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = :%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = :%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf(":%d", idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
|
||||
@@ -40,6 +40,20 @@ func NewDatabase(dbType string) (Database, error) {
|
||||
return &DamengDB{}, nil
|
||||
case "kingbase":
|
||||
return &KingbaseDB{}, nil
|
||||
case "mongodb":
|
||||
return &MongoDB{}, nil
|
||||
case "sqlserver":
|
||||
return &SqlServerDB{}, nil
|
||||
case "highgo":
|
||||
return &HighGoDB{}, nil
|
||||
case "mariadb":
|
||||
return &MariaDB{}, nil
|
||||
case "sphinx":
|
||||
return &SphinxDB{}, nil
|
||||
case "vastbase":
|
||||
return &VastbaseDB{}, nil
|
||||
case "tdengine":
|
||||
return &TDengineDB{}, nil
|
||||
case "custom":
|
||||
return &CustomDB{}, nil
|
||||
default:
|
||||
|
||||
@@ -95,3 +95,20 @@ func TestKingbaseDSN_QuotesPasswordWithSpaces(t *testing.T) {
|
||||
t.Fatalf("dsn 未对包含空格的密码进行引号包裹:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTDengineDSN_UsesWebSocketFormat(t *testing.T) {
|
||||
td := &TDengineDB{}
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "tdengine",
|
||||
Host: "127.0.0.1",
|
||||
Port: 6041,
|
||||
User: "root",
|
||||
Password: "taosdata",
|
||||
Database: "power",
|
||||
}
|
||||
|
||||
dsn := td.getDSN(cfg)
|
||||
if !strings.HasPrefix(dsn, "root:taosdata@ws(127.0.0.1:6041)/power") {
|
||||
t.Fatalf("tdengine dsn 格式不正确:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
628
internal/db/highgo_impl.go
Normal file
628
internal/db/highgo_impl.go
Normal file
@@ -0,0 +1,628 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/highgo/pq-sm3" // HighGo uses dedicated SM3-capable driver
|
||||
)
|
||||
|
||||
// HighGoDB implements Database interface for HighGo (瀚高) database
|
||||
// HighGo is a PostgreSQL-compatible database, so we reuse PostgreSQL driver
|
||||
type HighGoDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder
|
||||
}
|
||||
|
||||
func (h *HighGoDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// postgres://user:password@host:port/dbname?sslmode=disable
|
||||
dbname := config.Database
|
||||
if dbname == "" {
|
||||
dbname = "highgo" // HighGo default database
|
||||
}
|
||||
|
||||
u := &url.URL{
|
||||
Scheme: "postgres",
|
||||
Host: net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
Path: "/" + dbname,
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
return u.String()
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
|
||||
if config.UseSSH {
|
||||
logger.Infof("HighGo 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
h.forwarder = forwarder
|
||||
|
||||
host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
localConfig := config
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = h.getDSN(localConfig)
|
||||
logger.Infof("HighGo 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = h.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("highgo", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
h.conn = db
|
||||
h.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := h.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Close() error {
|
||||
if h.forwarder != nil {
|
||||
if err := h.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 HighGo SSH 端口转发失败:%v", err)
|
||||
}
|
||||
h.forwarder = nil
|
||||
}
|
||||
|
||||
if h.conn != nil {
|
||||
return h.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Ping() error {
|
||||
if h.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := h.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return h.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (h *HighGoDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if h.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := h.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if h.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := h.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (h *HighGoDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if h.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := h.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (h *HighGoDB) Exec(query string) (int64, error) {
|
||||
if h.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := h.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetDatabases() ([]string, error) {
|
||||
data, _, err := h.Query("SELECT datname FROM pg_database WHERE datistemplate = false")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["datname"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetTables(dbName string) ([]string, error) {
|
||||
query := "SELECT schemaname, tablename FROM pg_catalog.pg_tables WHERE schemaname != 'information_schema' AND schemaname NOT LIKE 'pg_%' ORDER BY schemaname, tablename"
|
||||
data, _, err := h.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
schema, okSchema := row["schemaname"]
|
||||
name, okName := row["tablename"]
|
||||
if okSchema && okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
|
||||
continue
|
||||
}
|
||||
if okName {
|
||||
tables = append(tables, fmt.Sprintf("%v", name))
|
||||
}
|
||||
}
|
||||
return tables, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
return fmt.Sprintf("-- SHOW CREATE TABLE not fully supported for HighGo in this version.\n-- Table: %s", tableName), nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = '%s'
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(schema), esc(table))
|
||||
|
||||
data, _, err := h.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if v, ok := row["comment"]; ok && v != nil {
|
||||
col.Comment = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
if v, ok := row["column_default"]; ok && v != nil {
|
||||
def := fmt.Sprintf("%v", v)
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname AS index_name,
|
||||
a.attname AS column_name,
|
||||
ix.indisunique AS is_unique,
|
||||
x.ordinality AS seq_in_index,
|
||||
am.amname AS index_type
|
||||
FROM pg_class t
|
||||
JOIN pg_namespace n ON n.oid = t.relnamespace
|
||||
JOIN pg_index ix ON t.oid = ix.indrelid
|
||||
JOIN pg_class i ON i.oid = ix.indexrelid
|
||||
JOIN pg_am am ON i.relam = am.oid
|
||||
JOIN unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinality) ON TRUE
|
||||
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = x.attnum
|
||||
WHERE t.relkind IN ('r', 'p')
|
||||
AND t.relname = '%s'
|
||||
AND n.nspname = '%s'
|
||||
ORDER BY i.relname, x.ordinality`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := h.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parseBool := func(v interface{}) bool {
|
||||
switch val := v.(type) {
|
||||
case bool:
|
||||
return val
|
||||
case string:
|
||||
s := strings.ToLower(strings.TrimSpace(val))
|
||||
return s == "t" || s == "true" || s == "1" || s == "y" || s == "yes"
|
||||
default:
|
||||
s := strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", v)))
|
||||
return s == "t" || s == "true" || s == "1" || s == "y" || s == "yes"
|
||||
}
|
||||
}
|
||||
|
||||
parseInt := func(v interface{}) int {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
return val
|
||||
case int64:
|
||||
return int(val)
|
||||
case float64:
|
||||
return int(val)
|
||||
case string:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(val), "%d", &n)
|
||||
return n
|
||||
default:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(fmt.Sprintf("%v", v)), "%d", &n)
|
||||
return n
|
||||
}
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
isUnique := false
|
||||
if v, ok := row["is_unique"]; ok && v != nil {
|
||||
isUnique = parseBool(v)
|
||||
}
|
||||
|
||||
nonUnique := 1
|
||||
if isUnique {
|
||||
nonUnique = 0
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if v, ok := row["seq_in_index"]; ok && v != nil {
|
||||
seq = parseInt(v)
|
||||
}
|
||||
|
||||
indexType := ""
|
||||
if v, ok := row["index_type"]; ok && v != nil {
|
||||
indexType = strings.ToUpper(fmt.Sprintf("%v", v))
|
||||
}
|
||||
if indexType == "" {
|
||||
indexType = "BTREE"
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["index_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
NonUnique: nonUnique,
|
||||
SeqInIndex: seq,
|
||||
IndexType: indexType,
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name AS constraint_name,
|
||||
kcu.column_name AS column_name,
|
||||
ccu.table_schema AS foreign_table_schema,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY'
|
||||
AND tc.table_name = '%s'
|
||||
AND tc.table_schema = '%s'
|
||||
ORDER BY tc.constraint_name, kcu.ordinal_position`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := h.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
refSchema := ""
|
||||
if v, ok := row["foreign_table_schema"]; ok && v != nil {
|
||||
refSchema = fmt.Sprintf("%v", v)
|
||||
}
|
||||
refTable := fmt.Sprintf("%v", row["foreign_table_name"])
|
||||
refTableName := refTable
|
||||
if strings.TrimSpace(refSchema) != "" {
|
||||
refTableName = fmt.Sprintf("%s.%s", refSchema, refTable)
|
||||
}
|
||||
|
||||
fk := connection.ForeignKeyDefinition{
|
||||
Name: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
RefTableName: refTableName,
|
||||
RefColumnName: fmt.Sprintf("%v", row["foreign_column_name"]),
|
||||
ConstraintName: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
}
|
||||
fks = append(fks, fk)
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT trigger_name, action_timing, event_manipulation, action_statement
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s'
|
||||
AND event_object_schema = '%s'
|
||||
ORDER BY trigger_name, event_manipulation`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := h.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["trigger_name"]),
|
||||
Timing: fmt.Sprintf("%v", row["action_timing"]),
|
||||
Event: fmt.Sprintf("%v", row["event_manipulation"]),
|
||||
Statement: fmt.Sprintf("%v", row["action_statement"]),
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
query := `
|
||||
SELECT table_schema, table_name, column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND table_schema NOT LIKE 'pg_%'
|
||||
ORDER BY table_schema, table_name, ordinal_position`
|
||||
|
||||
data, _, err := h.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
schema := fmt.Sprintf("%v", row["table_schema"])
|
||||
table := fmt.Sprintf("%v", row["table_name"])
|
||||
tableName := table
|
||||
if strings.TrimSpace(schema) != "" {
|
||||
tableName = fmt.Sprintf("%s.%s", schema, table)
|
||||
}
|
||||
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: tableName,
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
|
||||
func (h *HighGoDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if h.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := h.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf("$%d", idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
@@ -1,12 +1,16 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
@@ -16,6 +20,7 @@ import (
|
||||
type KingbaseDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder // Store SSH tunnel forwarder
|
||||
}
|
||||
|
||||
func quoteConnValue(v string) string {
|
||||
@@ -57,20 +62,6 @@ func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
address := config.Host
|
||||
port := config.Port
|
||||
|
||||
if config.UseSSH {
|
||||
netName, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
// Kingbase/Postgres lib/pq allows custom dialer via "host" if using unix socket,
|
||||
// but for custom network it's harder.
|
||||
// Ideally we use a local forwarder.
|
||||
// For now, we assume standard TCP or handle SSH externally.
|
||||
// If we implement the net.Dial override for "kingbase" driver (which might use lib/pq internally),
|
||||
// we might need to check if it supports "cloudsql" style or similar custom dialers.
|
||||
// Similar to others, skipping SSH deep integration here for now.
|
||||
_ = netName
|
||||
}
|
||||
}
|
||||
|
||||
// Construct DSN
|
||||
dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable connect_timeout=%d",
|
||||
quoteConnValue(address),
|
||||
@@ -85,7 +76,42 @@ func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := k.getDSN(config)
|
||||
var dsn string
|
||||
var err error
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
logger.Infof("人大金仓使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
k.forwarder = forwarder
|
||||
|
||||
// Parse local address
|
||||
host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
// Create a modified config pointing to local forwarder
|
||||
localConfig := config
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = k.getDSN(localConfig)
|
||||
logger.Infof("人大金仓通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = k.getDSN(config)
|
||||
}
|
||||
|
||||
// Open using "kingbase" driver
|
||||
db, err := sql.Open("kingbase", dsn)
|
||||
if err != nil {
|
||||
@@ -100,6 +126,15 @@ func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Close() error {
|
||||
// Close SSH forwarder first if exists
|
||||
if k.forwarder != nil {
|
||||
if err := k.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭人大金仓 SSH 端口转发失败:%v", err)
|
||||
}
|
||||
k.forwarder = nil
|
||||
}
|
||||
|
||||
// Then close database connection
|
||||
if k.conn != nil {
|
||||
return k.conn.Close()
|
||||
}
|
||||
@@ -119,6 +154,20 @@ func (k *KingbaseDB) Ping() error {
|
||||
return k.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if k.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := k.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if k.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
@@ -129,41 +178,18 @@ func (k *KingbaseDB) Query(query string) ([]map[string]interface{}, []string, er
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
columns, err := rows.Columns()
|
||||
func (k *KingbaseDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if k.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := k.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Exec(query string) (int64, error) {
|
||||
@@ -193,15 +219,14 @@ func (k *KingbaseDB) GetDatabases() ([]string, error) {
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetTables(dbName string) ([]string, error) {
|
||||
// Usually restricted to current database connection in PG/Kingbase
|
||||
// dbName param is often Schema in PG context, or ignored if we are connected to a specific DB.
|
||||
// But in PG, cross-database queries are not standard without dblink.
|
||||
// We assume dbName here might mean Schema (public, etc.)
|
||||
|
||||
query := "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
|
||||
if dbName != "" && dbName != "public" {
|
||||
query = fmt.Sprintf("SELECT table_name FROM information_schema.tables WHERE table_schema = '%s'", dbName)
|
||||
}
|
||||
// Kingbase: tables are scoped by the current DB connection; include schema to avoid search_path issues.
|
||||
query := `
|
||||
SELECT table_schema AS schemaname, table_name AS tablename
|
||||
FROM information_schema.tables
|
||||
WHERE table_type = 'BASE TABLE'
|
||||
AND table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND table_schema NOT LIKE 'pg_%'
|
||||
ORDER BY table_schema, table_name`
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -210,6 +235,12 @@ func (k *KingbaseDB) GetTables(dbName string) ([]string, error) {
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
schema, okSchema := row["schemaname"]
|
||||
name, okName := row["tablename"]
|
||||
if okSchema && okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
|
||||
continue
|
||||
}
|
||||
if val, ok := row["table_name"]; ok {
|
||||
tables = append(tables, fmt.Sprintf("%v", val))
|
||||
}
|
||||
@@ -226,15 +257,84 @@ func (k *KingbaseDB) GetCreateStatement(dbName, tableName string) (string, error
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
// 解析 schema.table 格式
|
||||
schema := strings.TrimSpace(dbName)
|
||||
table := strings.TrimSpace(tableName)
|
||||
|
||||
// 如果 tableName 包含 schema (格式: schema.table)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
parsedSchema := strings.TrimSpace(parts[0])
|
||||
parsedTable := strings.TrimSpace(parts[1])
|
||||
if parsedSchema != "" && parsedTable != "" {
|
||||
schema = parsedSchema
|
||||
table = parsedTable
|
||||
}
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s' AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, schema, tableName)
|
||||
// 如果仍然没有 schema,使用 current_schema()
|
||||
// 这样可以自动匹配当前连接的 search_path
|
||||
if schema == "" {
|
||||
return k.getColumnsWithCurrentSchema(table)
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
esc := func(s string) string {
|
||||
// 移除前后的双引号(如果存在)
|
||||
s = strings.Trim(s, "\"")
|
||||
// 转义单引号
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s' AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, esc(schema), esc(table))
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
}
|
||||
|
||||
if row["column_default"] != nil {
|
||||
def := fmt.Sprintf("%v", row["column_default"])
|
||||
col.Default = &def
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
// getColumnsWithCurrentSchema 使用 current_schema() 查询当前schema的表
|
||||
func (k *KingbaseDB) getColumnsWithCurrentSchema(tableName string) ([]connection.ColumnDefinition, error) {
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
// 转义函数
|
||||
esc := func(s string) string {
|
||||
s = strings.Trim(s, "\"")
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
// 使用 current_schema() 获取当前schema
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = current_schema() AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, esc(table))
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -260,32 +360,76 @@ func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDe
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
// Postgres/Kingbase index query
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname as index_name,
|
||||
a.attname as column_name,
|
||||
ix.indisunique as is_unique
|
||||
FROM
|
||||
pg_class t,
|
||||
pg_class i,
|
||||
pg_index ix,
|
||||
pg_attribute a,
|
||||
pg_namespace n
|
||||
WHERE
|
||||
t.oid = ix.indrelid
|
||||
AND i.oid = ix.indexrelid
|
||||
AND a.attrelid = t.oid
|
||||
AND a.attnum = ANY(ix.indkey)
|
||||
AND t.relkind = 'r'
|
||||
AND t.relname = '%s'
|
||||
AND n.oid = t.relnamespace
|
||||
AND n.nspname = '%s'
|
||||
`, tableName, "public") // Default to public if dbName (schema) not clear.
|
||||
// 解析 schema.table 格式
|
||||
schema := strings.TrimSpace(dbName)
|
||||
table := strings.TrimSpace(tableName)
|
||||
|
||||
if dbName != "" {
|
||||
// Update query to use dbName as schema
|
||||
query = strings.Replace(query, "'public'", fmt.Sprintf("'%s'", dbName), 1)
|
||||
// 如果 tableName 包含 schema (格式: schema.table)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
parsedSchema := strings.TrimSpace(parts[0])
|
||||
parsedTable := strings.TrimSpace(parts[1])
|
||||
if parsedSchema != "" && parsedTable != "" {
|
||||
schema = parsedSchema
|
||||
table = parsedTable
|
||||
}
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
esc := func(s string) string {
|
||||
s = strings.Trim(s, "\"")
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
// 构建查询:如果没有指定schema,使用current_schema()
|
||||
var query string
|
||||
if schema != "" {
|
||||
query = fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname as index_name,
|
||||
a.attname as column_name,
|
||||
ix.indisunique as is_unique
|
||||
FROM
|
||||
pg_class t,
|
||||
pg_class i,
|
||||
pg_index ix,
|
||||
pg_attribute a,
|
||||
pg_namespace n
|
||||
WHERE
|
||||
t.oid = ix.indrelid
|
||||
AND i.oid = ix.indexrelid
|
||||
AND a.attrelid = t.oid
|
||||
AND a.attnum = ANY(ix.indkey)
|
||||
AND t.relkind = 'r'
|
||||
AND t.relname = '%s'
|
||||
AND n.oid = t.relnamespace
|
||||
AND n.nspname = '%s'
|
||||
`, esc(table), esc(schema))
|
||||
} else {
|
||||
query = fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname as index_name,
|
||||
a.attname as column_name,
|
||||
ix.indisunique as is_unique
|
||||
FROM
|
||||
pg_class t,
|
||||
pg_class i,
|
||||
pg_index ix,
|
||||
pg_attribute a,
|
||||
pg_namespace n
|
||||
WHERE
|
||||
t.oid = ix.indrelid
|
||||
AND i.oid = ix.indexrelid
|
||||
AND a.attrelid = t.oid
|
||||
AND a.attnum = ANY(ix.indkey)
|
||||
AND t.relkind = 'r'
|
||||
AND t.relname = '%s'
|
||||
AND n.oid = t.relnamespace
|
||||
AND n.nspname = current_schema()
|
||||
`, esc(table))
|
||||
}
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
@@ -314,27 +458,67 @@ func (k *KingbaseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDef
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
// 解析 schema.table 格式
|
||||
schema := strings.TrimSpace(dbName)
|
||||
table := strings.TrimSpace(tableName)
|
||||
|
||||
// 如果 tableName 包含 schema (格式: schema.table)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
parsedSchema := strings.TrimSpace(parts[0])
|
||||
parsedTable := strings.TrimSpace(parts[1])
|
||||
if parsedSchema != "" && parsedTable != "" {
|
||||
schema = parsedSchema
|
||||
table = parsedTable
|
||||
}
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name,
|
||||
kcu.column_name,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM
|
||||
information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name='%s' AND tc.table_schema='%s'`,
|
||||
tableName, schema)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
esc := func(s string) string {
|
||||
s = strings.Trim(s, "\"")
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
// 构建查询:如果没有指定schema,使用current_schema()
|
||||
var query string
|
||||
if schema != "" {
|
||||
query = fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name,
|
||||
kcu.column_name,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM
|
||||
information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name='%s' AND tc.table_schema='%s'`,
|
||||
esc(table), esc(schema))
|
||||
} else {
|
||||
query = fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name,
|
||||
kcu.column_name,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM
|
||||
information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name='%s' AND tc.table_schema=current_schema()`,
|
||||
esc(table))
|
||||
}
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -356,9 +540,43 @@ func (k *KingbaseDB) GetForeignKeys(dbName, tableName string) ([]connection.Fore
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT trigger_name, action_timing, event_manipulation
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s'`, tableName)
|
||||
// 解析 schema.table 格式
|
||||
schema := strings.TrimSpace(dbName)
|
||||
table := strings.TrimSpace(tableName)
|
||||
|
||||
// 如果 tableName 包含 schema (格式: schema.table)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
parsedSchema := strings.TrimSpace(parts[0])
|
||||
parsedTable := strings.TrimSpace(parts[1])
|
||||
if parsedSchema != "" && parsedTable != "" {
|
||||
schema = parsedSchema
|
||||
table = parsedTable
|
||||
}
|
||||
}
|
||||
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
// 转义函数:处理单引号,移除双引号
|
||||
esc := func(s string) string {
|
||||
s = strings.Trim(s, "\"")
|
||||
return strings.ReplaceAll(s, "'", "''")
|
||||
}
|
||||
|
||||
// 构建查询:如果指定了schema,也加上schema条件
|
||||
var query string
|
||||
if schema != "" {
|
||||
query = fmt.Sprintf(`SELECT trigger_name, action_timing, event_manipulation
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s' AND event_object_schema = '%s'`,
|
||||
esc(table), esc(schema))
|
||||
} else {
|
||||
query = fmt.Sprintf(`SELECT trigger_name, action_timing, event_manipulation
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s' AND event_object_schema = current_schema()`,
|
||||
esc(table))
|
||||
}
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -379,18 +597,127 @@ func (k *KingbaseDB) GetTriggers(dbName, tableName string) ([]connection.Trigger
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
return fmt.Errorf("read-only mode implemented for Kingbase so far")
|
||||
if k.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := k.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf("$%d", idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT table_name, column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s'`, schema)
|
||||
// dbName 在本项目语义里是“数据库”,schema 由 table_schema 决定;这里返回全部用户 schema 的列用于查询提示。
|
||||
query := `
|
||||
SELECT table_schema, table_name, column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND table_schema NOT LIKE 'pg_%'
|
||||
ORDER BY table_schema, table_name, ordinal_position`
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
@@ -399,8 +726,14 @@ func (k *KingbaseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinition
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
schema := fmt.Sprintf("%v", row["table_schema"])
|
||||
table := fmt.Sprintf("%v", row["table_name"])
|
||||
tableName := table
|
||||
if strings.TrimSpace(schema) != "" {
|
||||
tableName = fmt.Sprintf("%s.%s", schema, table)
|
||||
}
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: fmt.Sprintf("%v", row["table_name"]),
|
||||
TableName: tableName,
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
}
|
||||
|
||||
409
internal/db/mariadb_impl.go
Normal file
409
internal/db/mariadb_impl.go
Normal file
@@ -0,0 +1,409 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/go-sql-driver/mysql"
|
||||
)
|
||||
|
||||
// MariaDB implements Database interface for MariaDB
|
||||
// MariaDB is MySQL-compatible, so we reuse the MySQL driver
|
||||
type MariaDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
}
|
||||
|
||||
func (m *MariaDB) getDSN(config connection.ConnectionConfig) string {
|
||||
database := config.Database
|
||||
protocol := "tcp"
|
||||
address := fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
|
||||
if config.UseSSH {
|
||||
netName, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
protocol = netName
|
||||
address = fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
} else {
|
||||
logger.Warnf("注册 SSH 网络失败,将尝试直连:地址=%s:%d 用户=%s,原因:%v", config.Host, config.Port, config.User, err)
|
||||
}
|
||||
}
|
||||
|
||||
timeout := getConnectTimeoutSeconds(config)
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds",
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
}
|
||||
|
||||
func (m *MariaDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := m.getDSN(config)
|
||||
db, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
m.conn = db
|
||||
m.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := m.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) Close() error {
|
||||
if m.conn != nil {
|
||||
return m.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) Ping() error {
|
||||
if m.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := m.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return m.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (m *MariaDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if m.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := m.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (m *MariaDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if m.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := m.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (m *MariaDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if m.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := m.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (m *MariaDB) Exec(query string) (int64, error) {
|
||||
if m.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := m.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetDatabases() ([]string, error) {
|
||||
data, _, err := m.Query("SHOW DATABASES")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["Database"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
} else if val, ok := row["database"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetTables(dbName string) ([]string, error) {
|
||||
query := "SHOW TABLES"
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SHOW TABLES FROM `%s`", dbName)
|
||||
}
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
for _, v := range row {
|
||||
tables = append(tables, fmt.Sprintf("%v", v))
|
||||
break
|
||||
}
|
||||
}
|
||||
return tables, nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
query := fmt.Sprintf("SHOW CREATE TABLE `%s`.`%s`", dbName, tableName)
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf("SHOW CREATE TABLE `%s`", tableName)
|
||||
}
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(data) > 0 {
|
||||
if val, ok := data[0]["Create Table"]; ok {
|
||||
return fmt.Sprintf("%v", val), nil
|
||||
}
|
||||
}
|
||||
return "", fmt.Errorf("create statement not found")
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
query := fmt.Sprintf("SHOW FULL COLUMNS FROM `%s`.`%s`", dbName, tableName)
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf("SHOW FULL COLUMNS FROM `%s`", tableName)
|
||||
}
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["Field"]),
|
||||
Type: fmt.Sprintf("%v", row["Type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["Null"]),
|
||||
Key: fmt.Sprintf("%v", row["Key"]),
|
||||
Extra: fmt.Sprintf("%v", row["Extra"]),
|
||||
Comment: fmt.Sprintf("%v", row["Comment"]),
|
||||
}
|
||||
|
||||
if row["Default"] != nil {
|
||||
d := fmt.Sprintf("%v", row["Default"])
|
||||
col.Default = &d
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
query := fmt.Sprintf("SHOW INDEX FROM `%s`.`%s`", dbName, tableName)
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf("SHOW INDEX FROM `%s`", tableName)
|
||||
}
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
nonUnique := 0
|
||||
if val, ok := row["Non_unique"]; ok {
|
||||
if f, ok := val.(float64); ok {
|
||||
nonUnique = int(f)
|
||||
} else if i, ok := val.(int64); ok {
|
||||
nonUnique = int(i)
|
||||
}
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
if f, ok := val.(float64); ok {
|
||||
seq = int(f)
|
||||
} else if i, ok := val.(int64); ok {
|
||||
seq = int(i)
|
||||
}
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["Key_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["Column_name"]),
|
||||
NonUnique: nonUnique,
|
||||
SeqInIndex: seq,
|
||||
IndexType: fmt.Sprintf("%v", row["Index_type"]),
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT CONSTRAINT_NAME, COLUMN_NAME, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME
|
||||
FROM information_schema.KEY_COLUMN_USAGE
|
||||
WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s' AND REFERENCED_TABLE_NAME IS NOT NULL`, dbName, tableName)
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
fk := connection.ForeignKeyDefinition{
|
||||
Name: fmt.Sprintf("%v", row["CONSTRAINT_NAME"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
RefTableName: fmt.Sprintf("%v", row["REFERENCED_TABLE_NAME"]),
|
||||
RefColumnName: fmt.Sprintf("%v", row["REFERENCED_COLUMN_NAME"]),
|
||||
ConstraintName: fmt.Sprintf("%v", row["CONSTRAINT_NAME"]),
|
||||
}
|
||||
fks = append(fks, fk)
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
query := fmt.Sprintf("SHOW TRIGGERS FROM `%s` WHERE `Table` = '%s'", dbName, tableName)
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["Trigger"]),
|
||||
Timing: fmt.Sprintf("%v", row["Timing"]),
|
||||
Event: fmt.Sprintf("%v", row["Event"]),
|
||||
Statement: fmt.Sprintf("%v", row["Statement"]),
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
func (m *MariaDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if m.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := m.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
for k, v := range pk {
|
||||
wheres = append(wheres, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM `%s` WHERE %s", tableName, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
|
||||
for k, v := range update.Values {
|
||||
sets = append(sets, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
wheres = append(wheres, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE `%s` SET %s WHERE %s", tableName, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
|
||||
for k, v := range row {
|
||||
cols = append(cols, fmt.Sprintf("`%s`", k))
|
||||
placeholders = append(placeholders, "?")
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO `%s` (%s) VALUES (%s)", tableName, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (m *MariaDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
query := fmt.Sprintf("SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = '%s'", dbName)
|
||||
if dbName == "" {
|
||||
return nil, fmt.Errorf("database name required for GetAllColumns")
|
||||
}
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: fmt.Sprintf("%v", row["TABLE_NAME"]),
|
||||
Name: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
Type: fmt.Sprintf("%v", row["COLUMN_TYPE"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
1144
internal/db/mongodb_impl.go
Normal file
1144
internal/db/mongodb_impl.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,8 +1,11 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -19,16 +22,161 @@ type MySQLDB struct {
|
||||
pingTimeout time.Duration
|
||||
}
|
||||
|
||||
const defaultMySQLPort = 3306
|
||||
|
||||
func parseHostPortWithDefault(raw string, defaultPort int) (string, int, bool) {
|
||||
text := strings.TrimSpace(raw)
|
||||
if text == "" {
|
||||
return "", 0, false
|
||||
}
|
||||
|
||||
if strings.HasPrefix(text, "[") {
|
||||
end := strings.Index(text, "]")
|
||||
if end < 0 {
|
||||
return text, defaultPort, true
|
||||
}
|
||||
host := text[1:end]
|
||||
portText := strings.TrimSpace(text[end+1:])
|
||||
if strings.HasPrefix(portText, ":") {
|
||||
if p, err := strconv.Atoi(strings.TrimSpace(strings.TrimPrefix(portText, ":"))); err == nil && p > 0 {
|
||||
return host, p, true
|
||||
}
|
||||
}
|
||||
return host, defaultPort, true
|
||||
}
|
||||
|
||||
lastColon := strings.LastIndex(text, ":")
|
||||
if lastColon > 0 && strings.Count(text, ":") == 1 {
|
||||
host := strings.TrimSpace(text[:lastColon])
|
||||
portText := strings.TrimSpace(text[lastColon+1:])
|
||||
if host != "" {
|
||||
if p, err := strconv.Atoi(portText); err == nil && p > 0 {
|
||||
return host, p, true
|
||||
}
|
||||
return host, defaultPort, true
|
||||
}
|
||||
}
|
||||
|
||||
return text, defaultPort, true
|
||||
}
|
||||
|
||||
func normalizeMySQLAddress(host string, port int) string {
|
||||
h := strings.TrimSpace(host)
|
||||
if h == "" {
|
||||
h = "localhost"
|
||||
}
|
||||
p := port
|
||||
if p <= 0 {
|
||||
p = defaultMySQLPort
|
||||
}
|
||||
return fmt.Sprintf("%s:%d", h, p)
|
||||
}
|
||||
|
||||
func applyMySQLURI(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
uriText := strings.TrimSpace(config.URI)
|
||||
if uriText == "" {
|
||||
return config
|
||||
}
|
||||
if !strings.HasPrefix(strings.ToLower(uriText), "mysql://") {
|
||||
return config
|
||||
}
|
||||
|
||||
parsed, err := url.Parse(uriText)
|
||||
if err != nil {
|
||||
return config
|
||||
}
|
||||
|
||||
if parsed.User != nil {
|
||||
if config.User == "" {
|
||||
config.User = parsed.User.Username()
|
||||
}
|
||||
if pass, ok := parsed.User.Password(); ok && config.Password == "" {
|
||||
config.Password = pass
|
||||
}
|
||||
}
|
||||
|
||||
if dbName := strings.TrimPrefix(parsed.Path, "/"); dbName != "" && config.Database == "" {
|
||||
config.Database = dbName
|
||||
}
|
||||
|
||||
defaultPort := config.Port
|
||||
if defaultPort <= 0 {
|
||||
defaultPort = defaultMySQLPort
|
||||
}
|
||||
|
||||
hostsFromURI := make([]string, 0, 4)
|
||||
hostText := strings.TrimSpace(parsed.Host)
|
||||
if hostText != "" {
|
||||
for _, entry := range strings.Split(hostText, ",") {
|
||||
host, port, ok := parseHostPortWithDefault(entry, defaultPort)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
hostsFromURI = append(hostsFromURI, normalizeMySQLAddress(host, port))
|
||||
}
|
||||
}
|
||||
|
||||
if len(config.Hosts) == 0 && len(hostsFromURI) > 0 {
|
||||
config.Hosts = hostsFromURI
|
||||
}
|
||||
if strings.TrimSpace(config.Host) == "" && len(hostsFromURI) > 0 {
|
||||
host, port, ok := parseHostPortWithDefault(hostsFromURI[0], defaultPort)
|
||||
if ok {
|
||||
config.Host = host
|
||||
config.Port = port
|
||||
}
|
||||
}
|
||||
|
||||
if config.Topology == "" {
|
||||
topology := strings.TrimSpace(parsed.Query().Get("topology"))
|
||||
if topology != "" {
|
||||
config.Topology = strings.ToLower(topology)
|
||||
}
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
func collectMySQLAddresses(config connection.ConnectionConfig) []string {
|
||||
defaultPort := config.Port
|
||||
if defaultPort <= 0 {
|
||||
defaultPort = defaultMySQLPort
|
||||
}
|
||||
|
||||
candidates := make([]string, 0, len(config.Hosts)+1)
|
||||
if len(config.Hosts) > 0 {
|
||||
candidates = append(candidates, config.Hosts...)
|
||||
} else {
|
||||
candidates = append(candidates, normalizeMySQLAddress(config.Host, defaultPort))
|
||||
}
|
||||
|
||||
result := make([]string, 0, len(candidates))
|
||||
seen := make(map[string]struct{}, len(candidates))
|
||||
for _, entry := range candidates {
|
||||
host, port, ok := parseHostPortWithDefault(entry, defaultPort)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
normalized := normalizeMySQLAddress(host, port)
|
||||
if _, exists := seen[normalized]; exists {
|
||||
continue
|
||||
}
|
||||
seen[normalized] = struct{}{}
|
||||
result = append(result, normalized)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (m *MySQLDB) getDSN(config connection.ConnectionConfig) string {
|
||||
database := config.Database
|
||||
protocol := "tcp"
|
||||
address := fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
address := normalizeMySQLAddress(config.Host, config.Port)
|
||||
|
||||
if config.UseSSH {
|
||||
netName, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
protocol = netName
|
||||
address = fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
address = normalizeMySQLAddress(config.Host, config.Port)
|
||||
} else {
|
||||
logger.Warnf("注册 SSH 网络失败,将尝试直连:地址=%s:%d 用户=%s,原因:%v", config.Host, config.Port, config.User, err)
|
||||
}
|
||||
@@ -40,20 +188,67 @@ func (m *MySQLDB) getDSN(config connection.ConnectionConfig) string {
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
}
|
||||
|
||||
func resolveMySQLCredential(config connection.ConnectionConfig, addressIndex int) (string, string) {
|
||||
primaryUser := strings.TrimSpace(config.User)
|
||||
primaryPassword := config.Password
|
||||
replicaUser := strings.TrimSpace(config.MySQLReplicaUser)
|
||||
replicaPassword := config.MySQLReplicaPassword
|
||||
|
||||
if addressIndex > 0 && replicaUser != "" {
|
||||
return replicaUser, replicaPassword
|
||||
}
|
||||
|
||||
if primaryUser == "" && replicaUser != "" {
|
||||
return replicaUser, replicaPassword
|
||||
}
|
||||
|
||||
return config.User, primaryPassword
|
||||
}
|
||||
|
||||
func (m *MySQLDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := m.getDSN(config)
|
||||
db, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
runConfig := applyMySQLURI(config)
|
||||
addresses := collectMySQLAddresses(runConfig)
|
||||
if len(addresses) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 MySQL 地址")
|
||||
}
|
||||
m.conn = db
|
||||
m.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
// Force verification
|
||||
if err := m.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
|
||||
var errorDetails []string
|
||||
for index, address := range addresses {
|
||||
candidateConfig := runConfig
|
||||
host, port, ok := parseHostPortWithDefault(address, defaultMySQLPort)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
candidateConfig.Host = host
|
||||
candidateConfig.Port = port
|
||||
candidateConfig.User, candidateConfig.Password = resolveMySQLCredential(runConfig, index)
|
||||
|
||||
dsn := m.getDSN(candidateConfig)
|
||||
db, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s 打开失败: %v", address, err))
|
||||
continue
|
||||
}
|
||||
|
||||
timeout := getConnectTimeout(candidateConfig)
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
pingErr := db.PingContext(ctx)
|
||||
cancel()
|
||||
if pingErr != nil {
|
||||
_ = db.Close()
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s 验证失败: %v", address, pingErr))
|
||||
continue
|
||||
}
|
||||
|
||||
m.conn = db
|
||||
m.pingTimeout = timeout
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
|
||||
if len(errorDetails) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 MySQL 地址")
|
||||
}
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(errorDetails, ";"))
|
||||
}
|
||||
|
||||
func (m *MySQLDB) Close() error {
|
||||
@@ -76,6 +271,20 @@ func (m *MySQLDB) Ping() error {
|
||||
return m.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (m *MySQLDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if m.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := m.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (m *MySQLDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if m.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
@@ -86,41 +295,18 @@ func (m *MySQLDB) Query(query string) ([]map[string]interface{}, []string, error
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
columns, err := rows.Columns()
|
||||
func (m *MySQLDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if m.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := m.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (m *MySQLDB) Exec(query string) (int64, error) {
|
||||
@@ -155,12 +341,12 @@ func (m *MySQLDB) GetTables(dbName string) ([]string, error) {
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SHOW TABLES FROM `%s`", dbName)
|
||||
}
|
||||
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
for _, v := range row {
|
||||
@@ -181,7 +367,7 @@ func (m *MySQLDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
|
||||
if len(data) > 0 {
|
||||
if val, ok := data[0]["Create Table"]; ok {
|
||||
return fmt.Sprintf("%v", val), nil
|
||||
@@ -211,12 +397,12 @@ func (m *MySQLDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefin
|
||||
Extra: fmt.Sprintf("%v", row["Extra"]),
|
||||
Comment: fmt.Sprintf("%v", row["Comment"]),
|
||||
}
|
||||
|
||||
|
||||
if row["Default"] != nil {
|
||||
d := fmt.Sprintf("%v", row["Default"])
|
||||
col.Default = &d
|
||||
}
|
||||
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
@@ -244,14 +430,14 @@ func (m *MySQLDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefini
|
||||
}
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
if f, ok := val.(float64); ok {
|
||||
seq = int(f)
|
||||
} else if i, ok := val.(int64); ok {
|
||||
seq = int(i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["Key_name"]),
|
||||
@@ -326,27 +512,31 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
var args []interface{}
|
||||
for k, v := range pk {
|
||||
wheres = append(wheres, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM `%s` WHERE %s", tableName, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
res, err := tx.Exec(query, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
if affected, err := res.RowsAffected(); err == nil && affected == 0 {
|
||||
return fmt.Errorf("删除未生效:未匹配到任何行")
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
|
||||
|
||||
for k, v := range update.Values {
|
||||
sets = append(sets, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
@@ -354,17 +544,21 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
wheres = append(wheres, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE `%s` SET %s WHERE %s", tableName, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
res, err := tx.Exec(query, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
if affected, err := res.RowsAffected(); err == nil && affected == 0 {
|
||||
return fmt.Errorf("更新未生效:未匹配到任何行")
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
@@ -372,26 +566,105 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
|
||||
|
||||
for k, v := range row {
|
||||
cols = append(cols, fmt.Sprintf("`%s`", k))
|
||||
placeholders = append(placeholders, "?")
|
||||
args = append(args, v)
|
||||
args = append(args, normalizeMySQLDateTimeValue(v))
|
||||
}
|
||||
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO `%s` (%s) VALUES (%s)", tableName, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
res, err := tx.Exec(query, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
if affected, err := res.RowsAffected(); err == nil && affected == 0 {
|
||||
return fmt.Errorf("插入未生效:未影响任何行")
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func normalizeMySQLDateTimeValue(value interface{}) interface{} {
|
||||
text, ok := value.(string)
|
||||
if !ok {
|
||||
return value
|
||||
}
|
||||
raw := strings.TrimSpace(text)
|
||||
if raw == "" {
|
||||
return value
|
||||
}
|
||||
|
||||
cleaned := strings.ReplaceAll(raw, "+ ", "+")
|
||||
cleaned = strings.ReplaceAll(cleaned, "- ", "-")
|
||||
|
||||
if len(cleaned) >= 19 && cleaned[10] == 'T' {
|
||||
if strings.HasSuffix(cleaned, "Z") || hasTimezoneOffset(cleaned) {
|
||||
if t, err := time.Parse(time.RFC3339Nano, cleaned); err == nil {
|
||||
return formatMySQLDateTime(t)
|
||||
}
|
||||
if t, err := time.Parse(time.RFC3339, cleaned); err == nil {
|
||||
return formatMySQLDateTime(t)
|
||||
}
|
||||
}
|
||||
return strings.Replace(cleaned, "T", " ", 1)
|
||||
}
|
||||
|
||||
if strings.Contains(cleaned, " ") && (strings.HasSuffix(cleaned, "Z") || hasTimezoneOffset(cleaned)) {
|
||||
candidate := strings.Replace(cleaned, " ", "T", 1)
|
||||
if t, err := time.Parse(time.RFC3339Nano, candidate); err == nil {
|
||||
return formatMySQLDateTime(t)
|
||||
}
|
||||
if t, err := time.Parse(time.RFC3339, candidate); err == nil {
|
||||
return formatMySQLDateTime(t)
|
||||
}
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
func hasTimezoneOffset(text string) bool {
|
||||
pos := strings.LastIndexAny(text, "+-")
|
||||
if pos < 0 || pos < 10 || pos+1 >= len(text) {
|
||||
return false
|
||||
}
|
||||
offset := text[pos+1:]
|
||||
if len(offset) == 5 && offset[2] == ':' {
|
||||
return isAllDigits(offset[:2]) && isAllDigits(offset[3:])
|
||||
}
|
||||
if len(offset) == 4 {
|
||||
return isAllDigits(offset)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isAllDigits(text string) bool {
|
||||
if text == "" {
|
||||
return false
|
||||
}
|
||||
for _, r := range text {
|
||||
if r < '0' || r > '9' {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func formatMySQLDateTime(t time.Time) string {
|
||||
base := t.Format("2006-01-02 15:04:05")
|
||||
nanos := t.Nanosecond()
|
||||
if nanos == 0 {
|
||||
return base
|
||||
}
|
||||
micro := nanos / 1000
|
||||
return fmt.Sprintf("%s.%06d", base, micro)
|
||||
}
|
||||
|
||||
func (m *MySQLDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
query := fmt.Sprintf("SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = '%s'", dbName)
|
||||
if dbName == "" {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
@@ -10,6 +11,7 @@ import (
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
@@ -19,6 +21,7 @@ import (
|
||||
type OracleDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder // Store SSH tunnel forwarder
|
||||
}
|
||||
|
||||
func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
@@ -28,28 +31,6 @@ func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
database = config.User // Default to user service/schema if empty?
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
_, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
// Oracle driver might not support custom dialer via DSN easily without extra config
|
||||
// But go-ora v2 supports some advanced options.
|
||||
// For simplicity, we assume standard TCP or we might need a workaround for SSH.
|
||||
// go-ora v2 is pure Go, so we can potentially use a custom dialer if we manually open.
|
||||
// But for now, let's just use the address.
|
||||
// SSH tunneling via net.Dialer override is complex in sql.Open("oracle", ...).
|
||||
// We might need to forward a local port if using SSH.
|
||||
// Since ssh.RegisterSSHNetwork creates a custom network "ssh-via-...",
|
||||
// we need to see if go-ora supports custom networks.
|
||||
// Checking go-ora docs (simulated): It supports "unix" and "tcp".
|
||||
// We might need to map the custom network to a local proxy.
|
||||
// For now, we will assume direct connection or handle SSH separately later.
|
||||
// We'll leave the protocol implementation as is in MySQL for now, hoping go-ora uses standard net.Dial.
|
||||
// Note: go-ora connection string: oracle://user:pass@host:port/service
|
||||
// It parses host/port. It doesn't easily take a custom "network" parameter in URL.
|
||||
// We will proceed with standard TCP string.
|
||||
}
|
||||
}
|
||||
|
||||
u := &url.URL{
|
||||
Scheme: "oracle",
|
||||
Host: net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
@@ -61,7 +42,42 @@ func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (o *OracleDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := o.getDSN(config)
|
||||
var dsn string
|
||||
var err error
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
logger.Infof("Oracle 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
o.forwarder = forwarder
|
||||
|
||||
// Parse local address
|
||||
host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
// Create a modified config pointing to local forwarder
|
||||
localConfig := config
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = o.getDSN(localConfig)
|
||||
logger.Infof("Oracle 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = o.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("oracle", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
@@ -75,6 +91,15 @@ func (o *OracleDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
|
||||
func (o *OracleDB) Close() error {
|
||||
// Close SSH forwarder first if exists
|
||||
if o.forwarder != nil {
|
||||
if err := o.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 Oracle SSH 端口转发失败:%v", err)
|
||||
}
|
||||
o.forwarder = nil
|
||||
}
|
||||
|
||||
// Then close database connection
|
||||
if o.conn != nil {
|
||||
return o.conn.Close()
|
||||
}
|
||||
@@ -94,6 +119,20 @@ func (o *OracleDB) Ping() error {
|
||||
return o.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (o *OracleDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if o.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := o.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (o *OracleDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if o.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
@@ -104,41 +143,18 @@ func (o *OracleDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
columns, err := rows.Columns()
|
||||
func (o *OracleDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if o.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := o.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (o *OracleDB) Exec(query string) (int64, error) {
|
||||
@@ -171,7 +187,7 @@ func (o *OracleDB) GetTables(dbName string) ([]string, error) {
|
||||
// dbName is Schema/Owner
|
||||
query := "SELECT table_name FROM user_tables"
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SELECT table_name FROM all_tables WHERE owner = '%s'", strings.ToUpper(dbName))
|
||||
query = fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
|
||||
}
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
@@ -181,6 +197,14 @@ func (o *OracleDB) GetTables(dbName string) ([]string, error) {
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if dbName != "" {
|
||||
if owner, okOwner := row["OWNER"]; okOwner {
|
||||
if name, okName := row["TABLE_NAME"]; okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", owner, name))
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
if val, ok := row["TABLE_NAME"]; ok {
|
||||
tables = append(tables, fmt.Sprintf("%v", val))
|
||||
}
|
||||
@@ -339,8 +363,117 @@ func (o *OracleDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDe
|
||||
}
|
||||
|
||||
func (o *OracleDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
// TODO: Implement batch application for Oracle using correct syntax
|
||||
return fmt.Errorf("read-only mode implemented for Oracle so far")
|
||||
if o.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := o.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
for k, v := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = :%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = :%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, v := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = :%d", quoteIdent(k), idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, v := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf(":%d", idx))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
|
||||
@@ -1,24 +1,31 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/lib/pq"
|
||||
)
|
||||
|
||||
|
||||
type PostgresDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder // Store SSH tunnel forwarder
|
||||
}
|
||||
|
||||
|
||||
func (p *PostgresDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// postgres://user:password@host:port/dbname?sslmode=disable
|
||||
dbname := config.Database
|
||||
@@ -41,14 +48,49 @@ func (p *PostgresDB) getDSN(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := p.getDSN(config)
|
||||
var dsn string
|
||||
var err error
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
logger.Infof("PostgreSQL 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
p.forwarder = forwarder
|
||||
|
||||
// Parse local address
|
||||
host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
// Create a modified config pointing to local forwarder
|
||||
localConfig := config
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false // Disable SSH flag for DSN generation
|
||||
|
||||
dsn = p.getDSN(localConfig)
|
||||
logger.Infof("PostgreSQL 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = p.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
p.conn = db
|
||||
p.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
|
||||
// Force verification
|
||||
if err := p.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
@@ -56,7 +98,17 @@ func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
func (p *PostgresDB) Close() error {
|
||||
// Close SSH forwarder first if exists
|
||||
if p.forwarder != nil {
|
||||
if err := p.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 PostgreSQL SSH 端口转发失败:%v", err)
|
||||
}
|
||||
p.forwarder = nil
|
||||
}
|
||||
|
||||
// Then close database connection
|
||||
if p.conn != nil {
|
||||
return p.conn.Close()
|
||||
}
|
||||
@@ -76,52 +128,42 @@ func (p *PostgresDB) Ping() error {
|
||||
return p.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
func (p *PostgresDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if p.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
|
||||
rows, err := p.conn.Query(query)
|
||||
rows, err := p.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
columns, err := rows.Columns()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if p.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := p.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
func (p *PostgresDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if p.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
res, err := p.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Exec(query string) (int64, error) {
|
||||
@@ -150,16 +192,22 @@ func (p *PostgresDB) GetDatabases() ([]string, error) {
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetTables(dbName string) ([]string, error) {
|
||||
query := "SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'"
|
||||
query := "SELECT schemaname, tablename FROM pg_catalog.pg_tables WHERE schemaname != 'information_schema' AND schemaname NOT LIKE 'pg_%' ORDER BY schemaname, tablename"
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["tablename"]; ok {
|
||||
tables = append(tables, fmt.Sprintf("%v", val))
|
||||
schema, okSchema := row["schemaname"]
|
||||
name, okName := row["tablename"]
|
||||
if okSchema && okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
|
||||
continue
|
||||
}
|
||||
if okName {
|
||||
tables = append(tables, fmt.Sprintf("%v", name))
|
||||
}
|
||||
}
|
||||
return tables, nil
|
||||
@@ -170,21 +218,420 @@ func (p *PostgresDB) GetCreateStatement(dbName, tableName string) (string, error
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
return []connection.ColumnDefinition{}, nil
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = '%s'
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(schema), esc(table))
|
||||
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if v, ok := row["comment"]; ok && v != nil {
|
||||
col.Comment = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
if v, ok := row["column_default"]; ok && v != nil {
|
||||
def := fmt.Sprintf("%v", v)
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return []connection.IndexDefinition{}, nil
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname AS index_name,
|
||||
a.attname AS column_name,
|
||||
ix.indisunique AS is_unique,
|
||||
x.ordinality AS seq_in_index,
|
||||
am.amname AS index_type
|
||||
FROM pg_class t
|
||||
JOIN pg_namespace n ON n.oid = t.relnamespace
|
||||
JOIN pg_index ix ON t.oid = ix.indrelid
|
||||
JOIN pg_class i ON i.oid = ix.indexrelid
|
||||
JOIN pg_am am ON i.relam = am.oid
|
||||
JOIN unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinality) ON TRUE
|
||||
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = x.attnum
|
||||
WHERE t.relkind IN ('r', 'p')
|
||||
AND t.relname = '%s'
|
||||
AND n.nspname = '%s'
|
||||
ORDER BY i.relname, x.ordinality`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parseBool := func(v interface{}) bool {
|
||||
switch val := v.(type) {
|
||||
case bool:
|
||||
return val
|
||||
case string:
|
||||
s := strings.ToLower(strings.TrimSpace(val))
|
||||
return s == "t" || s == "true" || s == "1" || s == "y" || s == "yes"
|
||||
default:
|
||||
s := strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", v)))
|
||||
return s == "t" || s == "true" || s == "1" || s == "y" || s == "yes"
|
||||
}
|
||||
}
|
||||
|
||||
parseInt := func(v interface{}) int {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
return val
|
||||
case int64:
|
||||
return int(val)
|
||||
case float64:
|
||||
return int(val)
|
||||
case string:
|
||||
// best effort
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(val), "%d", &n)
|
||||
return n
|
||||
default:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(fmt.Sprintf("%v", v)), "%d", &n)
|
||||
return n
|
||||
}
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
isUnique := false
|
||||
if v, ok := row["is_unique"]; ok && v != nil {
|
||||
isUnique = parseBool(v)
|
||||
}
|
||||
|
||||
nonUnique := 1
|
||||
if isUnique {
|
||||
nonUnique = 0
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if v, ok := row["seq_in_index"]; ok && v != nil {
|
||||
seq = parseInt(v)
|
||||
}
|
||||
|
||||
indexType := ""
|
||||
if v, ok := row["index_type"]; ok && v != nil {
|
||||
indexType = strings.ToUpper(fmt.Sprintf("%v", v))
|
||||
}
|
||||
if indexType == "" {
|
||||
indexType = "BTREE"
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["index_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
NonUnique: nonUnique,
|
||||
SeqInIndex: seq,
|
||||
IndexType: indexType,
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return []connection.ForeignKeyDefinition{}, nil
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name AS constraint_name,
|
||||
kcu.column_name AS column_name,
|
||||
ccu.table_schema AS foreign_table_schema,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY'
|
||||
AND tc.table_name = '%s'
|
||||
AND tc.table_schema = '%s'
|
||||
ORDER BY tc.constraint_name, kcu.ordinal_position`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
refSchema := ""
|
||||
if v, ok := row["foreign_table_schema"]; ok && v != nil {
|
||||
refSchema = fmt.Sprintf("%v", v)
|
||||
}
|
||||
refTable := fmt.Sprintf("%v", row["foreign_table_name"])
|
||||
refTableName := refTable
|
||||
if strings.TrimSpace(refSchema) != "" {
|
||||
refTableName = fmt.Sprintf("%s.%s", refSchema, refTable)
|
||||
}
|
||||
|
||||
fk := connection.ForeignKeyDefinition{
|
||||
Name: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
RefTableName: refTableName,
|
||||
RefColumnName: fmt.Sprintf("%v", row["foreign_column_name"]),
|
||||
ConstraintName: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
}
|
||||
fks = append(fks, fk)
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return []connection.TriggerDefinition{}, nil
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT trigger_name, action_timing, event_manipulation, action_statement
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s'
|
||||
AND event_object_schema = '%s'
|
||||
ORDER BY trigger_name, event_manipulation`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["trigger_name"]),
|
||||
Timing: fmt.Sprintf("%v", row["action_timing"]),
|
||||
Event: fmt.Sprintf("%v", row["event_manipulation"]),
|
||||
Statement: fmt.Sprintf("%v", row["action_statement"]),
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return []connection.ColumnDefinitionWithTable{}, nil
|
||||
query := `
|
||||
SELECT table_schema, table_name, column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND table_schema NOT LIKE 'pg_%'
|
||||
ORDER BY table_schema, table_name, ordinal_position`
|
||||
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
schema := fmt.Sprintf("%v", row["table_schema"])
|
||||
table := fmt.Sprintf("%v", row["table_name"])
|
||||
tableName := table
|
||||
if strings.TrimSpace(schema) != "" {
|
||||
tableName = fmt.Sprintf("%s.%s", schema, table)
|
||||
}
|
||||
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: tableName,
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
|
||||
// ApplyChanges applies a set of row deletes, updates and inserts to tableName
// inside a single transaction; any failure rolls everything back.
// tableName may be "schema.table"; identifiers are double-quoted, values are
// bound via $n placeholders.
func (p *PostgresDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	if p.conn == nil {
		return fmt.Errorf("connection not open")
	}

	tx, err := p.conn.Begin()
	if err != nil {
		return err
	}
	// Rollback is a harmless no-op once Commit has succeeded.
	defer tx.Rollback()

	// quoteIdent strips any surrounding quotes, doubles embedded quotes, and
	// wraps the identifier in double quotes so it is safe to interpolate.
	quoteIdent := func(name string) string {
		n := strings.TrimSpace(name)
		n = strings.Trim(n, "\"")
		n = strings.ReplaceAll(n, "\"", "\"\"")
		if n == "" {
			return "\"\""
		}
		return `"` + n + `"`
	}

	// Split an optional "schema.table" qualifier.
	schema := ""
	table := strings.TrimSpace(tableName)
	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	qualifiedTable := ""
	if schema != "" {
		qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
	} else {
		qualifiedTable = quoteIdent(table)
	}

	// 1. Deletes — each entry is a primary-key map identifying one row.
	for _, pk := range changes.Deletes {
		var wheres []string
		var args []interface{}
		idx := 0
		for k, v := range pk {
			idx++
			wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
			args = append(args, v)
		}
		// A delete without key columns would wipe the whole table; skip it.
		if len(wheres) == 0 {
			continue
		}
		query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("delete error: %v", err)
		}
	}

	// 2. Updates — idx numbers the $n placeholders continuously across both
	// the SET and WHERE clauses, matching the order values enter args.
	for _, update := range changes.Updates {
		var sets []string
		var args []interface{}
		idx := 0

		for k, v := range update.Values {
			idx++
			sets = append(sets, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
			args = append(args, v)
		}

		if len(sets) == 0 {
			continue
		}

		var wheres []string
		for k, v := range update.Keys {
			idx++
			wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
			args = append(args, v)
		}

		// Refuse key-less updates: they would touch every row in the table.
		if len(wheres) == 0 {
			return fmt.Errorf("update requires keys")
		}

		query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("update error: %v", err)
		}
	}

	// 3. Inserts — one INSERT per row map.
	for _, row := range changes.Inserts {
		var cols []string
		var placeholders []string
		var args []interface{}
		idx := 0

		for k, v := range row {
			idx++
			cols = append(cols, quoteIdent(k))
			placeholders = append(placeholders, fmt.Sprintf("$%d", idx))
			args = append(args, v)
		}

		if len(cols) == 0 {
			continue
		}

		query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("insert error: %v", err)
		}
	}

	return tx.Commit()
}
|
||||
|
||||
114
internal/db/query_value.go
Normal file
114
internal/db/query_value.go
Normal file
@@ -0,0 +1,114 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// normalizeQueryValue normalizes driver-returned values for UI/JSON transport.
// Currently it mainly handles []byte: readable text is converted to a string,
// anything else to a hex string, so the frontend never renders a "blank" value.
func normalizeQueryValue(v interface{}) interface{} {
	// Delegate with an empty database type name (no column-type hint available).
	return normalizeQueryValueWithDBType(v, "")
}
|
||||
|
||||
func normalizeQueryValueWithDBType(v interface{}, databaseTypeName string) interface{} {
|
||||
if b, ok := v.([]byte); ok {
|
||||
return bytesToDisplayValue(b, databaseTypeName)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// bytesToDisplayValue converts a raw []byte column value into something the
// UI can render: nil for NULL, int64/decimal-string for BIT-like columns,
// plain string for readable text, otherwise a 0x-prefixed hex string.
// The order of checks matters: BIT decoding runs before any text check.
func bytesToDisplayValue(b []byte, databaseTypeName string) interface{} {
	// nil stays nil (SQL NULL); an empty slice becomes an empty string.
	if b == nil {
		return nil
	}
	if len(b) == 0 {
		return ""
	}

	dbType := strings.ToUpper(strings.TrimSpace(databaseTypeName))
	if isBitLikeDBType(dbType) {
		if u, ok := bytesToUint64(b); ok {
			// JS number precision is limited; keep large bitmasks as string.
			const maxSafeInteger = 9007199254740991 // 2^53 - 1
			if u <= maxSafeInteger {
				return int64(u)
			}
			return fmt.Sprintf("%d", u)
		}
	}

	// Valid UTF-8 that is mostly printable is shown verbatim as text.
	if utf8.Valid(b) {
		s := string(b)
		if isMostlyPrintable(s) {
			return s
		}
	}

	// Fallback: some drivers return BIT(1) as []byte{0} / []byte{1} without type info.
	if dbType == "" && len(b) == 1 && (b[0] == 0 || b[0] == 1) {
		return int64(b[0])
	}

	// Anything else is opaque binary; render it as hex.
	return bytesToReadableString(b)
}
|
||||
|
||||
// bytesToReadableString renders opaque binary as a 0x-prefixed lowercase hex
// string. nil maps to nil (SQL NULL) and an empty slice to an empty string.
func bytesToReadableString(b []byte) interface{} {
	switch {
	case b == nil:
		return nil
	case len(b) == 0:
		return ""
	default:
		return "0x" + hex.EncodeToString(b)
	}
}
|
||||
|
||||
// isBitLikeDBType reports whether an upper-cased driver type name denotes a
// bit-string column ("BIT", "BIT VARYING", "VARBIT", ...). Callers are
// expected to upper-case the name first; an empty name is never bit-like.
func isBitLikeDBType(typeName string) bool {
	if typeName == "" {
		return false
	}
	// "VARBIT" is the one bit type that does not start with "BIT".
	return typeName == "VARBIT" || strings.HasPrefix(typeName, "BIT")
}
|
||||
|
||||
// bytesToUint64 decodes b as a big-endian unsigned integer. It reports false
// when b is empty or longer than 8 bytes (would overflow uint64).
func bytesToUint64(b []byte) (uint64, bool) {
	n := len(b)
	if n == 0 || n > 8 {
		return 0, false
	}
	var acc uint64
	for i := 0; i < n; i++ {
		acc = acc<<8 | uint64(b[i])
	}
	return acc, true
}
|
||||
|
||||
// isMostlyPrintable reports whether at least 90% of the runes in s are
// printable (newline, carriage return and tab count as printable). The empty
// string is considered printable. A small tolerance avoids misclassifying
// normal text as binary.
func isMostlyPrintable(s string) bool {
	if len(s) == 0 {
		return true
	}

	total, printable := 0, 0
	for _, r := range s {
		total++
		if r == '\n' || r == '\r' || r == '\t' || unicode.IsPrint(r) {
			printable++
		}
	}
	return printable*100 >= total*90
}
|
||||
44
internal/db/query_value_test.go
Normal file
44
internal/db/query_value_test.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
|
||||
// TestNormalizeQueryValueWithDBType_BitBytes verifies that BIT-typed []byte
// values are decoded big-endian into int64, regardless of type-name case.
func TestNormalizeQueryValueWithDBType_BitBytes(t *testing.T) {
	v := normalizeQueryValueWithDBType([]byte{0x00}, "BIT")
	if v != int64(0) {
		t.Fatalf("BIT 0x00 期望为 0,实际=%v(%T)", v, v)
	}

	// Lower-case type names must be normalized the same way.
	v = normalizeQueryValueWithDBType([]byte{0x01}, "bit")
	if v != int64(1) {
		t.Fatalf("BIT 0x01 期望为 1,实际=%v(%T)", v, v)
	}

	// Multi-byte values decode big-endian: 0x01<<8 | 0x02 == 258.
	v = normalizeQueryValueWithDBType([]byte{0x01, 0x02}, "BIT VARYING")
	if v != int64(258) {
		t.Fatalf("BIT 0x0102 期望为 258,实际=%v(%T)", v, v)
	}
}
|
||||
|
||||
// TestNormalizeQueryValueWithDBType_BitLargeAsString verifies that BIT values
// above JavaScript's max safe integer (2^53-1) are returned as decimal strings
// rather than int64, to avoid precision loss on the frontend.
func TestNormalizeQueryValueWithDBType_BitLargeAsString(t *testing.T) {
	v := normalizeQueryValueWithDBType([]byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff}, "BIT")
	if s, ok := v.(string); !ok || s != "18446744073709551615" {
		t.Fatalf("BIT 0xffffffffffffffff 期望为 string(18446744073709551615),实际=%v(%T)", v, v)
	}
}
|
||||
|
||||
// TestNormalizeQueryValueWithDBType_ByteFallbacks verifies the untyped []byte
// fallbacks: readable text becomes string, a single 0x00 byte becomes int64
// (BIT(1) heuristic), and other binary becomes a 0x-prefixed hex string.
func TestNormalizeQueryValueWithDBType_ByteFallbacks(t *testing.T) {
	v := normalizeQueryValueWithDBType([]byte("abc"), "")
	if v != "abc" {
		t.Fatalf("文本 []byte 期望返回 string,实际=%v(%T)", v, v)
	}

	// Single 0x00/0x01 without type info is treated as a BIT(1) value.
	v = normalizeQueryValueWithDBType([]byte{0x00}, "")
	if v != int64(0) {
		t.Fatalf("未知类型 0x00 期望返回 0,实际=%v(%T)", v, v)
	}

	// Non-text, non-BIT bytes fall through to the hex representation.
	v = normalizeQueryValueWithDBType([]byte{0xff}, "")
	if v != "0xff" {
		t.Fatalf("未知类型 0xff 期望返回 0xff,实际=%v(%T)", v, v)
	}
}
|
||||
46
internal/db/scan_rows.go
Normal file
46
internal/db/scan_rows.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
// scanRows drains *sql.Rows into a slice of column-name→value maps plus the
// ordered column-name list. Each value is normalized through
// normalizeQueryValueWithDBType using the driver-reported column type.
func scanRows(rows *sql.Rows) ([]map[string]interface{}, []string, error) {
	columns, err := rows.Columns()
	if err != nil {
		return nil, nil, err
	}

	// Column-type metadata is optional: on error or length mismatch we fall
	// back to normalizing without a type hint.
	colTypes, err := rows.ColumnTypes()
	if err != nil || len(colTypes) != len(columns) {
		colTypes = nil
	}

	resultData := make([]map[string]interface{}, 0)

	for rows.Next() {
		values := make([]interface{}, len(columns))
		valuePtrs := make([]interface{}, len(columns))
		for i := range columns {
			valuePtrs[i] = &values[i]
		}

		// A row that fails to scan is skipped rather than aborting the result.
		if err := rows.Scan(valuePtrs...); err != nil {
			continue
		}

		entry := make(map[string]interface{}, len(columns))
		for i, col := range columns {
			dbTypeName := ""
			if colTypes != nil && i < len(colTypes) && colTypes[i] != nil {
				dbTypeName = colTypes[i].DatabaseTypeName()
			}
			entry[col] = normalizeQueryValueWithDBType(values[i], dbTypeName)
		}
		resultData = append(resultData, entry)
	}

	// An iteration error is reported alongside whatever rows were read.
	if err := rows.Err(); err != nil {
		return resultData, columns, err
	}
	return resultData, columns, nil
}
|
||||
136
internal/db/sphinx_impl.go
Normal file
136
internal/db/sphinx_impl.go
Normal file
@@ -0,0 +1,136 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
// sphinxDefaultDatabaseName is the placeholder database name reported when
// neither the server nor the connection config provides one.
const sphinxDefaultDatabaseName = "default"

// SphinxDB reuses the MySQL-protocol implementation and adds fallbacks for
// when the database list is unavailable.
type SphinxDB struct {
	MySQLDB
	// fallbackDatabase holds the database name from the connection config,
	// used when the server cannot enumerate databases itself.
	fallbackDatabase string
}
|
||||
|
||||
func isSphinxUnsupportedFeatureError(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
text := strings.ToLower(strings.TrimSpace(err.Error()))
|
||||
if text == "" {
|
||||
return false
|
||||
}
|
||||
keywords := []string{
|
||||
"not supported",
|
||||
"unsupported",
|
||||
"syntax error",
|
||||
"unknown table",
|
||||
"unknown column",
|
||||
"doesn't exist",
|
||||
}
|
||||
for _, keyword := range keywords {
|
||||
if strings.Contains(text, keyword) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Connect opens the connection via the MySQL-protocol implementation and
// records the configured database name as a fallback for listing calls.
func (s *SphinxDB) Connect(config connection.ConnectionConfig) error {
	runConfig := applyMySQLURI(config)
	s.fallbackDatabase = strings.TrimSpace(runConfig.Database)
	// NOTE(review): runConfig is only used to capture the database name; the
	// original config is what gets connected — presumably MySQLDB.Connect
	// applies the URI itself. Confirm; otherwise runConfig should be passed.
	return s.MySQLDB.Connect(config)
}
|
||||
|
||||
func (s *SphinxDB) resolveDatabaseName(dbName string) string {
|
||||
name := strings.TrimSpace(dbName)
|
||||
if name == "" {
|
||||
return s.fallbackDatabase
|
||||
}
|
||||
if strings.EqualFold(name, sphinxDefaultDatabaseName) && s.fallbackDatabase == "" {
|
||||
return ""
|
||||
}
|
||||
return name
|
||||
}
|
||||
|
||||
// GetDatabases returns the server's database list; when it errors or comes
// back empty, the configured database name (or the "default" placeholder) is
// returned instead so the UI always has something to show.
func (s *SphinxDB) GetDatabases() ([]string, error) {
	dbs, err := s.MySQLDB.GetDatabases()
	if err == nil && len(dbs) > 0 {
		return dbs, nil
	}
	// Errors are deliberately swallowed: a fallback name is always returned.
	if s.fallbackDatabase != "" {
		return []string{s.fallbackDatabase}, nil
	}
	return []string{sphinxDefaultDatabaseName}, nil
}
|
||||
|
||||
// GetTables lists tables/indexes. It first tries the MySQL implementation;
// when that fails with a Sphinx "unsupported"-style error, it falls back to a
// plain SHOW TABLES (Sphinx/Manticore commonly names the result column
// `Index` and does not support `SHOW TABLES FROM <db>`).
func (s *SphinxDB) GetTables(dbName string) ([]string, error) {
	tables, err := s.MySQLDB.GetTables(s.resolveDatabaseName(dbName))
	if err == nil {
		return tables, nil
	}
	// Only degrade on feature errors; real transport errors propagate.
	if !isSphinxUnsupportedFeatureError(err) {
		return nil, err
	}

	data, fields, fallbackErr := s.MySQLDB.Query("SHOW TABLES")
	if fallbackErr != nil {
		return nil, fallbackErr
	}

	fallbackTables := make([]string, 0, len(data))
	for _, row := range data {
		// Prefer the conventional `Index` column, then `index`, then the
		// first reported field present in the row.
		if val, ok := row["Index"]; ok {
			fallbackTables = append(fallbackTables, fmt.Sprintf("%v", val))
			continue
		}
		if val, ok := row["index"]; ok {
			fallbackTables = append(fallbackTables, fmt.Sprintf("%v", val))
			continue
		}
		for _, field := range fields {
			if val, ok := row[field]; ok {
				fallbackTables = append(fallbackTables, fmt.Sprintf("%v", val))
				break
			}
		}
	}

	return fallbackTables, nil
}
|
||||
|
||||
// GetCreateStatement delegates to the MySQL implementation with the
// fallback-aware database name.
func (s *SphinxDB) GetCreateStatement(dbName, tableName string) (string, error) {
	return s.MySQLDB.GetCreateStatement(s.resolveDatabaseName(dbName), tableName)
}
|
||||
|
||||
// GetColumns delegates to the MySQL implementation with the fallback-aware
// database name.
func (s *SphinxDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
	return s.MySQLDB.GetColumns(s.resolveDatabaseName(dbName), tableName)
}
|
||||
|
||||
// GetAllColumns delegates to the MySQL implementation with the fallback-aware
// database name.
func (s *SphinxDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	return s.MySQLDB.GetAllColumns(s.resolveDatabaseName(dbName))
}
|
||||
|
||||
// GetIndexes delegates to the MySQL implementation with the fallback-aware
// database name.
func (s *SphinxDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	return s.MySQLDB.GetIndexes(s.resolveDatabaseName(dbName), tableName)
}
|
||||
|
||||
// GetForeignKeys delegates to the MySQL implementation; Sphinx has no foreign
// keys, so an "unsupported"-style error degrades to an empty result.
func (s *SphinxDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	fks, err := s.MySQLDB.GetForeignKeys(s.resolveDatabaseName(dbName), tableName)
	if err != nil && isSphinxUnsupportedFeatureError(err) {
		return []connection.ForeignKeyDefinition{}, nil
	}
	return fks, err
}
|
||||
|
||||
// GetTriggers delegates to the MySQL implementation; Sphinx has no triggers,
// so an "unsupported"-style error degrades to an empty result.
func (s *SphinxDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	triggers, err := s.MySQLDB.GetTriggers(s.resolveDatabaseName(dbName), tableName)
	if err != nil && isSphinxUnsupportedFeatureError(err) {
		return []connection.TriggerDefinition{}, nil
	}
	return triggers, err
}
|
||||
@@ -1,8 +1,10 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
@@ -17,14 +19,14 @@ type SQLiteDB struct {
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := config.Host
|
||||
dsn := config.Host
|
||||
db, err := sql.Open("sqlite", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
s.conn = db
|
||||
s.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
|
||||
// Force verification
|
||||
if err := s.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
@@ -52,6 +54,20 @@ func (s *SQLiteDB) Ping() error {
|
||||
return s.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
// QueryContext runs a read query under ctx and returns the rows as maps plus
// the ordered column names. Errors if the connection is not open.
func (s *SQLiteDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
	if s.conn == nil {
		return nil, nil, fmt.Errorf("connection not open")
	}

	rows, err := s.conn.QueryContext(ctx, query)
	if err != nil {
		return nil, nil, err
	}
	defer rows.Close()

	return scanRows(rows)
}
|
||||
|
||||
func (s *SQLiteDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if s.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
@@ -62,41 +78,18 @@ func (s *SQLiteDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
columns, err := rows.Columns()
|
||||
func (s *SQLiteDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if s.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := s.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) Exec(query string) (int64, error) {
|
||||
@@ -120,7 +113,7 @@ func (s *SQLiteDB) GetTables(dbName string) ([]string, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["name"]; ok {
|
||||
@@ -145,21 +138,443 @@ func (s *SQLiteDB) GetCreateStatement(dbName, tableName string) (string, error)
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
return []connection.ColumnDefinition{}, nil
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(v string) string { return strings.ReplaceAll(v, "'", "''") }
|
||||
|
||||
// cid, name, type, notnull, dflt_value, pk
|
||||
data, _, err := s.Query(fmt.Sprintf("PRAGMA table_info('%s')", esc(table)))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parseInt := func(v interface{}) int {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
return val
|
||||
case int64:
|
||||
return int(val)
|
||||
case float64:
|
||||
return int(val)
|
||||
case string:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(val), "%d", &n)
|
||||
return n
|
||||
default:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(fmt.Sprintf("%v", v)), "%d", &n)
|
||||
return n
|
||||
}
|
||||
}
|
||||
|
||||
getStr := func(row map[string]interface{}, key string) string {
|
||||
if v, ok := row[key]; ok && v != nil {
|
||||
return fmt.Sprintf("%v", v)
|
||||
}
|
||||
if v, ok := row[strings.ToUpper(key)]; ok && v != nil {
|
||||
return fmt.Sprintf("%v", v)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
notnull := 0
|
||||
if v, ok := row["notnull"]; ok && v != nil {
|
||||
notnull = parseInt(v)
|
||||
} else if v, ok := row["NOTNULL"]; ok && v != nil {
|
||||
notnull = parseInt(v)
|
||||
}
|
||||
|
||||
pk := 0
|
||||
if v, ok := row["pk"]; ok && v != nil {
|
||||
pk = parseInt(v)
|
||||
} else if v, ok := row["PK"]; ok && v != nil {
|
||||
pk = parseInt(v)
|
||||
}
|
||||
|
||||
nullable := "YES"
|
||||
if notnull == 1 {
|
||||
nullable = "NO"
|
||||
}
|
||||
|
||||
key := ""
|
||||
if pk == 1 {
|
||||
key = "PRI"
|
||||
}
|
||||
|
||||
col := connection.ColumnDefinition{
|
||||
Name: getStr(row, "name"),
|
||||
Type: getStr(row, "type"),
|
||||
Nullable: nullable,
|
||||
Key: key,
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if v, ok := row["dflt_value"]; ok && v != nil {
|
||||
def := fmt.Sprintf("%v", v)
|
||||
col.Default = &def
|
||||
} else if v, ok := row["DFLT_VALUE"]; ok && v != nil {
|
||||
def := fmt.Sprintf("%v", v)
|
||||
col.Default = &def
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return []connection.IndexDefinition{}, nil
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(v string) string { return strings.ReplaceAll(v, "'", "''") }
|
||||
parseInt := func(v interface{}) int {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
return val
|
||||
case int64:
|
||||
return int(val)
|
||||
case float64:
|
||||
return int(val)
|
||||
case string:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(val), "%d", &n)
|
||||
return n
|
||||
default:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(fmt.Sprintf("%v", v)), "%d", &n)
|
||||
return n
|
||||
}
|
||||
}
|
||||
|
||||
data, _, err := s.Query(fmt.Sprintf("PRAGMA index_list('%s')", esc(table)))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
indexName := ""
|
||||
if v, ok := row["name"]; ok && v != nil {
|
||||
indexName = fmt.Sprintf("%v", v)
|
||||
} else if v, ok := row["NAME"]; ok && v != nil {
|
||||
indexName = fmt.Sprintf("%v", v)
|
||||
}
|
||||
if strings.TrimSpace(indexName) == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
unique := 0
|
||||
if v, ok := row["unique"]; ok && v != nil {
|
||||
unique = parseInt(v)
|
||||
} else if v, ok := row["UNIQUE"]; ok && v != nil {
|
||||
unique = parseInt(v)
|
||||
}
|
||||
nonUnique := 1
|
||||
if unique == 1 {
|
||||
nonUnique = 0
|
||||
}
|
||||
|
||||
cols, _, err := s.Query(fmt.Sprintf("PRAGMA index_info('%s')", esc(indexName)))
|
||||
if err != nil {
|
||||
// skip broken index
|
||||
continue
|
||||
}
|
||||
|
||||
for _, c := range cols {
|
||||
colName := ""
|
||||
if v, ok := c["name"]; ok && v != nil {
|
||||
colName = fmt.Sprintf("%v", v)
|
||||
} else if v, ok := c["NAME"]; ok && v != nil {
|
||||
colName = fmt.Sprintf("%v", v)
|
||||
}
|
||||
if strings.TrimSpace(colName) == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if v, ok := c["seqno"]; ok && v != nil {
|
||||
seq = parseInt(v) + 1
|
||||
} else if v, ok := c["SEQNO"]; ok && v != nil {
|
||||
seq = parseInt(v) + 1
|
||||
}
|
||||
|
||||
indexes = append(indexes, connection.IndexDefinition{
|
||||
Name: indexName,
|
||||
ColumnName: colName,
|
||||
NonUnique: nonUnique,
|
||||
SeqInIndex: seq,
|
||||
IndexType: "BTREE",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return []connection.ForeignKeyDefinition{}, nil
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(v string) string { return strings.ReplaceAll(v, "'", "''") }
|
||||
|
||||
data, _, err := s.Query(fmt.Sprintf("PRAGMA foreign_key_list('%s')", esc(table)))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parseInt := func(v interface{}) int {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
return val
|
||||
case int64:
|
||||
return int(val)
|
||||
case float64:
|
||||
return int(val)
|
||||
case string:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(val), "%d", &n)
|
||||
return n
|
||||
default:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(fmt.Sprintf("%v", v)), "%d", &n)
|
||||
return n
|
||||
}
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
id := 0
|
||||
if v, ok := row["id"]; ok && v != nil {
|
||||
id = parseInt(v)
|
||||
} else if v, ok := row["ID"]; ok && v != nil {
|
||||
id = parseInt(v)
|
||||
}
|
||||
|
||||
refTable := ""
|
||||
if v, ok := row["table"]; ok && v != nil {
|
||||
refTable = fmt.Sprintf("%v", v)
|
||||
} else if v, ok := row["TABLE"]; ok && v != nil {
|
||||
refTable = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
fromCol := ""
|
||||
if v, ok := row["from"]; ok && v != nil {
|
||||
fromCol = fmt.Sprintf("%v", v)
|
||||
} else if v, ok := row["FROM"]; ok && v != nil {
|
||||
fromCol = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
toCol := ""
|
||||
if v, ok := row["to"]; ok && v != nil {
|
||||
toCol = fmt.Sprintf("%v", v)
|
||||
} else if v, ok := row["TO"]; ok && v != nil {
|
||||
toCol = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
name := fmt.Sprintf("fk_%s_%d", table, id)
|
||||
fks = append(fks, connection.ForeignKeyDefinition{
|
||||
Name: name,
|
||||
ColumnName: fromCol,
|
||||
RefTableName: refTable,
|
||||
RefColumnName: toCol,
|
||||
ConstraintName: name,
|
||||
})
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return []connection.TriggerDefinition{}, nil
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(v string) string { return strings.ReplaceAll(v, "'", "''") }
|
||||
|
||||
data, _, err := s.Query(fmt.Sprintf("SELECT name AS trigger_name, sql AS statement FROM sqlite_master WHERE type='trigger' AND tbl_name='%s' ORDER BY name", esc(table)))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
name := fmt.Sprintf("%v", row["trigger_name"])
|
||||
stmt := ""
|
||||
if v, ok := row["statement"]; ok && v != nil {
|
||||
stmt = fmt.Sprintf("%v", v)
|
||||
}
|
||||
|
||||
upper := strings.ToUpper(stmt)
|
||||
timing := ""
|
||||
switch {
|
||||
case strings.Contains(upper, " BEFORE "):
|
||||
timing = "BEFORE"
|
||||
case strings.Contains(upper, " AFTER "):
|
||||
timing = "AFTER"
|
||||
case strings.Contains(upper, " INSTEAD OF "):
|
||||
timing = "INSTEAD OF"
|
||||
}
|
||||
|
||||
event := ""
|
||||
switch {
|
||||
case strings.Contains(upper, " INSERT "):
|
||||
event = "INSERT"
|
||||
case strings.Contains(upper, " UPDATE "):
|
||||
event = "UPDATE"
|
||||
case strings.Contains(upper, " DELETE "):
|
||||
event = "DELETE"
|
||||
}
|
||||
|
||||
triggers = append(triggers, connection.TriggerDefinition{
|
||||
Name: name,
|
||||
Timing: timing,
|
||||
Event: event,
|
||||
Statement: stmt,
|
||||
})
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
// ApplyChanges applies a batch of row-level edits to tableName inside a
// single transaction, in the fixed order deletes -> updates -> inserts.
// Any failure aborts the whole batch via the deferred Rollback; after a
// successful Commit the deferred Rollback is a harmless no-op.
func (s *SQLiteDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	if s.conn == nil {
		return fmt.Errorf("connection not open")
	}

	tx, err := s.conn.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback()

	// quoteIdent double-quotes an identifier for SQLite. Surrounding quotes
	// are stripped first so already-quoted names are not double-wrapped,
	// then any remaining embedded quotes are doubled.
	quoteIdent := func(name string) string {
		n := strings.TrimSpace(name)
		n = strings.Trim(n, "\"")
		n = strings.ReplaceAll(n, "\"", "\"\"")
		if n == "" {
			return "\"\""
		}
		return `"` + n + `"`
	}

	// tableName may arrive as "schema.table" (e.g. attached databases);
	// split it so both parts can be quoted independently.
	schema := ""
	table := strings.TrimSpace(tableName)
	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	qualifiedTable := ""
	if schema != "" {
		qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
	} else {
		qualifiedTable = quoteIdent(table)
	}

	// 1. Deletes
	// Map iteration order is random, but wheres and args are appended in
	// the same iteration so placeholders stay aligned with their values.
	for _, pk := range changes.Deletes {
		var wheres []string
		var args []interface{}
		for k, v := range pk {
			wheres = append(wheres, fmt.Sprintf("%s = ?", quoteIdent(k)))
			args = append(args, v)
		}
		// An empty key map would produce "DELETE FROM t WHERE" — skip it.
		if len(wheres) == 0 {
			continue
		}
		query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("delete error: %v", err)
		}
	}

	// 2. Updates
	for _, update := range changes.Updates {
		var sets []string
		var args []interface{}

		for k, v := range update.Values {
			sets = append(sets, fmt.Sprintf("%s = ?", quoteIdent(k)))
			args = append(args, v)
		}

		// Nothing to set — not an error, just skip this entry.
		if len(sets) == 0 {
			continue
		}

		var wheres []string
		for k, v := range update.Keys {
			wheres = append(wheres, fmt.Sprintf("%s = ?", quoteIdent(k)))
			args = append(args, v)
		}

		// Refuse to run an unkeyed UPDATE: it would rewrite every row.
		if len(wheres) == 0 {
			return fmt.Errorf("update requires keys")
		}

		query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("update error: %v", err)
		}
	}

	// 3. Inserts
	for _, row := range changes.Inserts {
		var cols []string
		var placeholders []string
		var args []interface{}

		for k, v := range row {
			cols = append(cols, quoteIdent(k))
			placeholders = append(placeholders, "?")
			args = append(args, v)
		}

		if len(cols) == 0 {
			continue
		}

		query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("insert error: %v", err)
		}
	}

	return tx.Commit()
}
|
||||
|
||||
func (s *SQLiteDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return []connection.ColumnDefinitionWithTable{}, nil
|
||||
tables, err := s.GetTables(dbName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, table := range tables {
|
||||
// Skip internal tables
|
||||
if strings.HasPrefix(strings.ToLower(table), "sqlite_") {
|
||||
continue
|
||||
}
|
||||
columns, err := s.GetColumns("", table)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
for _, col := range columns {
|
||||
cols = append(cols, connection.ColumnDefinitionWithTable{
|
||||
TableName: table,
|
||||
Name: col.Name,
|
||||
Type: col.Type,
|
||||
})
|
||||
}
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
|
||||
635
internal/db/sqlserver_impl.go
Normal file
635
internal/db/sqlserver_impl.go
Normal file
@@ -0,0 +1,635 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/microsoft/go-mssqldb"
|
||||
)
|
||||
|
||||
// SqlServerDB implements the Database interface for Microsoft SQL Server
// using the microsoft/go-mssqldb driver.
type SqlServerDB struct {
	conn        *sql.DB             // pooled connection; nil until Connect succeeds
	pingTimeout time.Duration       // budget used by Ping, derived from the connect timeout
	forwarder   *ssh.LocalForwarder // non-nil when connected through an SSH tunnel
}
|
||||
|
||||
// quoteBracket escapes identifiers for safe embedding inside SQL Server
// [bracket] quoting: every closing bracket in the input is doubled, since
// `]` is the only character that needs escaping in that notation.
func quoteBracket(name string) string {
	var out strings.Builder
	for _, ch := range name {
		if ch == ']' {
			out.WriteString("]]")
			continue
		}
		out.WriteRune(ch)
	}
	return out.String()
}
|
||||
|
||||
// getDSN builds a go-mssqldb URL-style DSN:
// sqlserver://user:password@host:port?database=dbname&...
// Credentials and query parameters are URL-escaped by net/url, so special
// characters in the password are safe. Encryption is explicitly disabled;
// NOTE(review): confirm "encrypt=disable" is acceptable for all target
// deployments — TrustServerCertificate is also set, suggesting TLS was
// intentionally relaxed.
func (s *SqlServerDB) getDSN(config connection.ConnectionConfig) string {
	// sqlserver://user:password@host:port?database=dbname
	dbname := config.Database
	if dbname == "" {
		// Fall back to the always-present system database.
		dbname = "master"
	}

	u := &url.URL{
		Scheme: "sqlserver",
		Host:   net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
	}
	u.User = url.UserPassword(config.User, config.Password)

	q := url.Values{}
	q.Set("database", dbname)
	// Driver-level dial timeout, in whole seconds.
	q.Set("connection timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
	q.Set("encrypt", "disable")
	q.Set("TrustServerCertificate", "true")
	u.RawQuery = q.Encode()

	return u.String()
}
|
||||
|
||||
// Connect opens a SQL Server connection described by config.
// When config.UseSSH is set, a local SSH port forward is created first and
// the driver connects to the forwarded local address instead of the remote
// host; the forwarder is stored on the receiver so Close can tear it down.
// The connection is verified with Ping before returning.
func (s *SqlServerDB) Connect(config connection.ConnectionConfig) error {
	var dsn string

	if config.UseSSH {
		logger.Infof("SQL Server 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)

		forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
		if err != nil {
			return fmt.Errorf("创建 SSH 隧道失败:%w", err)
		}
		s.forwarder = forwarder

		// Split the forwarder's local listen address back into host/port
		// so the DSN can be pointed at the tunnel entrance.
		host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
		if err != nil {
			return fmt.Errorf("解析本地转发地址失败:%w", err)
		}

		port, err := strconv.Atoi(portStr)
		if err != nil {
			return fmt.Errorf("解析本地端口失败:%w", err)
		}

		// Copy config (value type) and rewrite the endpoint to the tunnel.
		localConfig := config
		localConfig.Host = host
		localConfig.Port = port
		localConfig.UseSSH = false

		dsn = s.getDSN(localConfig)
		logger.Infof("SQL Server 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
	} else {
		dsn = s.getDSN(config)
	}

	// sql.Open does not dial; the first real network check is Ping below.
	db, err := sql.Open("sqlserver", dsn)
	if err != nil {
		return fmt.Errorf("打开数据库连接失败:%w", err)
	}
	s.conn = db
	s.pingTimeout = getConnectTimeout(config)

	if err := s.Ping(); err != nil {
		return fmt.Errorf("连接建立后验证失败:%w", err)
	}
	return nil
}
|
||||
|
||||
func (s *SqlServerDB) Close() error {
|
||||
if s.forwarder != nil {
|
||||
if err := s.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 SQL Server SSH 端口转发失败:%v", err)
|
||||
}
|
||||
s.forwarder = nil
|
||||
}
|
||||
|
||||
if s.conn != nil {
|
||||
return s.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SqlServerDB) Ping() error {
|
||||
if s.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := s.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return s.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (s *SqlServerDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if s.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := s.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (s *SqlServerDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if s.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := s.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (s *SqlServerDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if s.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := s.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (s *SqlServerDB) Exec(query string) (int64, error) {
|
||||
if s.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := s.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (s *SqlServerDB) GetDatabases() ([]string, error) {
|
||||
query := "SELECT name FROM sys.databases WHERE state_desc = 'ONLINE' ORDER BY name"
|
||||
data, _, err := s.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["name"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
// GetTables lists all user tables in dbName as "schema.table" strings
// (falling back to the bare table name if the schema column is missing).
// The database identifier is embedded with quoteBracket to stay safe
// inside the [bracket] quoting.
func (s *SqlServerDB) GetTables(dbName string) ([]string, error) {
	// SQL Server uses schema.table format, default schema is dbo
	safeDB := quoteBracket(dbName)
	query := fmt.Sprintf(`
		SELECT s.name AS schema_name, t.name AS table_name
		FROM [%s].sys.tables t
		JOIN [%s].sys.schemas s ON t.schema_id = s.schema_id
		WHERE t.type = 'U'
		ORDER BY s.name, t.name`, safeDB, safeDB)

	data, _, err := s.Query(query)
	if err != nil {
		return nil, err
	}

	var tables []string
	for _, row := range data {
		schema, okSchema := row["schema_name"]
		name, okName := row["table_name"]
		if okSchema && okName {
			tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
			continue
		}
		// Defensive fallback: emit the unqualified name if only it exists.
		if okName {
			tables = append(tables, fmt.Sprintf("%v", name))
		}
	}
	return tables, nil
}
|
||||
|
||||
// GetCreateStatement is intentionally not implemented for SQL Server
// (there is no SHOW CREATE TABLE equivalent); it returns an explanatory
// SQL comment rather than an error so the UI still has text to display.
func (s *SqlServerDB) GetCreateStatement(dbName, tableName string) (string, error) {
	return fmt.Sprintf("-- SHOW CREATE TABLE not supported for SQL Server in this version.\n-- Table: %s.%s", dbName, tableName), nil
}
|
||||
|
||||
// GetColumns returns the column definitions of dbName's schema.table.
// tableName may be "schema.table"; the schema defaults to "dbo".
// Type names are rendered MySQL-style (varchar(50), decimal(10,2), MAX),
// nullability as YES/NO, primary-key columns flagged "PRI" and identity
// columns flagged "auto_increment" so callers can treat engines uniformly.
func (s *SqlServerDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
	schema := "dbo"
	table := strings.TrimSpace(tableName)

	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	if table == "" {
		return nil, fmt.Errorf("table name required")
	}

	// esc doubles single quotes for embedding in SQL string literals;
	// quoteBracket protects the [database] identifier.
	esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
	safeDB := quoteBracket(dbName)

	// nvarchar/nchar store max_length in bytes, hence the /2; -1 means MAX.
	query := fmt.Sprintf(`
		SELECT
			c.name AS column_name,
			t.name + CASE
				WHEN t.name IN ('varchar', 'nvarchar', 'char', 'nchar') THEN '(' + CASE WHEN c.max_length = -1 THEN 'MAX' ELSE CAST(CASE WHEN t.name IN ('nvarchar', 'nchar') THEN c.max_length / 2 ELSE c.max_length END AS VARCHAR) END + ')'
				WHEN t.name IN ('decimal', 'numeric') THEN '(' + CAST(c.precision AS VARCHAR) + ',' + CAST(c.scale AS VARCHAR) + ')'
				ELSE ''
			END AS data_type,
			CASE WHEN c.is_nullable = 1 THEN 'YES' ELSE 'NO' END AS is_nullable,
			dc.definition AS column_default,
			ep.value AS comment,
			CASE WHEN pk.column_id IS NOT NULL THEN 'PRI' ELSE '' END AS column_key,
			CASE WHEN c.is_identity = 1 THEN 'auto_increment' ELSE '' END AS extra
		FROM [%s].sys.columns c
		JOIN [%s].sys.types t ON c.user_type_id = t.user_type_id
		JOIN [%s].sys.tables tb ON c.object_id = tb.object_id
		JOIN [%s].sys.schemas s ON tb.schema_id = s.schema_id
		LEFT JOIN [%s].sys.default_constraints dc ON c.default_object_id = dc.object_id
		LEFT JOIN [%s].sys.extended_properties ep ON ep.major_id = c.object_id AND ep.minor_id = c.column_id AND ep.name = 'MS_Description'
		LEFT JOIN (
			SELECT ic.object_id, ic.column_id
			FROM [%s].sys.index_columns ic
			JOIN [%s].sys.indexes i ON ic.object_id = i.object_id AND ic.index_id = i.index_id
			WHERE i.is_primary_key = 1
		) pk ON pk.object_id = c.object_id AND pk.column_id = c.column_id
		WHERE s.name = '%s' AND tb.name = '%s'
		ORDER BY c.column_id`,
		safeDB, safeDB, safeDB, safeDB, safeDB, safeDB, safeDB, safeDB,
		esc(schema), esc(table))

	data, _, err := s.Query(query)
	if err != nil {
		return nil, err
	}

	var columns []connection.ColumnDefinition
	for _, row := range data {
		col := connection.ColumnDefinition{
			Name:     fmt.Sprintf("%v", row["column_name"]),
			Type:     fmt.Sprintf("%v", row["data_type"]),
			Nullable: fmt.Sprintf("%v", row["is_nullable"]),
			Key:      fmt.Sprintf("%v", row["column_key"]),
			Extra:    fmt.Sprintf("%v", row["extra"]),
			Comment:  "",
		}

		// comment and column_default are nullable; only set when non-nil.
		if v, ok := row["comment"]; ok && v != nil {
			col.Comment = fmt.Sprintf("%v", v)
		}

		if v, ok := row["column_default"]; ok && v != nil {
			def := fmt.Sprintf("%v", v)
			col.Default = &def
		}

		columns = append(columns, col)
	}
	return columns, nil
}
|
||||
|
||||
// GetAllColumns returns every column of every user table in dbName,
// tagged with its "schema.table" name, in a single catalog query
// (used for autocomplete metadata).
func (s *SqlServerDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	safeDB := quoteBracket(dbName)
	query := fmt.Sprintf(`
		SELECT s.name AS schema_name, t.name AS table_name, c.name AS column_name, tp.name AS data_type
		FROM [%s].sys.columns c
		JOIN [%s].sys.tables t ON c.object_id = t.object_id
		JOIN [%s].sys.schemas s ON t.schema_id = s.schema_id
		JOIN [%s].sys.types tp ON c.user_type_id = tp.user_type_id
		WHERE t.type = 'U'
		ORDER BY s.name, t.name, c.column_id`, safeDB, safeDB, safeDB, safeDB)

	data, _, err := s.Query(query)
	if err != nil {
		return nil, err
	}

	var cols []connection.ColumnDefinitionWithTable
	for _, row := range data {
		schema := fmt.Sprintf("%v", row["schema_name"])
		table := fmt.Sprintf("%v", row["table_name"])
		// Qualify with schema so names match what GetTables returns.
		tableName := fmt.Sprintf("%s.%s", schema, table)

		col := connection.ColumnDefinitionWithTable{
			TableName: tableName,
			Name:      fmt.Sprintf("%v", row["column_name"]),
			Type:      fmt.Sprintf("%v", row["data_type"]),
		}
		cols = append(cols, col)
	}
	return cols, nil
}
|
||||
|
||||
// GetIndexes returns one IndexDefinition per (index, column) pair of the
// given table, ordered by index name and key position. tableName may be
// "schema.table"; the schema defaults to "dbo". Output is normalized to
// the MySQL-style shape (NonUnique 0/1, SeqInIndex) the UI expects.
func (s *SqlServerDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	schema := "dbo"
	table := strings.TrimSpace(tableName)

	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	if table == "" {
		return nil, fmt.Errorf("table name required")
	}

	// esc doubles single quotes for string literals; quoteBracket protects
	// the [database] identifier.
	esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
	safeDB := quoteBracket(dbName)

	// i.name IS NOT NULL filters out heaps, which appear with a NULL name.
	query := fmt.Sprintf(`
		SELECT
			i.name AS index_name,
			c.name AS column_name,
			i.is_unique,
			ic.key_ordinal AS seq_in_index,
			i.type_desc AS index_type
		FROM [%s].sys.indexes i
		JOIN [%s].sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
		JOIN [%s].sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
		JOIN [%s].sys.tables t ON i.object_id = t.object_id
		JOIN [%s].sys.schemas s ON t.schema_id = s.schema_id
		WHERE s.name = '%s' AND t.name = '%s' AND i.name IS NOT NULL
		ORDER BY i.name, ic.key_ordinal`,
		safeDB, safeDB, safeDB, safeDB, safeDB, esc(schema), esc(table))

	data, _, err := s.Query(query)
	if err != nil {
		return nil, err
	}

	var indexes []connection.IndexDefinition
	for _, row := range data {
		// The driver may surface the BIT column as bool or as an integer;
		// accept both representations.
		isUnique := false
		if v, ok := row["is_unique"]; ok && v != nil {
			switch val := v.(type) {
			case bool:
				isUnique = val
			case int64:
				isUnique = val == 1
			}
		}

		// Convert to MySQL's inverted NonUnique convention (0 = unique).
		nonUnique := 1
		if isUnique {
			nonUnique = 0
		}

		seq := 0
		if v, ok := row["seq_in_index"]; ok && v != nil {
			switch val := v.(type) {
			case int:
				seq = val
			case int64:
				seq = int(val)
			}
		}

		indexType := "NONCLUSTERED"
		if v, ok := row["index_type"]; ok && v != nil {
			indexType = strings.ToUpper(fmt.Sprintf("%v", v))
		}

		idx := connection.IndexDefinition{
			Name:       fmt.Sprintf("%v", row["index_name"]),
			ColumnName: fmt.Sprintf("%v", row["column_name"]),
			NonUnique:  nonUnique,
			SeqInIndex: seq,
			IndexType:  indexType,
		}
		indexes = append(indexes, idx)
	}
	return indexes, nil
}
|
||||
|
||||
// GetForeignKeys returns one ForeignKeyDefinition per (constraint, column)
// pair of the given table, with the referenced table qualified as
// "schema.table". tableName may be "schema.table"; schema defaults to "dbo".
func (s *SqlServerDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	schema := "dbo"
	table := strings.TrimSpace(tableName)

	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	if table == "" {
		return nil, fmt.Errorf("table name required")
	}

	// esc doubles single quotes for string literals; quoteBracket protects
	// the [database] identifier.
	esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
	safeDB := quoteBracket(dbName)

	query := fmt.Sprintf(`
		SELECT
			fk.name AS constraint_name,
			c.name AS column_name,
			rs.name AS foreign_schema,
			rt.name AS foreign_table,
			rc.name AS foreign_column
		FROM [%s].sys.foreign_keys fk
		JOIN [%s].sys.foreign_key_columns fkc ON fk.object_id = fkc.constraint_object_id
		JOIN [%s].sys.columns c ON fkc.parent_object_id = c.object_id AND fkc.parent_column_id = c.column_id
		JOIN [%s].sys.tables t ON fk.parent_object_id = t.object_id
		JOIN [%s].sys.schemas s ON t.schema_id = s.schema_id
		JOIN [%s].sys.tables rt ON fk.referenced_object_id = rt.object_id
		JOIN [%s].sys.schemas rs ON rt.schema_id = rs.schema_id
		JOIN [%s].sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id
		WHERE s.name = '%s' AND t.name = '%s'
		ORDER BY fk.name`,
		safeDB, safeDB, safeDB, safeDB, safeDB, safeDB, safeDB, safeDB, esc(schema), esc(table))

	data, _, err := s.Query(query)
	if err != nil {
		return nil, err
	}

	var fks []connection.ForeignKeyDefinition
	for _, row := range data {
		refSchema := fmt.Sprintf("%v", row["foreign_schema"])
		refTable := fmt.Sprintf("%v", row["foreign_table"])
		// Qualify the referenced table so it matches GetTables output.
		refTableName := fmt.Sprintf("%s.%s", refSchema, refTable)

		fk := connection.ForeignKeyDefinition{
			Name:           fmt.Sprintf("%v", row["constraint_name"]),
			ColumnName:     fmt.Sprintf("%v", row["column_name"]),
			RefTableName:   refTableName,
			RefColumnName:  fmt.Sprintf("%v", row["foreign_column"]),
			ConstraintName: fmt.Sprintf("%v", row["constraint_name"]),
		}
		fks = append(fks, fk)
	}
	return fks, nil
}
|
||||
|
||||
// GetTriggers returns the triggers attached to the given table. Timing is
// derived from is_instead_of_trigger (INSTEAD OF vs AFTER), the event list
// is aggregated from sys.trigger_events via the STUFF/FOR XML PATH idiom
// (e.g. "INSERT, UPDATE"), and the body comes from OBJECT_DEFINITION.
// tableName may be "schema.table"; schema defaults to "dbo".
func (s *SqlServerDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	schema := "dbo"
	table := strings.TrimSpace(tableName)

	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	if table == "" {
		return nil, fmt.Errorf("table name required")
	}

	// esc doubles single quotes for string literals; quoteBracket protects
	// the [database] identifier.
	esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
	safeDB := quoteBracket(dbName)

	query := fmt.Sprintf(`
		SELECT
			tr.name AS trigger_name,
			CASE WHEN tr.is_instead_of_trigger = 1 THEN 'INSTEAD OF' ELSE 'AFTER' END AS timing,
			STUFF((
				SELECT ', ' + te.type_desc
				FROM [%s].sys.trigger_events te
				WHERE te.object_id = tr.object_id
				FOR XML PATH('')
			), 1, 2, '') AS event,
			OBJECT_DEFINITION(tr.object_id) AS statement
		FROM [%s].sys.triggers tr
		JOIN [%s].sys.tables t ON tr.parent_id = t.object_id
		JOIN [%s].sys.schemas s ON t.schema_id = s.schema_id
		WHERE s.name = '%s' AND t.name = '%s'
		ORDER BY tr.name`,
		safeDB, safeDB, safeDB, safeDB, esc(schema), esc(table))

	data, _, err := s.Query(query)
	if err != nil {
		return nil, err
	}

	var triggers []connection.TriggerDefinition
	for _, row := range data {
		trig := connection.TriggerDefinition{
			Name:      fmt.Sprintf("%v", row["trigger_name"]),
			Timing:    fmt.Sprintf("%v", row["timing"]),
			Event:     fmt.Sprintf("%v", row["event"]),
			Statement: "",
		}
		// OBJECT_DEFINITION is NULL for encrypted triggers; keep "".
		if v, ok := row["statement"]; ok && v != nil {
			trig.Statement = fmt.Sprintf("%v", v)
		}
		triggers = append(triggers, trig)
	}
	return triggers, nil
}
|
||||
|
||||
// ApplyChanges applies a batch of row-level edits to tableName inside a
// single transaction, in the fixed order deletes -> updates -> inserts.
// SQL Server named parameters (@p1, @p2, ...) are used via sql.Named.
// Any failure aborts the whole batch via the deferred Rollback; after a
// successful Commit the deferred Rollback is a harmless no-op.
func (s *SqlServerDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	if s.conn == nil {
		return fmt.Errorf("connection not open")
	}

	tx, err := s.conn.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback()

	// quoteIdent wraps an identifier in [brackets]. Pre-existing brackets
	// are stripped first so already-quoted names are not double-wrapped,
	// then embedded ']' characters are doubled.
	quoteIdent := func(name string) string {
		n := strings.TrimSpace(name)
		n = strings.Trim(n, "[]")
		n = strings.ReplaceAll(n, "]", "]]")
		if n == "" {
			return "[]"
		}
		return "[" + n + "]"
	}

	// tableName may be "schema.table"; the schema defaults to "dbo".
	schema := "dbo"
	table := strings.TrimSpace(tableName)
	if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
		schema = strings.TrimSpace(parts[0])
		table = strings.TrimSpace(parts[1])
	}

	qualifiedTable := fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))

	// 1. Deletes
	// idx numbers the @pN placeholders per statement; wheres and args are
	// appended together so they stay aligned despite random map order.
	for _, pk := range changes.Deletes {
		var wheres []string
		var args []interface{}
		idx := 0
		for k, v := range pk {
			idx++
			wheres = append(wheres, fmt.Sprintf("%s = @p%d", quoteIdent(k), idx))
			args = append(args, sql.Named(fmt.Sprintf("p%d", idx), v))
		}
		if len(wheres) == 0 {
			continue
		}
		query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("delete error: %v", err)
		}
	}

	// 2. Updates
	for _, update := range changes.Updates {
		var sets []string
		var args []interface{}
		// idx runs across SET and WHERE clauses of one statement so every
		// placeholder name is unique within that statement.
		idx := 0

		for k, v := range update.Values {
			idx++
			sets = append(sets, fmt.Sprintf("%s = @p%d", quoteIdent(k), idx))
			args = append(args, sql.Named(fmt.Sprintf("p%d", idx), v))
		}

		if len(sets) == 0 {
			continue
		}

		var wheres []string
		for k, v := range update.Keys {
			idx++
			wheres = append(wheres, fmt.Sprintf("%s = @p%d", quoteIdent(k), idx))
			args = append(args, sql.Named(fmt.Sprintf("p%d", idx), v))
		}

		// Refuse to run an unkeyed UPDATE: it would rewrite every row.
		if len(wheres) == 0 {
			return fmt.Errorf("update requires keys")
		}

		query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("update error: %v", err)
		}
	}

	// 3. Inserts
	for _, row := range changes.Inserts {
		var cols []string
		var placeholders []string
		var args []interface{}
		idx := 0

		for k, v := range row {
			idx++
			cols = append(cols, quoteIdent(k))
			placeholders = append(placeholders, fmt.Sprintf("@p%d", idx))
			args = append(args, sql.Named(fmt.Sprintf("p%d", idx), v))
		}

		if len(cols) == 0 {
			continue
		}

		query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
		if _, err := tx.Exec(query, args...); err != nil {
			return fmt.Errorf("insert error: %v", err)
		}
	}

	return tx.Commit()
}
|
||||
398
internal/db/tdengine_impl.go
Normal file
398
internal/db/tdengine_impl.go
Normal file
@@ -0,0 +1,398 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/taosdata/driver-go/v3/taosWS"
|
||||
)
|
||||
|
||||
// TDengineDB implements Database interface for TDengine.
// Uses the taosWS driver over WebSocket (the endpoint is normally served
// by taosAdapter).
type TDengineDB struct {
	conn        *sql.DB             // pooled connection; nil until Connect succeeds
	pingTimeout time.Duration       // budget used by Ping, derived from the connect timeout
	forwarder   *ssh.LocalForwarder // non-nil when connected through an SSH tunnel
}
|
||||
|
||||
// getDSN builds a taosWS DSN of the form user:pass@ws(host:port)/db.
// The user defaults to "root"; the path is just "/" when no database is
// configured.
// NOTE(review): user and password are interpolated verbatim — special
// characters (':', '@', '/') in the password would corrupt the DSN;
// confirm whether the taosWS driver expects any escaping here.
func (t *TDengineDB) getDSN(config connection.ConnectionConfig) string {
	user := strings.TrimSpace(config.User)
	if user == "" {
		user = "root"
	}

	pass := config.Password
	dbName := strings.TrimSpace(config.Database)
	path := "/"
	if dbName != "" {
		path = "/" + dbName
	}

	return fmt.Sprintf("%s:%s@ws(%s)%s", user, pass, net.JoinHostPort(config.Host, strconv.Itoa(config.Port)), path)
}
|
||||
|
||||
// Connect opens a TDengine connection described by config.
// When config.UseSSH is set, a local SSH port forward is created first and
// the driver connects to the forwarded local address instead of the remote
// host; the forwarder is stored on the receiver so Close can tear it down.
// The connection is verified with Ping before returning.
func (t *TDengineDB) Connect(config connection.ConnectionConfig) error {
	var dsn string

	if config.UseSSH {
		logger.Infof("TDengine 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)

		forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
		if err != nil {
			return fmt.Errorf("创建 SSH 隧道失败:%w", err)
		}
		t.forwarder = forwarder

		// Split the forwarder's local listen address back into host/port
		// so the DSN can be pointed at the tunnel entrance.
		host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
		if err != nil {
			return fmt.Errorf("解析本地转发地址失败:%w", err)
		}
		port, err := strconv.Atoi(portStr)
		if err != nil {
			return fmt.Errorf("解析本地端口失败:%w", err)
		}

		// Copy config (value type) and rewrite the endpoint to the tunnel.
		localConfig := config
		localConfig.Host = host
		localConfig.Port = port
		localConfig.UseSSH = false
		dsn = t.getDSN(localConfig)
		logger.Infof("TDengine 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
	} else {
		dsn = t.getDSN(config)
	}

	// sql.Open does not dial; the first real network check is Ping below.
	db, err := sql.Open("taosWS", dsn)
	if err != nil {
		return fmt.Errorf("打开数据库连接失败:%w", err)
	}
	t.conn = db
	t.pingTimeout = getConnectTimeout(config)

	if err := t.Ping(); err != nil {
		return fmt.Errorf("连接建立后验证失败:%w", err)
	}
	return nil
}
|
||||
|
||||
func (t *TDengineDB) Close() error {
|
||||
if t.forwarder != nil {
|
||||
if err := t.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 TDengine SSH 端口转发失败:%v", err)
|
||||
}
|
||||
t.forwarder = nil
|
||||
}
|
||||
|
||||
if t.conn != nil {
|
||||
return t.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *TDengineDB) Ping() error {
|
||||
if t.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := t.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return t.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (t *TDengineDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if t.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := t.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (t *TDengineDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if t.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := t.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (t *TDengineDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if t.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := t.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (t *TDengineDB) Exec(query string) (int64, error) {
|
||||
if t.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := t.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
// GetDatabases lists databases via SHOW DATABASES. The name column key
// varies across TDengine versions, so several candidates are tried via
// getValueFromRow; failing that, an arbitrary column of the row is used.
// NOTE(review): the fallback picks a random map entry (Go map iteration
// order) — fine for single-column results, nondeterministic otherwise.
func (t *TDengineDB) GetDatabases() ([]string, error) {
	data, _, err := t.Query("SHOW DATABASES")
	if err != nil {
		return nil, err
	}

	var dbs []string
	for _, row := range data {
		if val, ok := getValueFromRow(row, "name", "database", "Database", "db_name"); ok {
			dbs = append(dbs, fmt.Sprintf("%v", val))
			continue
		}
		// Fallback: take one (arbitrary) column value from the row.
		for _, val := range row {
			dbs = append(dbs, fmt.Sprintf("%v", val))
			break
		}
	}
	return dbs, nil
}
|
||||
|
||||
// GetTables lists the tables of dbName. It first tries the qualified
// "SHOW TABLES FROM `db`" form (identifier escaped via
// escapeBacktickIdent), then falls back to plain "SHOW TABLES" against the
// current database; the first query that succeeds wins. Column-name keys
// vary across TDengine versions, so several candidates are tried before
// falling back to an arbitrary column of the row.
func (t *TDengineDB) GetTables(dbName string) ([]string, error) {
	queries := make([]string, 0, 2)
	if strings.TrimSpace(dbName) != "" {
		queries = append(queries, fmt.Sprintf("SHOW TABLES FROM `%s`", escapeBacktickIdent(dbName)))
	}
	queries = append(queries, "SHOW TABLES")

	var lastErr error
	for _, query := range queries {
		data, _, err := t.Query(query)
		if err != nil {
			// Remember the failure but keep trying the fallback form.
			lastErr = err
			continue
		}

		var tables []string
		for _, row := range data {
			if val, ok := getValueFromRow(row, "table_name", "tablename", "name", "Table", "table"); ok {
				tables = append(tables, fmt.Sprintf("%v", val))
				continue
			}
			// Fallback: take one (arbitrary) column value from the row.
			for _, val := range row {
				tables = append(tables, fmt.Sprintf("%v", val))
				break
			}
		}
		return tables, nil
	}

	if lastErr != nil {
		return nil, lastErr
	}
	return []string{}, nil
}
|
||||
|
||||
// GetCreateStatement returns the DDL for a table, trying
// SHOW CREATE TABLE first and then SHOW CREATE STABLE (super tables).
// The result column name varies across versions, so known keys are tried
// first; as a last resort the longest row value containing "CREATE " is
// used as a heuristic.
func (t *TDengineDB) GetCreateStatement(dbName, tableName string) (string, error) {
	qualified := quoteTDengineTable(dbName, tableName)
	queries := []string{
		fmt.Sprintf("SHOW CREATE TABLE %s", qualified),
		fmt.Sprintf("SHOW CREATE STABLE %s", qualified),
	}

	var lastErr error
	for _, query := range queries {
		data, _, err := t.Query(query)
		if err != nil {
			// Normal for the wrong variant (table vs stable); try the next.
			lastErr = err
			continue
		}
		if len(data) == 0 {
			continue
		}

		row := data[0]
		if val, ok := getValueFromRow(row, "Create Table", "create table", "Create Stable", "create stable", "SQL", "sql"); ok {
			return fmt.Sprintf("%v", val), nil
		}

		// Heuristic fallback: pick the longest value that looks like DDL.
		longest := ""
		for _, val := range row {
			text := fmt.Sprintf("%v", val)
			if strings.Contains(strings.ToUpper(text), "CREATE ") && len(text) > len(longest) {
				longest = text
			}
		}
		if longest != "" {
			return longest, nil
		}
	}

	if lastErr != nil {
		return "", lastErr
	}
	return "", fmt.Errorf("create statement not found")
}
|
||||
|
||||
func (t *TDengineDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
query := fmt.Sprintf("DESCRIBE %s", quoteTDengineTable(dbName, tableName))
|
||||
data, _, err := t.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
columns := make([]connection.ColumnDefinition, 0, len(data))
|
||||
for _, row := range data {
|
||||
name, _ := getValueFromRow(row, "Field", "field", "col_name", "column_name", "name")
|
||||
colType, _ := getValueFromRow(row, "Type", "type", "data_type")
|
||||
note, _ := getValueFromRow(row, "Note", "note", "Extra", "extra")
|
||||
nullable, okNull := getValueFromRow(row, "Null", "null", "nullable")
|
||||
comment, _ := getValueFromRow(row, "Comment", "comment")
|
||||
defaultVal, hasDefault := getValueFromRow(row, "Default", "default")
|
||||
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", name),
|
||||
Type: fmt.Sprintf("%v", colType),
|
||||
Nullable: "YES",
|
||||
Key: "",
|
||||
Extra: fmt.Sprintf("%v", note),
|
||||
Comment: fmt.Sprintf("%v", comment),
|
||||
}
|
||||
|
||||
if okNull {
|
||||
col.Nullable = strings.ToUpper(fmt.Sprintf("%v", nullable))
|
||||
}
|
||||
|
||||
noteUpper := strings.ToUpper(fmt.Sprintf("%v", note))
|
||||
if strings.Contains(noteUpper, "TAG") {
|
||||
col.Key = "TAG"
|
||||
}
|
||||
|
||||
if hasDefault && defaultVal != nil {
|
||||
def := fmt.Sprintf("%v", defaultVal)
|
||||
if def != "<nil>" {
|
||||
col.Default = &def
|
||||
}
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (t *TDengineDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
if strings.TrimSpace(dbName) == "" {
|
||||
return nil, fmt.Errorf("database name required for GetAllColumns")
|
||||
}
|
||||
|
||||
tables, err := t.GetTables(dbName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cols := make([]connection.ColumnDefinitionWithTable, 0)
|
||||
for _, table := range tables {
|
||||
tableCols, err := t.GetColumns(dbName, table)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
for _, col := range tableCols {
|
||||
cols = append(cols, connection.ColumnDefinitionWithTable{
|
||||
TableName: table,
|
||||
Name: col.Name,
|
||||
Type: col.Type,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return cols, nil
|
||||
}
|
||||
|
||||
func (t *TDengineDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return []connection.IndexDefinition{}, nil
|
||||
}
|
||||
|
||||
func (t *TDengineDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return []connection.ForeignKeyDefinition{}, nil
|
||||
}
|
||||
|
||||
func (t *TDengineDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return []connection.TriggerDefinition{}, nil
|
||||
}
|
||||
|
||||
// getValueFromRow looks up the first matching key in row, first by exact
// match and then case-insensitively. It reports whether a value was found.
func getValueFromRow(row map[string]interface{}, keys ...string) (interface{}, bool) {
	if len(row) == 0 {
		return nil, false
	}

	// Pass 1: exact key match, in caller-preferred order.
	for _, key := range keys {
		if val, ok := row[key]; ok {
			return val, true
		}
	}

	// Pass 2: case-insensitive match against the row's actual keys.
	for rowKey, val := range row {
		for _, key := range keys {
			if strings.EqualFold(rowKey, key) {
				return val, true
			}
		}
	}

	return nil, false
}
|
||||
|
||||
// escapeBacktickIdent trims surrounding whitespace and doubles backticks so
// the identifier can be embedded safely between backticks.
func escapeBacktickIdent(ident string) string {
	trimmed := strings.TrimSpace(ident)
	return strings.ReplaceAll(trimmed, "`", "``")
}
|
||||
|
||||
func quoteTDengineTable(dbName, tableName string) string {
|
||||
t := escapeBacktickIdent(tableName)
|
||||
if t == "" {
|
||||
return "``"
|
||||
}
|
||||
if strings.Contains(t, ".") {
|
||||
parts := strings.Split(t, ".")
|
||||
quoted := make([]string, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
part = strings.TrimSpace(part)
|
||||
if part == "" {
|
||||
continue
|
||||
}
|
||||
quoted = append(quoted, fmt.Sprintf("`%s`", escapeBacktickIdent(part)))
|
||||
}
|
||||
if len(quoted) > 0 {
|
||||
return strings.Join(quoted, ".")
|
||||
}
|
||||
}
|
||||
|
||||
db := escapeBacktickIdent(dbName)
|
||||
if db == "" {
|
||||
return fmt.Sprintf("`%s`", t)
|
||||
}
|
||||
return fmt.Sprintf("`%s`.`%s`", db, t)
|
||||
}
|
||||
627
internal/db/vastbase_impl.go
Normal file
627
internal/db/vastbase_impl.go
Normal file
@@ -0,0 +1,627 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/lib/pq" // Vastbase is PostgreSQL compatible
|
||||
)
|
||||
|
||||
// VastbaseDB implements Database interface for Vastbase (海量) database
|
||||
// Vastbase is a PostgreSQL-compatible database, so we reuse PostgreSQL driver
|
||||
type VastbaseDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) getDSN(config connection.ConnectionConfig) string {
|
||||
dbname := config.Database
|
||||
if dbname == "" {
|
||||
dbname = "vastbase" // Vastbase default database
|
||||
}
|
||||
|
||||
u := &url.URL{
|
||||
Scheme: "postgres",
|
||||
Host: net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
Path: "/" + dbname,
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
return u.String()
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
|
||||
if config.UseSSH {
|
||||
logger.Infof("Vastbase 使用 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
v.forwarder = forwarder
|
||||
|
||||
host, portStr, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
|
||||
port, err := strconv.Atoi(portStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
localConfig := config
|
||||
localConfig.Host = host
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false
|
||||
|
||||
dsn = v.getDSN(localConfig)
|
||||
logger.Infof("Vastbase 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = v.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
v.conn = db
|
||||
v.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
if err := v.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Close() error {
|
||||
if v.forwarder != nil {
|
||||
if err := v.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 Vastbase SSH 端口转发失败:%v", err)
|
||||
}
|
||||
v.forwarder = nil
|
||||
}
|
||||
|
||||
if v.conn != nil {
|
||||
return v.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Ping() error {
|
||||
if v.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := v.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return v.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if v.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := v.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if v.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := v.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if v.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := v.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) Exec(query string) (int64, error) {
|
||||
if v.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := v.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetDatabases() ([]string, error) {
|
||||
data, _, err := v.Query("SELECT datname FROM pg_database WHERE datistemplate = false")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["datname"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetTables(dbName string) ([]string, error) {
|
||||
query := "SELECT schemaname, tablename FROM pg_catalog.pg_tables WHERE schemaname != 'information_schema' AND schemaname NOT LIKE 'pg_%' ORDER BY schemaname, tablename"
|
||||
data, _, err := v.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
schema, okSchema := row["schemaname"]
|
||||
name, okName := row["tablename"]
|
||||
if okSchema && okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
|
||||
continue
|
||||
}
|
||||
if okName {
|
||||
tables = append(tables, fmt.Sprintf("%v", name))
|
||||
}
|
||||
}
|
||||
return tables, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
return fmt.Sprintf("-- SHOW CREATE TABLE not fully supported for Vastbase in this version.\n-- Table: %s", tableName), nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
a.attname AS column_name,
|
||||
pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
|
||||
CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
|
||||
col_description(a.attrelid, a.attnum) AS comment,
|
||||
CASE WHEN pk.attname IS NOT NULL THEN 'PRI' ELSE '' END AS column_key
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.adrelid = c.oid AND ad.adnum = a.attnum
|
||||
LEFT JOIN (
|
||||
SELECT i.indrelid, a3.attname
|
||||
FROM pg_index i
|
||||
JOIN pg_attribute a3 ON a3.attrelid = i.indrelid AND a3.attnum = ANY(i.indkey)
|
||||
WHERE i.indisprimary
|
||||
) pk ON pk.indrelid = c.oid AND pk.attname = a.attname
|
||||
WHERE c.relkind IN ('r', 'p')
|
||||
AND n.nspname = '%s'
|
||||
AND c.relname = '%s'
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
ORDER BY a.attnum`, esc(schema), esc(table))
|
||||
|
||||
data, _, err := v.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
Key: fmt.Sprintf("%v", row["column_key"]),
|
||||
Extra: "",
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
if val, ok := row["comment"]; ok && val != nil {
|
||||
col.Comment = fmt.Sprintf("%v", val)
|
||||
}
|
||||
|
||||
if val, ok := row["column_default"]; ok && val != nil {
|
||||
def := fmt.Sprintf("%v", val)
|
||||
col.Default = &def
|
||||
if strings.HasPrefix(strings.ToLower(strings.TrimSpace(def)), "nextval(") {
|
||||
col.Extra = "auto_increment"
|
||||
}
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname AS index_name,
|
||||
a.attname AS column_name,
|
||||
ix.indisunique AS is_unique,
|
||||
x.ordinality AS seq_in_index,
|
||||
am.amname AS index_type
|
||||
FROM pg_class t
|
||||
JOIN pg_namespace n ON n.oid = t.relnamespace
|
||||
JOIN pg_index ix ON t.oid = ix.indrelid
|
||||
JOIN pg_class i ON i.oid = ix.indexrelid
|
||||
JOIN pg_am am ON i.relam = am.oid
|
||||
JOIN unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinality) ON TRUE
|
||||
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = x.attnum
|
||||
WHERE t.relkind IN ('r', 'p')
|
||||
AND t.relname = '%s'
|
||||
AND n.nspname = '%s'
|
||||
ORDER BY i.relname, x.ordinality`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := v.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parseBool := func(val interface{}) bool {
|
||||
switch v := val.(type) {
|
||||
case bool:
|
||||
return v
|
||||
case string:
|
||||
s := strings.ToLower(strings.TrimSpace(v))
|
||||
return s == "t" || s == "true" || s == "1" || s == "y" || s == "yes"
|
||||
default:
|
||||
s := strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", val)))
|
||||
return s == "t" || s == "true" || s == "1" || s == "y" || s == "yes"
|
||||
}
|
||||
}
|
||||
|
||||
parseInt := func(val interface{}) int {
|
||||
switch v := val.(type) {
|
||||
case int:
|
||||
return v
|
||||
case int64:
|
||||
return int(v)
|
||||
case float64:
|
||||
return int(v)
|
||||
case string:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(v), "%d", &n)
|
||||
return n
|
||||
default:
|
||||
var n int
|
||||
_, _ = fmt.Sscanf(strings.TrimSpace(fmt.Sprintf("%v", val)), "%d", &n)
|
||||
return n
|
||||
}
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
isUnique := false
|
||||
if val, ok := row["is_unique"]; ok && val != nil {
|
||||
isUnique = parseBool(val)
|
||||
}
|
||||
|
||||
nonUnique := 1
|
||||
if isUnique {
|
||||
nonUnique = 0
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if val, ok := row["seq_in_index"]; ok && val != nil {
|
||||
seq = parseInt(val)
|
||||
}
|
||||
|
||||
indexType := ""
|
||||
if val, ok := row["index_type"]; ok && val != nil {
|
||||
indexType = strings.ToUpper(fmt.Sprintf("%v", val))
|
||||
}
|
||||
if indexType == "" {
|
||||
indexType = "BTREE"
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["index_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
NonUnique: nonUnique,
|
||||
SeqInIndex: seq,
|
||||
IndexType: indexType,
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name AS constraint_name,
|
||||
kcu.column_name AS column_name,
|
||||
ccu.table_schema AS foreign_table_schema,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY'
|
||||
AND tc.table_name = '%s'
|
||||
AND tc.table_schema = '%s'
|
||||
ORDER BY tc.constraint_name, kcu.ordinal_position`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := v.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
refSchema := ""
|
||||
if val, ok := row["foreign_table_schema"]; ok && val != nil {
|
||||
refSchema = fmt.Sprintf("%v", val)
|
||||
}
|
||||
refTable := fmt.Sprintf("%v", row["foreign_table_name"])
|
||||
refTableName := refTable
|
||||
if strings.TrimSpace(refSchema) != "" {
|
||||
refTableName = fmt.Sprintf("%s.%s", refSchema, refTable)
|
||||
}
|
||||
|
||||
fk := connection.ForeignKeyDefinition{
|
||||
Name: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
RefTableName: refTableName,
|
||||
RefColumnName: fmt.Sprintf("%v", row["foreign_column_name"]),
|
||||
ConstraintName: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
}
|
||||
fks = append(fks, fk)
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
schema := strings.TrimSpace(dbName)
|
||||
if schema == "" {
|
||||
schema = "public"
|
||||
}
|
||||
table := strings.TrimSpace(tableName)
|
||||
if table == "" {
|
||||
return nil, fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
esc := func(s string) string { return strings.ReplaceAll(s, "'", "''") }
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT trigger_name, action_timing, event_manipulation, action_statement
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s'
|
||||
AND event_object_schema = '%s'
|
||||
ORDER BY trigger_name, event_manipulation`, esc(table), esc(schema))
|
||||
|
||||
data, _, err := v.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["trigger_name"]),
|
||||
Timing: fmt.Sprintf("%v", row["action_timing"]),
|
||||
Event: fmt.Sprintf("%v", row["event_manipulation"]),
|
||||
Statement: fmt.Sprintf("%v", row["action_statement"]),
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
query := `
|
||||
SELECT table_schema, table_name, column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND table_schema NOT LIKE 'pg_%'
|
||||
ORDER BY table_schema, table_name, ordinal_position`
|
||||
|
||||
data, _, err := v.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
schema := fmt.Sprintf("%v", row["table_schema"])
|
||||
table := fmt.Sprintf("%v", row["table_name"])
|
||||
tableName := table
|
||||
if strings.TrimSpace(schema) != "" {
|
||||
tableName = fmt.Sprintf("%s.%s", schema, table)
|
||||
}
|
||||
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: tableName,
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
|
||||
func (v *VastbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
|
||||
if v.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
tx, err := v.conn.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
quoteIdent := func(name string) string {
|
||||
n := strings.TrimSpace(name)
|
||||
n = strings.Trim(n, "\"")
|
||||
n = strings.ReplaceAll(n, "\"", "\"\"")
|
||||
if n == "" {
|
||||
return "\"\""
|
||||
}
|
||||
return `"` + n + `"`
|
||||
}
|
||||
|
||||
schema := ""
|
||||
table := strings.TrimSpace(tableName)
|
||||
if parts := strings.SplitN(table, ".", 2); len(parts) == 2 {
|
||||
schema = strings.TrimSpace(parts[0])
|
||||
table = strings.TrimSpace(parts[1])
|
||||
}
|
||||
|
||||
qualifiedTable := ""
|
||||
if schema != "" {
|
||||
qualifiedTable = fmt.Sprintf("%s.%s", quoteIdent(schema), quoteIdent(table))
|
||||
} else {
|
||||
qualifiedTable = quoteIdent(table)
|
||||
}
|
||||
|
||||
// 1. Deletes
|
||||
for _, pk := range changes.Deletes {
|
||||
var wheres []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
for k, val := range pk {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, val)
|
||||
}
|
||||
if len(wheres) == 0 {
|
||||
continue
|
||||
}
|
||||
query := fmt.Sprintf("DELETE FROM %s WHERE %s", qualifiedTable, strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("delete error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Updates
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, val := range update.Values {
|
||||
idx++
|
||||
sets = append(sets, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, val)
|
||||
}
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
var wheres []string
|
||||
for k, val := range update.Keys {
|
||||
idx++
|
||||
wheres = append(wheres, fmt.Sprintf("%s = $%d", quoteIdent(k), idx))
|
||||
args = append(args, val)
|
||||
}
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("UPDATE %s SET %s WHERE %s", qualifiedTable, strings.Join(sets, ", "), strings.Join(wheres, " AND "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("update error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Inserts
|
||||
for _, row := range changes.Inserts {
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
idx := 0
|
||||
|
||||
for k, val := range row {
|
||||
idx++
|
||||
cols = append(cols, quoteIdent(k))
|
||||
placeholders = append(placeholders, fmt.Sprintf("$%d", idx))
|
||||
args = append(args, val)
|
||||
}
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", qualifiedTable, strings.Join(cols, ", "), strings.Join(placeholders, ", "))
|
||||
if _, err := tx.Exec(query, args...); err != nil {
|
||||
return fmt.Errorf("insert error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
101
internal/redis/redis.go
Normal file
101
internal/redis/redis.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package redis
|
||||
|
||||
import "GoNavi-Wails/internal/connection"
|
||||
|
||||
// RedisValue represents a Redis value with its type and metadata
|
||||
type RedisValue struct {
|
||||
Type string `json:"type"` // string, hash, list, set, zset, stream
|
||||
TTL int64 `json:"ttl"` // TTL in seconds, -1 means no expiry, -2 means key doesn't exist
|
||||
Value interface{} `json:"value"` // The actual value
|
||||
Length int64 `json:"length"` // Length/size of the value
|
||||
}
|
||||
|
||||
// RedisDBInfo represents information about a Redis database
|
||||
type RedisDBInfo struct {
|
||||
Index int `json:"index"` // Database index (0-15)
|
||||
Keys int64 `json:"keys"` // Number of keys in this database
|
||||
}
|
||||
|
||||
// RedisKeyInfo represents information about a Redis key
|
||||
type RedisKeyInfo struct {
|
||||
Key string `json:"key"`
|
||||
Type string `json:"type"`
|
||||
TTL int64 `json:"ttl"`
|
||||
}
|
||||
|
||||
// RedisScanResult represents the result of a SCAN operation
|
||||
type RedisScanResult struct {
|
||||
Keys []RedisKeyInfo `json:"keys"`
|
||||
Cursor uint64 `json:"cursor"`
|
||||
}
|
||||
|
||||
// RedisClient defines the interface for Redis operations
|
||||
type RedisClient interface {
|
||||
// Connection management
|
||||
Connect(config connection.ConnectionConfig) error
|
||||
Close() error
|
||||
Ping() error
|
||||
|
||||
// Key operations
|
||||
ScanKeys(pattern string, cursor uint64, count int64) (*RedisScanResult, error)
|
||||
GetKeyType(key string) (string, error)
|
||||
GetTTL(key string) (int64, error)
|
||||
SetTTL(key string, ttl int64) error
|
||||
DeleteKeys(keys []string) (int64, error)
|
||||
RenameKey(oldKey, newKey string) error
|
||||
KeyExists(key string) (bool, error)
|
||||
|
||||
// Value operations
|
||||
GetValue(key string) (*RedisValue, error)
|
||||
|
||||
// String operations
|
||||
GetString(key string) (string, error)
|
||||
SetString(key, value string, ttl int64) error
|
||||
|
||||
// Hash operations
|
||||
GetHash(key string) (map[string]string, error)
|
||||
SetHashField(key, field, value string) error
|
||||
DeleteHashField(key string, fields ...string) error
|
||||
|
||||
// List operations
|
||||
GetList(key string, start, stop int64) ([]string, error)
|
||||
ListPush(key string, values ...string) error
|
||||
ListSet(key string, index int64, value string) error
|
||||
|
||||
// Set operations
|
||||
GetSet(key string) ([]string, error)
|
||||
SetAdd(key string, members ...string) error
|
||||
SetRemove(key string, members ...string) error
|
||||
|
||||
// Sorted Set operations
|
||||
GetZSet(key string, start, stop int64) ([]ZSetMember, error)
|
||||
ZSetAdd(key string, members ...ZSetMember) error
|
||||
ZSetRemove(key string, members ...string) error
|
||||
|
||||
// Stream operations
|
||||
GetStream(key, start, stop string, count int64) ([]StreamEntry, error)
|
||||
StreamAdd(key string, fields map[string]string, id string) (string, error)
|
||||
StreamDelete(key string, ids ...string) (int64, error)
|
||||
|
||||
// Command execution
|
||||
ExecuteCommand(args []string) (interface{}, error)
|
||||
|
||||
// Server information
|
||||
GetServerInfo() (map[string]string, error)
|
||||
GetDatabases() ([]RedisDBInfo, error)
|
||||
SelectDB(index int) error
|
||||
GetCurrentDB() int
|
||||
FlushDB() error
|
||||
}
|
||||
|
||||
// ZSetMember represents a member in a sorted set
|
||||
type ZSetMember struct {
|
||||
Member string `json:"member"`
|
||||
Score float64 `json:"score"`
|
||||
}
|
||||
|
||||
// StreamEntry represents a single stream message
|
||||
type StreamEntry struct {
|
||||
ID string `json:"id"`
|
||||
Fields map[string]string `json:"fields"`
|
||||
}
|
||||
816
internal/redis/redis_impl.go
Normal file
816
internal/redis/redis_impl.go
Normal file
@@ -0,0 +1,816 @@
|
||||
package redis
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
|
||||
"github.com/redis/go-redis/v9"
|
||||
)
|
||||
|
||||
// RedisClientImpl implements RedisClient using go-redis
|
||||
type RedisClientImpl struct {
|
||||
client *redis.Client
|
||||
config connection.ConnectionConfig
|
||||
currentDB int
|
||||
forwarder *ssh.LocalForwarder
|
||||
}
|
||||
|
||||
// NewRedisClient creates a new Redis client instance
|
||||
func NewRedisClient() RedisClient {
|
||||
return &RedisClientImpl{}
|
||||
}
|
||||
|
||||
// Connect establishes a connection to Redis
|
||||
func (r *RedisClientImpl) Connect(config connection.ConnectionConfig) error {
|
||||
r.config = config
|
||||
r.currentDB = config.RedisDB
|
||||
|
||||
addr := fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
|
||||
// Handle SSH tunnel if enabled
|
||||
if config.UseSSH {
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(config.SSH, config.Host, config.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败: %w", err)
|
||||
}
|
||||
r.forwarder = forwarder
|
||||
addr = forwarder.LocalAddr
|
||||
logger.Infof("Redis 通过 SSH 隧道连接: %s -> %s:%d", addr, config.Host, config.Port)
|
||||
}
|
||||
|
||||
opts := &redis.Options{
|
||||
Addr: addr,
|
||||
Password: config.Password,
|
||||
DB: config.RedisDB,
|
||||
DialTimeout: time.Duration(config.Timeout) * time.Second,
|
||||
ReadTimeout: time.Duration(config.Timeout) * time.Second,
|
||||
WriteTimeout: time.Duration(config.Timeout) * time.Second,
|
||||
}
|
||||
|
||||
if opts.DialTimeout == 0 {
|
||||
opts.DialTimeout = 30 * time.Second
|
||||
opts.ReadTimeout = 30 * time.Second
|
||||
opts.WriteTimeout = 30 * time.Second
|
||||
}
|
||||
|
||||
r.client = redis.NewClient(opts)
|
||||
|
||||
// Test connection
|
||||
ctx, cancel := context.WithTimeout(context.Background(), opts.DialTimeout)
|
||||
defer cancel()
|
||||
|
||||
if err := r.client.Ping(ctx).Err(); err != nil {
|
||||
r.client.Close()
|
||||
r.client = nil
|
||||
return fmt.Errorf("Redis 连接失败: %w", err)
|
||||
}
|
||||
|
||||
logger.Infof("Redis 连接成功: %s DB=%d", addr, config.RedisDB)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close closes the Redis connection
|
||||
func (r *RedisClientImpl) Close() error {
|
||||
if r.client != nil {
|
||||
err := r.client.Close()
|
||||
r.client = nil
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Ping tests the connection
|
||||
func (r *RedisClientImpl) Ping() error {
|
||||
if r.client == nil {
|
||||
return fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
return r.client.Ping(ctx).Err()
|
||||
}
|
||||
|
||||
// ScanKeys scans keys matching a pattern using a single SCAN iteration.
//
// pattern defaults to "*" when empty; count defaults to 100 when <= 0.
// Returns the keys found in this iteration (with type and TTL metadata)
// plus the cursor for the next iteration (0 means the scan is complete).
// TTL values follow Redis convention: -1 = no expiry, -2 = key missing.
func (r *RedisClientImpl) ScanKeys(pattern string, cursor uint64, count int64) (*RedisScanResult, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Normalize caller-friendly defaults.
	if pattern == "" {
		pattern = "*"
	}
	if count <= 0 {
		count = 100
	}

	keys, nextCursor, err := r.client.Scan(ctx, cursor, pattern, count).Result()
	if err != nil {
		return nil, err
	}

	result := &RedisScanResult{
		Keys:   make([]RedisKeyInfo, 0, len(keys)),
		Cursor: nextCursor,
	}

	// Get type and TTL for each key in one round trip via a pipeline;
	// command results are read back positionally after Exec.
	pipe := r.client.Pipeline()
	typeResults := make([]*redis.StatusCmd, len(keys))
	ttlResults := make([]*redis.DurationCmd, len(keys))

	for i, key := range keys {
		typeResults[i] = pipe.Type(ctx, key)
		ttlResults[i] = pipe.TTL(ctx, key)
	}

	_, err = pipe.Exec(ctx)
	if err != nil && err != redis.Nil {
		// Fallback: get info one by one (slower, but tolerates a failed
		// pipeline — e.g. keys deleted mid-scan). Per-key errors are
		// deliberately ignored so one bad key does not abort the listing.
		for _, key := range keys {
			keyType, _ := r.GetKeyType(key)
			ttl, _ := r.GetTTL(key)
			result.Keys = append(result.Keys, RedisKeyInfo{
				Key:  key,
				Type: keyType,
				TTL:  ttl,
			})
		}
		return result, nil
	}

	for i, key := range keys {
		keyType := typeResults[i].Val()
		ttl := int64(ttlResults[i].Val().Seconds())
		// NOTE(review): the == -1 / == -2 comparisons match raw
		// time.Duration sentinels; this assumes the go-redis driver
		// returns -1ns / -2ns (not -1s / -2s) for these replies —
		// confirm against the driver version in go.mod.
		if ttlResults[i].Val() == -1 {
			ttl = -1
		} else if ttlResults[i].Val() == -2 {
			ttl = -2
		}
		result.Keys = append(result.Keys, RedisKeyInfo{
			Key:  key,
			Type: keyType,
			TTL:  ttl,
		})
	}

	return result, nil
}
|
||||
|
||||
// GetKeyType returns the Redis type of a key (e.g. "string", "hash",
// "list", "set", "zset", "stream") via the TYPE command, with a 5-second
// deadline. Returns an error when the client is not connected.
func (r *RedisClientImpl) GetKeyType(key string) (string, error) {
	if r.client == nil {
		return "", fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return r.client.Type(ctx, key).Result()
}
|
||||
|
||||
// GetTTL returns the TTL of a key in seconds.
//
// Follows Redis TTL semantics: -1 means the key exists but has no expiry,
// -2 means the key does not exist; any other value is whole seconds
// remaining. Returns an error when the client is not connected.
func (r *RedisClientImpl) GetTTL(key string) (int64, error) {
	if r.client == nil {
		return 0, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	ttl, err := r.client.TTL(ctx, key).Result()
	if err != nil {
		return 0, err
	}

	// NOTE(review): comparing a time.Duration to -1/-2 matches only if the
	// driver returns the raw sentinel (nanosecond) values for these
	// replies — confirm against the go-redis version in use.
	if ttl == -1 {
		return -1, nil // No expiry
	} else if ttl == -2 {
		return -2, nil // Key doesn't exist
	}
	return int64(ttl.Seconds()), nil
}
|
||||
|
||||
// SetTTL sets the TTL of a key in seconds.
//
// A negative ttl removes any existing expiry (PERSIST); otherwise the key
// expires after ttl seconds (EXPIRE). Note ttl == 0 is passed through to
// EXPIRE as-is. Returns an error when the client is not connected.
func (r *RedisClientImpl) SetTTL(key string, ttl int64) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	if ttl < 0 {
		// Remove expiry
		return r.client.Persist(ctx, key).Err()
	}
	return r.client.Expire(ctx, key, time.Duration(ttl)*time.Second).Err()
}
|
||||
|
||||
// DeleteKeys deletes one or more keys
|
||||
func (r *RedisClientImpl) DeleteKeys(keys []string) (int64, error) {
|
||||
if r.client == nil {
|
||||
return 0, fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
return r.client.Del(ctx, keys...).Result()
|
||||
}
|
||||
|
||||
// RenameKey renames a key via RENAME with a 5-second deadline.
//
// Per Redis semantics RENAME overwrites newKey if it already exists and
// errors if oldKey does not exist. Returns an error when the client is
// not connected.
func (r *RedisClientImpl) RenameKey(oldKey, newKey string) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return r.client.Rename(ctx, oldKey, newKey).Err()
}
|
||||
|
||||
// KeyExists checks if a key exists
|
||||
func (r *RedisClientImpl) KeyExists(key string) (bool, error) {
|
||||
if r.client == nil {
|
||||
return false, fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
n, err := r.client.Exists(ctx, key).Result()
|
||||
return n > 0, err
|
||||
}
|
||||
|
||||
// GetValue gets the value of a key with automatic type detection.
//
// It looks up the key's type and TTL, then fetches the value using the
// type-appropriate command. The returned RedisValue carries the type,
// TTL, a Length (full collection size), and Value whose Go type depends
// on the Redis type:
//
//	string -> string
//	hash   -> map[string]string (all fields)
//	list   -> []string (first 1000 elements only)
//	set    -> []string (all members via SMEMBERS)
//	zset   -> []ZSetMember (first 1000, ascending by score)
//	stream -> []StreamEntry (first 1000 entries)
//
// For capped types, Length may exceed len(Value). Unsupported types
// return an error. TTL lookup errors are deliberately ignored (TTL then
// stays at its zero value).
func (r *RedisClientImpl) GetValue(key string) (*RedisValue, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}

	keyType, err := r.GetKeyType(key)
	if err != nil {
		return nil, err
	}

	// Best-effort TTL; errors intentionally ignored.
	ttl, _ := r.GetTTL(key)

	result := &RedisValue{
		Type: keyType,
		TTL:  ttl,
	}

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	switch keyType {
	case "string":
		val, err := r.client.Get(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		result.Value = val
		// Length is the byte length of the string value.
		result.Length = int64(len(val))

	case "hash":
		val, err := r.client.HGetAll(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		result.Value = val
		// Length is the number of hash fields.
		result.Length = int64(len(val))

	case "list":
		length, err := r.client.LLen(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		// Get first 1000 items
		limit := int64(1000)
		if length < limit {
			limit = length
		}
		val, err := r.client.LRange(ctx, key, 0, limit-1).Result()
		if err != nil {
			return nil, err
		}
		result.Value = val
		result.Length = length

	case "set":
		length, err := r.client.SCard(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		// Get members using SMembers (limited by Redis server).
		// NOTE(review): unlike list/zset, this path has no 1000-item cap;
		// a huge set is fetched whole — confirm this is intentional.
		members, err := r.client.SMembers(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		result.Value = members
		result.Length = length

	case "zset":
		length, err := r.client.ZCard(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		// Get first 1000 members with scores
		limit := int64(1000)
		if length < limit {
			limit = length
		}
		val, err := r.client.ZRangeWithScores(ctx, key, 0, limit-1).Result()
		if err != nil {
			return nil, err
		}
		members := make([]ZSetMember, len(val))
		for i, z := range val {
			members[i] = ZSetMember{
				// z.Member is interface{}; string assertion matches how
				// go-redis returns zset members from this command.
				Member: z.Member.(string),
				Score:  z.Score,
			}
		}
		result.Value = members
		result.Length = length

	case "stream":
		length, err := r.client.XLen(ctx, key).Result()
		if err != nil {
			return nil, err
		}
		result.Length = length
		if length == 0 {
			// Empty stream: return an empty (non-nil) slice.
			result.Value = []StreamEntry{}
			break
		}
		limit := int64(1000)
		if length < limit {
			limit = length
		}
		// "-".."+" covers the whole stream; XRangeN caps the count.
		val, err := r.client.XRangeN(ctx, key, "-", "+", limit).Result()
		if err != nil {
			return nil, err
		}
		result.Value = toStreamEntries(val)

	default:
		return nil, fmt.Errorf("不支持的 Redis 数据类型: %s", keyType)
	}

	return result, nil
}
|
||||
|
||||
// GetString gets a string value via GET with a 5-second deadline.
// Returns an error when the client is not connected; a missing key
// surfaces as the driver's nil-reply error.
func (r *RedisClientImpl) GetString(key string) (string, error) {
	if r.client == nil {
		return "", fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return r.client.Get(ctx, key).Result()
}
|
||||
|
||||
// SetString sets a string value with optional TTL.
//
// ttl is in seconds; any value <= 0 leaves expiration at zero, which the
// SET command treats as "no expiry". Returns an error when the client is
// not connected.
func (r *RedisClientImpl) SetString(key, value string, ttl int64) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Zero duration == persist forever; only positive TTLs are applied.
	var expiration time.Duration
	if ttl > 0 {
		expiration = time.Duration(ttl) * time.Second
	}
	return r.client.Set(ctx, key, value, expiration).Err()
}
|
||||
|
||||
// GetHash gets all fields of a hash via HGETALL with a 30-second deadline
// (longer than point lookups since the whole hash is transferred).
// Returns an error when the client is not connected.
func (r *RedisClientImpl) GetHash(key string) (map[string]string, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	return r.client.HGetAll(ctx, key).Result()
}
|
||||
|
||||
// SetHashField sets a single field in a hash via HSET with a 5-second
// deadline, creating the hash if needed. Returns an error when the client
// is not connected.
func (r *RedisClientImpl) SetHashField(key, field, value string) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return r.client.HSet(ctx, key, field, value).Err()
}
|
||||
|
||||
// DeleteHashField deletes fields from a hash
|
||||
func (r *RedisClientImpl) DeleteHashField(key string, fields ...string) error {
|
||||
if r.client == nil {
|
||||
return fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
return r.client.HDel(ctx, key, fields...).Err()
|
||||
}
|
||||
|
||||
// GetList gets a range of elements from a list via LRANGE with a
// 30-second deadline. start/stop follow Redis semantics (inclusive,
// negative indexes count from the tail; 0,-1 is the whole list).
// Returns an error when the client is not connected.
func (r *RedisClientImpl) GetList(key string, start, stop int64) ([]string, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	return r.client.LRange(ctx, key, start, stop).Result()
}
|
||||
|
||||
// ListPush pushes values to the end of a list
|
||||
func (r *RedisClientImpl) ListPush(key string, values ...string) error {
|
||||
if r.client == nil {
|
||||
return fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
args := make([]interface{}, len(values))
|
||||
for i, v := range values {
|
||||
args[i] = v
|
||||
}
|
||||
return r.client.RPush(ctx, key, args...).Err()
|
||||
}
|
||||
|
||||
// ListSet sets the value at an index in a list via LSET with a 5-second
// deadline. Per Redis semantics, an out-of-range index is an error.
// Returns an error when the client is not connected.
func (r *RedisClientImpl) ListSet(key string, index int64, value string) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return r.client.LSet(ctx, key, index, value).Err()
}
|
||||
|
||||
// GetSet gets all members of a set via SMEMBERS with a 30-second deadline.
// The whole set is fetched in one reply (no pagination). Returns an error
// when the client is not connected.
func (r *RedisClientImpl) GetSet(key string) ([]string, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	return r.client.SMembers(ctx, key).Result()
}
|
||||
|
||||
// SetAdd adds members to a set
|
||||
func (r *RedisClientImpl) SetAdd(key string, members ...string) error {
|
||||
if r.client == nil {
|
||||
return fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
args := make([]interface{}, len(members))
|
||||
for i, m := range members {
|
||||
args[i] = m
|
||||
}
|
||||
return r.client.SAdd(ctx, key, args...).Err()
|
||||
}
|
||||
|
||||
// SetRemove removes members from a set
|
||||
func (r *RedisClientImpl) SetRemove(key string, members ...string) error {
|
||||
if r.client == nil {
|
||||
return fmt.Errorf("Redis 客户端未连接")
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
args := make([]interface{}, len(members))
|
||||
for i, m := range members {
|
||||
args[i] = m
|
||||
}
|
||||
return r.client.SRem(ctx, key, args...).Err()
|
||||
}
|
||||
|
||||
// GetZSet gets members with scores from a sorted set via ZRANGEWITHSCORES
// (ascending by score) with a 30-second deadline. start/stop follow Redis
// rank semantics (inclusive; negative counts from the end). Returns an
// error when the client is not connected.
func (r *RedisClientImpl) GetZSet(key string, start, stop int64) ([]ZSetMember, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	val, err := r.client.ZRangeWithScores(ctx, key, start, stop).Result()
	if err != nil {
		return nil, err
	}

	// Convert driver redis.Z entries into the app's ZSetMember shape.
	members := make([]ZSetMember, len(val))
	for i, z := range val {
		members[i] = ZSetMember{
			// z.Member is interface{}; string assertion matches how
			// go-redis returns zset members from this command.
			Member: z.Member.(string),
			Score:  z.Score,
		}
	}
	return members, nil
}
|
||||
|
||||
// ZSetAdd adds members (with scores) to a sorted set via ZADD with a
// 5-second deadline. Existing members have their score updated.
// Returns an error when the client is not connected.
func (r *RedisClientImpl) ZSetAdd(key string, members ...ZSetMember) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Convert app-level ZSetMember values into driver redis.Z entries.
	zMembers := make([]redis.Z, len(members))
	for i, m := range members {
		zMembers[i] = redis.Z{
			Score:  m.Score,
			Member: m.Member,
		}
	}
	return r.client.ZAdd(ctx, key, zMembers...).Err()
}
|
||||
|
||||
// ZSetRemove removes members from a sorted set via ZREM with a 5-second
// deadline. Returns an error when the client is not connected.
func (r *RedisClientImpl) ZSetRemove(key string, members ...string) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	// go-redis takes ...interface{}; widen the string slice.
	args := make([]interface{}, len(members))
	for i, m := range members {
		args[i] = m
	}
	return r.client.ZRem(ctx, key, args...).Err()
}
|
||||
|
||||
// GetStream gets stream entries in an ID range via XRANGE.
//
// start defaults to "-" (oldest), stop to "+" (newest), and count to 1000
// when <= 0. Uses a 30-second deadline. Returns an error when the client
// is not connected.
func (r *RedisClientImpl) GetStream(key, start, stop string, count int64) ([]StreamEntry, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	// Normalize caller-friendly defaults to full-range / capped fetch.
	if start == "" {
		start = "-"
	}
	if stop == "" {
		stop = "+"
	}
	if count <= 0 {
		count = 1000
	}

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	val, err := r.client.XRangeN(ctx, key, start, stop, count).Result()
	if err != nil {
		return nil, err
	}
	return toStreamEntries(val), nil
}
|
||||
|
||||
// StreamAdd adds an entry to a stream via XADD and returns the new
// entry's ID.
//
// fields must be non-empty (XADD requires at least one field/value pair).
// An empty id defaults to "*", letting the server auto-generate the ID.
// Returns an error when the client is not connected.
func (r *RedisClientImpl) StreamAdd(key string, fields map[string]string, id string) (string, error) {
	if r.client == nil {
		return "", fmt.Errorf("Redis 客户端未连接")
	}
	if len(fields) == 0 {
		return "", fmt.Errorf("Stream 字段不能为空")
	}
	if id == "" {
		id = "*"
	}

	// Widen string map to map[string]interface{} for the driver API.
	values := make(map[string]interface{}, len(fields))
	for field, value := range fields {
		values[field] = value
	}

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	newID, err := r.client.XAdd(ctx, &redis.XAddArgs{
		Stream: key,
		ID:     id,
		Values: values,
	}).Result()
	if err != nil {
		return "", err
	}
	return newID, nil
}
|
||||
|
||||
// StreamDelete deletes entries from a stream by ID via XDEL and returns
// the number of entries actually removed. ids must be non-empty; uses a
// 5-second deadline. Returns an error when the client is not connected.
func (r *RedisClientImpl) StreamDelete(key string, ids ...string) (int64, error) {
	if r.client == nil {
		return 0, fmt.Errorf("Redis 客户端未连接")
	}
	if len(ids) == 0 {
		return 0, fmt.Errorf("Stream ID 不能为空")
	}

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return r.client.XDel(ctx, key, ids...).Result()
}
|
||||
|
||||
func toStreamEntries(messages []redis.XMessage) []StreamEntry {
|
||||
entries := make([]StreamEntry, 0, len(messages))
|
||||
for _, msg := range messages {
|
||||
fields := make(map[string]string, len(msg.Values))
|
||||
for field, value := range msg.Values {
|
||||
fields[field] = fmt.Sprint(value)
|
||||
}
|
||||
entries = append(entries, StreamEntry{
|
||||
ID: msg.ID,
|
||||
Fields: fields,
|
||||
})
|
||||
}
|
||||
return entries
|
||||
}
|
||||
|
||||
// ExecuteCommand executes a raw Redis command given as tokenized args
// (e.g. ["GET", "foo"]) with a 30-second deadline.
//
// The reply is normalized by formatCommandResult ([]byte -> string,
// recursively through arrays) for display. Returns an error when the
// client is not connected or args is empty.
func (r *RedisClientImpl) ExecuteCommand(args []string) (interface{}, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	if len(args) == 0 {
		return nil, fmt.Errorf("命令不能为空")
	}

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Convert to []interface{} for the driver's generic Do API.
	cmdArgs := make([]interface{}, len(args))
	for i, arg := range args {
		cmdArgs[i] = arg
	}

	result, err := r.client.Do(ctx, cmdArgs...).Result()
	if err != nil {
		return nil, err
	}

	return formatCommandResult(result), nil
}
|
||||
|
||||
// formatCommandResult normalizes a raw Redis reply for display: byte
// slices become strings, arrays are normalized recursively, and every
// other value passes through unchanged.
func formatCommandResult(result interface{}) interface{} {
	switch value := result.(type) {
	case []byte:
		return string(value)
	case []interface{}:
		normalized := make([]interface{}, len(value))
		for idx, element := range value {
			normalized[idx] = formatCommandResult(element)
		}
		return normalized
	}
	return result
}
|
||||
|
||||
// GetServerInfo returns server information from the INFO command parsed
// into a flat key->value map.
//
// Section headers ("# ...") and blank lines are skipped; each remaining
// "key:value" line becomes one map entry (section grouping is lost).
// Uses a 5-second deadline. Returns an error when the client is not
// connected.
func (r *RedisClientImpl) GetServerInfo() (map[string]string, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	info, err := r.client.Info(ctx).Result()
	if err != nil {
		return nil, err
	}

	result := make(map[string]string)
	lines := strings.Split(info, "\n")
	for _, line := range lines {
		// INFO uses CRLF line endings; TrimSpace also drops the \r.
		line = strings.TrimSpace(line)
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		// Split on the first ':' only — values may themselves contain ':'.
		parts := strings.SplitN(line, ":", 2)
		if len(parts) == 2 {
			result[parts[0]] = parts[1]
		}
	}
	return result, nil
}
|
||||
|
||||
// GetDatabases returns key counts for databases 0-15.
//
// It parses the "Keyspace" section of INFO (lines like
// "db0:keys=123,expires=0,avg_ttl=0"). Databases absent from the output
// (i.e. empty) are reported with Keys == 0. Always returns exactly 16
// entries; this assumes the server uses the default 16-database
// configuration. Uses a 10-second deadline.
func (r *RedisClientImpl) GetDatabases() ([]RedisDBInfo, error) {
	if r.client == nil {
		return nil, fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Get keyspace info
	info, err := r.client.Info(ctx, "keyspace").Result()
	if err != nil {
		return nil, err
	}

	// Parse keyspace info into dbIndex -> key count.
	dbMap := make(map[int]int64)
	lines := strings.Split(info, "\n")
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if strings.HasPrefix(line, "db") {
			// Format: db0:keys=123,expires=0,avg_ttl=0
			parts := strings.SplitN(line, ":", 2)
			if len(parts) != 2 {
				continue
			}
			dbIndex, err := strconv.Atoi(strings.TrimPrefix(parts[0], "db"))
			if err != nil {
				// Not a "db<N>" line; skip it.
				continue
			}
			// Parse keys count from the comma-separated k=v pairs.
			kvPairs := strings.Split(parts[1], ",")
			for _, kv := range kvPairs {
				if strings.HasPrefix(kv, "keys=") {
					// Parse errors leave keys at 0, which is acceptable.
					keys, _ := strconv.ParseInt(strings.TrimPrefix(kv, "keys="), 10, 64)
					dbMap[dbIndex] = keys
					break
				}
			}
		}
	}

	// Return all 16 databases (0-15)
	result := make([]RedisDBInfo, 16)
	for i := 0; i < 16; i++ {
		result[i] = RedisDBInfo{
			Index: i,
			Keys:  dbMap[i], // Will be 0 if not in map
		}
	}

	return result, nil
}
|
||||
|
||||
// SelectDB switches to another logical database by rebuilding the client.
//
// Rather than issuing SELECT (which would only affect one pooled
// connection), it constructs a fresh go-redis client targeting the new DB
// with the same address/credentials — routed through the SSH forwarder's
// local address when one is active — verifies it with PING, and only then
// swaps it in and closes the old client. On any failure the old client is
// left untouched. index must be in 0-15.
func (r *RedisClientImpl) SelectDB(index int) error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	if index < 0 || index > 15 {
		return fmt.Errorf("数据库索引必须在 0-15 之间")
	}

	// Create new client with different DB
	addr := fmt.Sprintf("%s:%d", r.config.Host, r.config.Port)
	if r.forwarder != nil {
		// Connect via the SSH tunnel's local endpoint instead.
		addr = r.forwarder.LocalAddr
	}

	opts := &redis.Options{
		Addr:         addr,
		Password:     r.config.Password,
		DB:           index,
		DialTimeout:  time.Duration(r.config.Timeout) * time.Second,
		ReadTimeout:  time.Duration(r.config.Timeout) * time.Second,
		WriteTimeout: time.Duration(r.config.Timeout) * time.Second,
	}

	// Unset timeout in config (0) falls back to a 30s default.
	if opts.DialTimeout == 0 {
		opts.DialTimeout = 30 * time.Second
		opts.ReadTimeout = 30 * time.Second
		opts.WriteTimeout = 30 * time.Second
	}

	newClient := redis.NewClient(opts)

	ctx, cancel := context.WithTimeout(context.Background(), opts.DialTimeout)
	defer cancel()

	// Verify the new client before committing to it.
	if err := newClient.Ping(ctx).Err(); err != nil {
		newClient.Close()
		return fmt.Errorf("切换数据库失败: %w", err)
	}

	// Close old client and replace
	r.client.Close()
	r.client = newClient
	r.currentDB = index

	logger.Infof("Redis 切换到数据库: db%d", index)
	return nil
}
|
||||
|
||||
// GetCurrentDB returns the index of the currently selected logical
// database (as tracked by SelectDB; not queried from the server).
func (r *RedisClientImpl) GetCurrentDB() int {
	return r.currentDB
}
|
||||
|
||||
// FlushDB deletes ALL keys in the currently selected database via
// FLUSHDB — destructive and irreversible; callers are expected to
// confirm with the user first. Uses a 30-second deadline. Returns an
// error when the client is not connected.
func (r *RedisClientImpl) FlushDB() error {
	if r.client == nil {
		return fmt.Errorf("Redis 客户端未连接")
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	return r.client.FlushDB(ctx).Err()
}
|
||||
@@ -3,8 +3,10 @@ package ssh
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"os"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
@@ -110,3 +112,264 @@ func RegisterSSHNetwork(sshConfig connection.SSHConfig) (string, error) {
|
||||
|
||||
return netName, nil
|
||||
}
|
||||
|
||||
// Package-level caches for SSH connectivity. Each map is paired with its
// own RWMutex; always take the matching lock before reading or writing.
var (
	// sshClientCache stores SSH clients keyed by host:port:user so repeat
	// tunnels reuse one underlying connection.
	sshClientCache   = make(map[string]*ssh.Client)
	sshClientCacheMu sync.RWMutex
	// localForwarders caches live port forwarders keyed by the full
	// ssh-endpoint -> remote-endpoint route.
	localForwarders = make(map[string]*LocalForwarder)
	forwarderMu     sync.RWMutex
)
|
||||
|
||||
// LocalForwarder represents a local port forwarder through SSH: it
// accepts TCP connections on LocalAddr and pipes them to RemoteAddr over
// the SSH client's tunnel.
type LocalForwarder struct {
	LocalAddr  string      // local listen address ("127.0.0.1:<port>")
	RemoteAddr string      // remote "host:port" reached through the tunnel
	SSHClient  *ssh.Client // underlying (possibly shared) SSH connection
	listener   net.Listener
	closeChan  chan struct{} // closed to signal the accept loop to stop
	closeOnce  sync.Once     // guards against double close
	closed     bool          // closed-state flag, read via IsClosed
	closedMu   sync.RWMutex  // protects closed
}
|
||||
|
||||
// NewLocalForwarder creates a new local port forwarder.
// It listens on a random loopback port and forwards all connections to
// remoteHost:remotePort through the SSH tunnel. The accept loop runs in
// a background goroutine until Close is called; the SSH client comes
// from the shared cache and may be reused by other forwarders.
func NewLocalForwarder(sshConfig connection.SSHConfig, remoteHost string, remotePort int) (*LocalForwarder, error) {
	client, err := GetOrCreateSSHClient(sshConfig)
	if err != nil {
		return nil, fmt.Errorf("建立 SSH 连接失败:%w", err)
	}

	// Listen on localhost with a random port (":0" = OS-assigned).
	listener, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		return nil, fmt.Errorf("创建本地监听器失败:%w", err)
	}

	localAddr := listener.Addr().String()
	remoteAddr := fmt.Sprintf("%s:%d", remoteHost, remotePort)

	forwarder := &LocalForwarder{
		LocalAddr:  localAddr,
		RemoteAddr: remoteAddr,
		SSHClient:  client,
		listener:   listener,
		closeChan:  make(chan struct{}),
	}

	// Start forwarding in background
	go forwarder.forward()

	logger.Infof("已创建 SSH 端口转发:本地 %s -> 远程 %s", localAddr, remoteAddr)
	return forwarder, nil
}
|
||||
|
||||
// forward is the accept loop: it accepts local connections and spawns a
// goroutine per connection to relay traffic through the SSH tunnel.
// It exits when the listener errors — either because Close was called
// (detected via closeChan) or because the listener failed for any other
// reason.
func (f *LocalForwarder) forward() {
	for {
		localConn, err := f.listener.Accept()
		if err != nil {
			// Check if we're shutting down
			select {
			case <-f.closeChan:
				// Deliberate shutdown; exit quietly.
				return
			default:
				logger.Warnf("接受本地连接失败:%v", err)
				// Listener is likely closed/broken; exit the loop.
				return
			}
		}

		// One goroutine per accepted connection.
		go f.handleConnection(localConn)
	}
}
|
||||
|
||||
// handleConnection relays one local TCP connection to the remote address
// through the SSH tunnel, copying bytes in both directions until both
// sides finish. Both connections are closed when this returns.
func (f *LocalForwarder) handleConnection(localConn net.Conn) {
	defer localConn.Close()

	// Open the remote leg through the SSH client.
	remoteConn, err := f.SSHClient.Dial("tcp", f.RemoteAddr)
	if err != nil {
		logger.Warnf("通过 SSH 连接到远程 %s 失败:%v", f.RemoteAddr, err)
		return
	}
	defer remoteConn.Close()

	// Bidirectional copy; each direction reports completion on errc.
	errc := make(chan error, 2)

	// Copy from local to remote
	go func() {
		_, err := io.Copy(remoteConn, localConn)
		if err != nil {
			logger.Warnf("本地->远程数据复制错误:%v", err)
		}
		errc <- err
	}()

	// Copy from remote to local
	go func() {
		_, err := io.Copy(localConn, remoteConn)
		if err != nil {
			logger.Warnf("远程->本地数据复制错误:%v", err)
		}
		errc <- err
	}()

	// Wait for BOTH copy goroutines before the deferred closes run,
	// so neither goroutine writes to an already-closed connection owner.
	<-errc
	<-errc
}
|
||||
|
||||
// Close closes the forwarder (thread-safe, can be called multiple times).
// Only the first call does work: it marks the forwarder closed, signals
// the accept loop via closeChan, and closes the listener. Subsequent
// calls return nil. Note it does NOT close SSHClient, which is shared via
// the package-level cache.
func (f *LocalForwarder) Close() error {
	var err error
	f.closeOnce.Do(func() {
		f.closedMu.Lock()
		f.closed = true
		f.closedMu.Unlock()

		// Signal first so forward() interprets the Accept error as shutdown.
		close(f.closeChan)
		err = f.listener.Close()
		if err != nil {
			logger.Warnf("关闭端口转发监听器失败:%v", err)
		}
	})
	return err
}
|
||||
|
||||
// IsClosed reports whether Close has been called on this forwarder.
// Safe for concurrent use (guarded by closedMu).
func (f *LocalForwarder) IsClosed() bool {
	f.closedMu.RLock()
	defer f.closedMu.RUnlock()
	return f.closed
}
|
||||
|
||||
// GetOrCreateLocalForwarder returns a cached forwarder or creates a new one
|
||||
func GetOrCreateLocalForwarder(sshConfig connection.SSHConfig, remoteHost string, remotePort int) (*LocalForwarder, error) {
|
||||
key := fmt.Sprintf("%s:%d:%s->%s:%d",
|
||||
sshConfig.Host, sshConfig.Port, sshConfig.User,
|
||||
remoteHost, remotePort)
|
||||
|
||||
forwarderMu.RLock()
|
||||
forwarder, exists := localForwarders[key]
|
||||
forwarderMu.RUnlock()
|
||||
|
||||
// Check if exists and is still valid
|
||||
if exists && forwarder != nil && !forwarder.IsClosed() {
|
||||
logger.Infof("复用已有端口转发:%s", key)
|
||||
return forwarder, nil
|
||||
}
|
||||
|
||||
// Remove stale forwarder from cache
|
||||
if exists {
|
||||
forwarderMu.Lock()
|
||||
delete(localForwarders, key)
|
||||
forwarderMu.Unlock()
|
||||
}
|
||||
|
||||
forwarder, err := NewLocalForwarder(sshConfig, remoteHost, remotePort)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
forwarderMu.Lock()
|
||||
localForwarders[key] = forwarder
|
||||
forwarderMu.Unlock()
|
||||
|
||||
return forwarder, nil
|
||||
}
|
||||
|
||||
// CloseAllForwarders closes every cached local forwarder and resets the
// cache. Close errors are ignored (Close already logs them). Intended
// for application shutdown.
func CloseAllForwarders() {
	forwarderMu.Lock()
	defer forwarderMu.Unlock()

	for key, forwarder := range localForwarders {
		if forwarder != nil {
			_ = forwarder.Close()
			logger.Infof("已关闭端口转发:%s", key)
		}
	}
	// Replace the map wholesale so no stale entries survive.
	localForwarders = make(map[string]*LocalForwarder)
}
|
||||
|
||||
|
||||
// getSSHClientCacheKey generates the cache key ("host:port:user") for an
// SSH config. Note it deliberately excludes credentials, so two configs
// for the same endpoint/user share one cached client.
func getSSHClientCacheKey(config connection.SSHConfig) string {
	return fmt.Sprintf("%s:%d:%s", config.Host, config.Port, config.User)
}
|
||||
|
||||
// GetOrCreateSSHClient returns a cached SSH client or creates a new one.
//
// A cached client is health-checked by opening (and immediately closing)
// a session; if that fails the dead client is evicted and a new one is
// dialed. NOTE(review): the check-then-create sequence is not atomic —
// two concurrent callers can both dial and one client gets overwritten
// in the cache without being closed; confirm whether callers serialize.
func GetOrCreateSSHClient(config connection.SSHConfig) (*ssh.Client, error) {
	key := getSSHClientCacheKey(config)

	sshClientCacheMu.RLock()
	client, exists := sshClientCache[key]
	sshClientCacheMu.RUnlock()

	if exists && client != nil {
		// Test if connection is still alive by creating a test session
		session, err := client.NewSession()
		if err == nil {
			session.Close()
			logger.Infof("复用已有 SSH 连接:%s", key)
			return client, nil
		}
		// Connection is dead, remove from cache
		logger.Warnf("SSH 连接已断开,重新建立:%s (错误: %v)", key, err)
		sshClientCacheMu.Lock()
		delete(sshClientCache, key)
		sshClientCacheMu.Unlock()
		// Try to close the dead client (best-effort).
		_ = client.Close()
	}

	// Create new SSH client
	client, err := connectSSH(config)
	if err != nil {
		return nil, err
	}

	// Cache the client
	sshClientCacheMu.Lock()
	sshClientCache[key] = client
	sshClientCacheMu.Unlock()

	logger.Infof("已缓存 SSH 连接:%s", key)
	return client, nil
}
|
||||
|
||||
// DialThroughSSH creates a connection through an SSH tunnel.
// This is a generic dialer usable by any database driver: it obtains a
// (possibly cached) SSH client for config and dials network/address on
// the remote side. The caller owns the returned connection; the SSH
// client stays cached for reuse.
func DialThroughSSH(config connection.SSHConfig, network, address string) (net.Conn, error) {
	client, err := GetOrCreateSSHClient(config)
	if err != nil {
		return nil, fmt.Errorf("建立 SSH 连接失败:%w", err)
	}

	conn, err := client.Dial(network, address)
	if err != nil {
		return nil, fmt.Errorf("通过 SSH 隧道连接到 %s 失败:%w", address, err)
	}

	logger.Infof("已通过 SSH 隧道连接到:%s", address)
	return conn, nil
}
|
||||
|
||||
// CloseAllSSHClients closes all cached SSH clients and resets the cache.
// Close errors are ignored. Call after CloseAllForwarders on shutdown,
// since forwarders depend on these clients.
func CloseAllSSHClients() {
	sshClientCacheMu.Lock()
	defer sshClientCacheMu.Unlock()

	for key, client := range sshClientCache {
		if client != nil {
			_ = client.Close()
			logger.Infof("已关闭 SSH 连接:%s", key)
		}
	}
	// Replace the map wholesale so no stale entries survive.
	sshClientCache = make(map[string]*ssh.Client)
}
|
||||
|
||||
|
||||
198
internal/sync/analyze.go
Normal file
198
internal/sync/analyze.go
Normal file
@@ -0,0 +1,198 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TableDiffSummary describes the analyzed difference between one table
// in the source database and its counterpart in the target database.
type TableDiffSummary struct {
	Table     string `json:"table"`              // table name as given in the sync config
	PKColumn  string `json:"pkColumn,omitempty"` // primary-key column used for row matching
	CanSync   bool   `json:"canSync"`            // whether this table can be synchronized
	Inserts   int    `json:"inserts"`            // rows present only in the source
	Updates   int    `json:"updates"`            // rows present in both but differing
	Deletes   int    `json:"deletes"`            // rows present only in the target
	Same      int    `json:"same"`               // rows identical on both sides
	Message   string `json:"message,omitempty"`  // human-readable status or failure reason
	HasSchema bool   `json:"hasSchema,omitempty"` // whether schema sync is part of this run
}
|
||||
|
||||
// SyncAnalyzeResult is the overall outcome of a sync difference analysis:
// a success flag, a human-readable message (set on failure), and one
// per-table summary for each analyzed table.
type SyncAnalyzeResult struct {
	Success bool               `json:"success"`
	Message string             `json:"message"`
	Tables  []TableDiffSummary `json:"tables"`
}
|
||||
|
||||
// Analyze performs a read-only diff between source and target for every table
// in config.Tables and returns per-table insert/update/delete/same counts.
// It never writes to either database. Driver or connection failures abort the
// whole analysis; per-table errors are reported only in that table's summary.
func (s *SyncEngine) Analyze(config SyncConfig) SyncAnalyzeResult {
	result := SyncAnalyzeResult{Success: true, Tables: []TableDiffSummary{}}

	// Decode config.Content into the two flags: "" / "data" (default),
	// "schema" (structure only), "both" (structure + data).
	contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
	syncSchema := false
	syncData := true
	switch contentRaw {
	case "", "data":
		syncData = true
	case "schema":
		syncSchema = true
		syncData = false
	case "both":
		syncSchema = true
		syncData = true
	default:
		// Unknown content value: warn and fall back to data-only analysis.
		s.appendLog(config.JobID, nil, "warn", fmt.Sprintf("未知同步内容 %q,已自动使用仅同步数据", config.Content))
		syncData = true
	}

	totalTables := len(config.Tables)
	s.progress(config.JobID, 0, totalTables, "", "差异分析开始")

	// Build drivers for both sides; a driver failure is fatal to the analysis.
	sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
	if err != nil {
		logger.Error(err, "初始化源数据库驱动失败:类型=%s", config.SourceConfig.Type)
		return SyncAnalyzeResult{Success: false, Message: "初始化源数据库驱动失败: " + err.Error()}
	}
	targetDB, err := db.NewDatabase(config.TargetConfig.Type)
	if err != nil {
		logger.Error(err, "初始化目标数据库驱动失败:类型=%s", config.TargetConfig.Type)
		return SyncAnalyzeResult{Success: false, Message: "初始化目标数据库驱动失败: " + err.Error()}
	}

	// Connect Source
	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		logger.Error(err, "源数据库连接失败:%s", formatConnSummaryForSync(config.SourceConfig))
		return SyncAnalyzeResult{Success: false, Message: "源数据库连接失败: " + err.Error()}
	}
	defer sourceDB.Close()

	// Connect Target
	if err := targetDB.Connect(config.TargetConfig); err != nil {
		logger.Error(err, "目标数据库连接失败:%s", formatConnSummaryForSync(config.TargetConfig))
		return SyncAnalyzeResult{Success: false, Message: "目标数据库连接失败: " + err.Error()}
	}
	defer targetDB.Close()

	for i, tableName := range config.Tables {
		// Each table is analysed inside a closure so early `return`s skip to
		// the next table instead of aborting the whole loop.
		func() {
			s.progress(config.JobID, i, totalTables, tableName, fmt.Sprintf("分析表(%d/%d)", i+1, totalTables))

			summary := TableDiffSummary{
				Table:     tableName,
				CanSync:   false,
				Inserts:   0,
				Updates:   0,
				Deletes:   0,
				Same:      0,
				Message:   "",
				HasSchema: syncSchema,
			}

			// Resolve schema-qualified names for both sides.
			sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
			targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
			sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
			targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

			cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
			if err != nil {
				summary.Message = "获取源表字段失败: " + err.Error()
				result.Tables = append(result.Tables, summary)
				return
			}

			// Schema-only analysis stops here: no data comparison is run.
			if !syncData {
				summary.CanSync = true
				summary.Message = "仅同步结构,未执行数据差异分析"
				result.Tables = append(result.Tables, summary)
				return
			}

			// Data diff requires exactly one primary-key column.
			pkCols := make([]string, 0, 2)
			for _, c := range cols {
				if c.Key == "PRI" || c.Key == "PK" {
					pkCols = append(pkCols, c.Name)
				}
			}
			if len(pkCols) == 0 {
				summary.Message = "无主键,不支持数据对比/同步"
				result.Tables = append(result.Tables, summary)
				return
			}
			if len(pkCols) > 1 {
				summary.Message = fmt.Sprintf("复合主键(%s),暂不支持数据对比/同步", strings.Join(pkCols, ","))
				result.Tables = append(result.Tables, summary)
				return
			}
			summary.PKColumn = pkCols[0]

			// Query data for diff.
			// NOTE(review): both tables are loaded fully into memory — fine
			// for modest tables, may need paging/streaming for large ones.
			sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
			if err != nil {
				summary.Message = "读取源表失败: " + err.Error()
				result.Tables = append(result.Tables, summary)
				return
			}
			targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
			if err != nil {
				summary.Message = "读取目标表失败: " + err.Error()
				result.Tables = append(result.Tables, summary)
				return
			}

			// Index target rows by stringified PK; rows with a nil or blank
			// PK are ignored on both sides.
			pkCol := summary.PKColumn
			targetMap := make(map[string]map[string]interface{}, len(targetRows))
			for _, row := range targetRows {
				if row[pkCol] == nil {
					continue
				}
				pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
				if pkVal == "" || pkVal == "<nil>" {
					continue
				}
				targetMap[pkVal] = row
			}

			sourcePKSet := make(map[string]struct{}, len(sourceRows))
			for _, sRow := range sourceRows {
				if sRow[pkCol] == nil {
					continue
				}
				pkVal := strings.TrimSpace(fmt.Sprintf("%v", sRow[pkCol]))
				if pkVal == "" || pkVal == "<nil>" {
					continue
				}
				sourcePKSet[pkVal] = struct{}{}

				if tRow, exists := targetMap[pkVal]; exists {
					// Row exists on both sides: compare column-by-column via
					// string rendering; only source-row columns are checked.
					changed := false
					for k, v := range sRow {
						if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
							changed = true
							break
						}
					}
					if changed {
						summary.Updates++
					} else {
						summary.Same++
					}
				} else {
					summary.Inserts++
				}
			}

			// Rows present only in the target count as deletes.
			for pkVal := range targetMap {
				if _, ok := sourcePKSet[pkVal]; !ok {
					summary.Deletes++
				}
			}

			summary.CanSync = true
			result.Tables = append(result.Tables, summary)
		}()
	}

	s.progress(config.JobID, totalTables, totalTables, "", "差异分析完成")
	result.Message = fmt.Sprintf("已完成 %d 张表的差异分析", len(result.Tables))
	return result
}
|
||||
164
internal/sync/preview.go
Normal file
164
internal/sync/preview.go
Normal file
@@ -0,0 +1,164 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// PreviewRow is a single sampled insert/delete row in a diff preview.
type PreviewRow struct {
	PK  string                 `json:"pk"`  // stringified primary-key value of the row
	Row map[string]interface{} `json:"row"` // full row as column name -> value
}
|
||||
|
||||
// PreviewUpdateRow is a sampled update in a diff preview, carrying both row
// versions so the UI can render a side-by-side comparison.
type PreviewUpdateRow struct {
	PK             string                 `json:"pk"`             // stringified primary-key value
	ChangedColumns []string               `json:"changedColumns"` // names of columns whose values differ
	Source         map[string]interface{} `json:"source"`         // row as it exists in the source database
	Target         map[string]interface{} `json:"target"`         // row as it exists in the target database
}
|
||||
|
||||
// TableDiffPreview is the result of SyncEngine.Preview for one table: total
// diff counts plus a bounded sample (up to `limit` rows) of each category.
type TableDiffPreview struct {
	Table        string             `json:"table"`        // table name as passed to Preview
	PKColumn     string             `json:"pkColumn"`     // single primary-key column used for matching
	TotalInserts int                `json:"totalInserts"` // total source-only rows (not just the sample)
	TotalUpdates int                `json:"totalUpdates"` // total rows with differing values
	TotalDeletes int                `json:"totalDeletes"` // total target-only rows
	Inserts      []PreviewRow       `json:"inserts"`      // sampled insert rows, capped at `limit`
	Updates      []PreviewUpdateRow `json:"updates"`      // sampled updates, capped at `limit`
	Deletes      []PreviewRow       `json:"deletes"`      // sampled delete rows, capped at `limit`
}
|
||||
|
||||
// Preview returns a bounded sample of the row-level differences for a single
// table: the first `limit` inserts, updates (with changed column names and
// both row versions) and deletes, plus total counts per category. A
// non-positive limit defaults to 200 and values above 500 are clamped to 500.
// Like Analyze, Preview is strictly read-only.
func (s *SyncEngine) Preview(config SyncConfig, tableName string, limit int) (TableDiffPreview, error) {
	// Clamp the per-category sample size to a sane window.
	if limit <= 0 {
		limit = 200
	}
	if limit > 500 {
		limit = 500
	}

	sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("初始化源数据库驱动失败: %w", err)
	}
	targetDB, err := db.NewDatabase(config.TargetConfig.Type)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("初始化目标数据库驱动失败: %w", err)
	}

	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		return TableDiffPreview{}, fmt.Errorf("源数据库连接失败: %w", err)
	}
	defer sourceDB.Close()

	if err := targetDB.Connect(config.TargetConfig); err != nil {
		return TableDiffPreview{}, fmt.Errorf("目标数据库连接失败: %w", err)
	}
	defer targetDB.Close()

	// Resolve schema-qualified names for both sides.
	sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
	targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
	sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
	targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

	cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("获取源表字段失败: %w", err)
	}

	// Preview requires exactly one primary-key column, same as Analyze.
	pkCols := make([]string, 0, 2)
	for _, c := range cols {
		if c.Key == "PRI" || c.Key == "PK" {
			pkCols = append(pkCols, c.Name)
		}
	}
	if len(pkCols) == 0 {
		return TableDiffPreview{}, fmt.Errorf("无主键,不支持数据预览")
	}
	if len(pkCols) > 1 {
		return TableDiffPreview{}, fmt.Errorf("复合主键(%s),暂不支持数据预览", strings.Join(pkCols, ","))
	}
	pkCol := pkCols[0]

	// Full-table reads on both sides (memory-bound; acceptable for preview).
	sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("读取源表失败: %w", err)
	}
	targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("读取目标表失败: %w", err)
	}

	// Index target rows by stringified PK; nil/blank PKs are ignored.
	targetMap := make(map[string]map[string]interface{}, len(targetRows))
	for _, row := range targetRows {
		if row[pkCol] == nil {
			continue
		}
		pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
		if pkVal == "" || pkVal == "<nil>" {
			continue
		}
		targetMap[pkVal] = row
	}

	out := TableDiffPreview{
		Table:        tableName,
		PKColumn:     pkCol,
		TotalInserts: 0,
		TotalUpdates: 0,
		TotalDeletes: 0,
		Inserts:      make([]PreviewRow, 0),
		Updates:      make([]PreviewUpdateRow, 0),
		Deletes:      make([]PreviewRow, 0),
	}

	sourcePKSet := make(map[string]struct{}, len(sourceRows))
	for _, sRow := range sourceRows {
		if sRow[pkCol] == nil {
			continue
		}
		pkVal := strings.TrimSpace(fmt.Sprintf("%v", sRow[pkCol]))
		if pkVal == "" || pkVal == "<nil>" {
			continue
		}
		sourcePKSet[pkVal] = struct{}{}

		if tRow, exists := targetMap[pkVal]; exists {
			// Row exists on both sides: collect the columns whose string
			// rendering differs (source-row columns only).
			changedColumns := make([]string, 0)
			for k, v := range sRow {
				if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
					changedColumns = append(changedColumns, k)
				}
			}
			if len(changedColumns) > 0 {
				out.TotalUpdates++
				// Totals always count; the sample list is capped at `limit`.
				if len(out.Updates) < limit {
					out.Updates = append(out.Updates, PreviewUpdateRow{
						PK:             pkVal,
						ChangedColumns: changedColumns,
						Source:         sRow,
						Target:         tRow,
					})
				}
			}
			continue
		}

		// Source-only row: an insert candidate.
		out.TotalInserts++
		if len(out.Inserts) < limit {
			out.Inserts = append(out.Inserts, PreviewRow{PK: pkVal, Row: sRow})
		}
	}

	// Target-only rows are delete candidates.
	for pkVal, row := range targetMap {
		if _, ok := sourcePKSet[pkVal]; ok {
			continue
		}
		out.TotalDeletes++
		if len(out.Deletes) < limit {
			out.Deletes = append(out.Deletes, PreviewRow{PK: pkVal, Row: row})
		}
	}

	return out, nil
}
|
||||
58
internal/sync/row_selection.go
Normal file
58
internal/sync/row_selection.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func filterRowsByPKSelection(pkCol string, rows []map[string]interface{}, enabled bool, selectedPKs []string) []map[string]interface{} {
|
||||
if !enabled {
|
||||
return nil
|
||||
}
|
||||
if len(rows) == 0 {
|
||||
return rows
|
||||
}
|
||||
if len(selectedPKs) == 0 {
|
||||
return rows
|
||||
}
|
||||
|
||||
set := make(map[string]struct{}, len(selectedPKs))
|
||||
for _, pk := range selectedPKs {
|
||||
set[pk] = struct{}{}
|
||||
}
|
||||
|
||||
out := make([]map[string]interface{}, 0, len(rows))
|
||||
for _, row := range rows {
|
||||
pkStr := fmt.Sprintf("%v", row[pkCol])
|
||||
if _, ok := set[pkStr]; ok {
|
||||
out = append(out, row)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func filterUpdatesByPKSelection(pkCol string, updates []connection.UpdateRow, enabled bool, selectedPKs []string) []connection.UpdateRow {
|
||||
if !enabled {
|
||||
return nil
|
||||
}
|
||||
if len(updates) == 0 {
|
||||
return updates
|
||||
}
|
||||
if len(selectedPKs) == 0 {
|
||||
return updates
|
||||
}
|
||||
|
||||
set := make(map[string]struct{}, len(selectedPKs))
|
||||
for _, pk := range selectedPKs {
|
||||
set[pk] = struct{}{}
|
||||
}
|
||||
|
||||
out := make([]connection.UpdateRow, 0, len(updates))
|
||||
for _, u := range updates {
|
||||
pkStr := fmt.Sprintf("%v", u.Keys[pkCol])
|
||||
if _, ok := set[pkStr]; ok {
|
||||
out = append(out, u)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
97
internal/sync/schema_align.go
Normal file
97
internal/sync/schema_align.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func collectRequiredColumns(inserts []map[string]interface{}, updates []connection.UpdateRow) map[string]string {
|
||||
// key: lower(columnName), value: original columnName
|
||||
required := make(map[string]string)
|
||||
for _, row := range inserts {
|
||||
for k := range row {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := required[key]; !exists {
|
||||
required[key] = k
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, u := range updates {
|
||||
for k := range u.Values {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := required[key]; !exists {
|
||||
required[key] = k
|
||||
}
|
||||
}
|
||||
}
|
||||
return required
|
||||
}
|
||||
|
||||
// filterInsertRows drops from each insert row every column that is not in
// allowedLower (a set of lower-cased column names). Row count is preserved:
// a row whose columns are all filtered out is kept as an empty map. When
// there is nothing to filter (no rows, or no allow-list) the input is
// returned as-is.
func filterInsertRows(inserts []map[string]interface{}, allowedLower map[string]struct{}) []map[string]interface{} {
	if len(inserts) == 0 || len(allowedLower) == 0 {
		return inserts
	}

	filtered := make([]map[string]interface{}, 0, len(inserts))
	for _, row := range inserts {
		if len(row) == 0 {
			filtered = append(filtered, row)
			continue
		}
		kept := make(map[string]interface{}, len(row))
		for col, val := range row {
			if _, ok := allowedLower[strings.ToLower(strings.TrimSpace(col))]; ok {
				kept[col] = val
			}
		}
		filtered = append(filtered, kept)
	}
	return filtered
}
|
||||
|
||||
func filterUpdateRows(updates []connection.UpdateRow, allowedLower map[string]struct{}) []connection.UpdateRow {
|
||||
if len(updates) == 0 || len(allowedLower) == 0 {
|
||||
return updates
|
||||
}
|
||||
|
||||
out := make([]connection.UpdateRow, 0, len(updates))
|
||||
for _, u := range updates {
|
||||
if len(u.Values) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
values := make(map[string]interface{}, len(u.Values))
|
||||
for k, v := range u.Values {
|
||||
if _, ok := allowedLower[strings.ToLower(strings.TrimSpace(k))]; ok {
|
||||
values[k] = v
|
||||
}
|
||||
}
|
||||
if len(values) == 0 {
|
||||
continue
|
||||
}
|
||||
out = append(out, connection.UpdateRow{
|
||||
Keys: u.Keys,
|
||||
Values: values,
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// sanitizeMySQLColumnType returns a column type that is safe to splice into a
// generated DDL statement. Empty types and types containing backticks,
// semicolons or line breaks — characters that could break out of the SQL
// being built — fall back to a plain TEXT column.
func sanitizeMySQLColumnType(t string) string {
	trimmed := strings.TrimSpace(t)
	switch {
	case trimmed == "":
		return "TEXT"
	case strings.ContainsAny(trimmed, "`;\n\r"):
		// Basic guard: never splice suspicious metadata content into SQL.
		return "TEXT"
	default:
		return trimmed
	}
}
|
||||
101
internal/sync/schema_sync.go
Normal file
101
internal/sync/schema_sync.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// syncTableSchema makes the target table's structure a superset of the
// source table's: it creates the target table when missing (MySQL source
// only, by replaying the source's CREATE statement) and adds any source
// columns the target lacks as NULL-able columns. Only MySQL targets are
// supported; other target types are skipped with a warning. Column matching
// is case-insensitive by name; existing columns are never altered or dropped.
func (s *SyncEngine) syncTableSchema(config SyncConfig, res *SyncResult, sourceDB db.Database, targetDB db.Database, tableName string) error {
	targetType := strings.ToLower(strings.TrimSpace(config.TargetConfig.Type))
	if targetType != "mysql" {
		s.appendLog(config.JobID, res, "warn", fmt.Sprintf("目标数据库类型=%s 暂不支持结构同步,已跳过表 %s", config.TargetConfig.Type, tableName))
		return nil
	}

	sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
	targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
	targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

	// 1) Fetch source columns — the structure to replicate.
	sourceCols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
	if err != nil {
		return fmt.Errorf("获取源表字段失败: %w", err)
	}

	// 2) Ensure the target table exists. A GetColumns error is treated as
	// "table missing" and triggers auto-creation from the source DDL.
	// NOTE(review): other GetColumns failures (e.g. connection hiccups) take
	// this path too — confirm that is acceptable.
	targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
	if err != nil {
		sourceType := strings.ToLower(strings.TrimSpace(config.SourceConfig.Type))
		if sourceType != "mysql" {
			// Cross-engine DDL translation is not implemented; only a MySQL
			// source's CREATE statement can be replayed on a MySQL target.
			return fmt.Errorf("目标表不存在且源类型=%s 暂不支持自动建表: %w", config.SourceConfig.Type, err)
		}

		s.appendLog(config.JobID, res, "warn", fmt.Sprintf("目标表 %s 不存在,开始尝试创建表结构", tableName))
		createSQL, errCreate := sourceDB.GetCreateStatement(sourceSchema, sourceTable)
		if errCreate != nil || strings.TrimSpace(createSQL) == "" {
			if errCreate == nil {
				errCreate = fmt.Errorf("建表语句为空")
			}
			return fmt.Errorf("获取源表建表语句失败: %w", errCreate)
		}

		if _, errExec := targetDB.Exec(createSQL); errExec != nil {
			return fmt.Errorf("创建目标表失败: %w", errExec)
		}
		s.appendLog(config.JobID, res, "info", fmt.Sprintf("目标表创建成功:%s", tableName))

		// Re-read so the column diff below runs against the fresh table.
		targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
		if err != nil {
			return fmt.Errorf("创建目标表后获取字段失败: %w", err)
		}
	}

	// Case-insensitive set of existing target column names.
	targetColSet := make(map[string]struct{}, len(targetCols))
	for _, c := range targetCols {
		name := strings.ToLower(strings.TrimSpace(c.Name))
		if name == "" {
			continue
		}
		targetColSet[name] = struct{}{}
	}

	// 3) Add columns the target is missing. Safety policy: new columns are
	// always NULL-able so existing rows remain valid.
	missing := make([]string, 0)
	sourceType := strings.ToLower(strings.TrimSpace(config.SourceConfig.Type))
	for _, c := range sourceCols {
		colName := strings.TrimSpace(c.Name)
		if colName == "" {
			continue
		}
		lower := strings.ToLower(colName)
		if _, ok := targetColSet[lower]; ok {
			continue
		}
		missing = append(missing, colName)

		// Reuse the source column type only when it is MySQL syntax;
		// otherwise fall back to TEXT.
		colType := "TEXT"
		if sourceType == "mysql" {
			colType = sanitizeMySQLColumnType(c.Type)
		}

		alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
			quoteQualifiedIdentByType("mysql", targetQueryTable),
			quoteIdentByType("mysql", colName),
			colType,
		)
		// A failed ALTER is logged but does not abort the remaining columns.
		if _, err := targetDB.Exec(alterSQL); err != nil {
			s.appendLog(config.JobID, res, "error", fmt.Sprintf(" -> 补字段失败:表=%s 字段=%s 错误=%v", tableName, colName, err))
			continue
		}
		s.appendLog(config.JobID, res, "info", fmt.Sprintf(" -> 已补齐字段:表=%s 字段=%s 类型=%s", tableName, colName, colType))
	}

	if len(missing) == 0 {
		s.appendLog(config.JobID, res, "info", fmt.Sprintf("表结构一致:%s", tableName))
	} else {
		s.appendLog(config.JobID, res, "info", fmt.Sprintf("表结构同步完成:%s(新增字段 %d 个)", tableName, len(missing)))
	}

	return nil
}
|
||||
112
internal/sync/sql_helpers.go
Normal file
112
internal/sync/sql_helpers.go
Normal file
@@ -0,0 +1,112 @@
|
||||
package sync
|
||||
|
||||
import "strings"
|
||||
|
||||
// normalizeSyncMode canonicalises a user-supplied sync mode. Recognised
// values are "insert_update", "insert_only" and "full_overwrite"
// (case-insensitive, surrounding whitespace ignored); anything else —
// including the empty string — maps to the default "insert_update".
func normalizeSyncMode(mode string) string {
	switch strings.ToLower(strings.TrimSpace(mode)) {
	case "insert_only":
		return "insert_only"
	case "full_overwrite":
		return "full_overwrite"
	default:
		// Covers "", "insert_update" and every unrecognised value.
		return "insert_update"
	}
}
|
||||
|
||||
// quoteIdentByType quotes a single SQL identifier using the quoting rules of
// the given database type. An empty identifier is returned unchanged.
//
// Fix: the type string is now matched case-insensitively and trimmed,
// consistent with normalizeSchemaAndTable / qualifiedNameForQuery, which
// normalise dbType the same way before switching on it. Previously a config
// type like "MySQL" silently fell through to ANSI double-quote quoting,
// producing invalid MySQL SQL.
//
//   - mysql/mariadb/sphinx: `ident` (embedded ` doubled)
//   - sqlserver:            [ident] (embedded ] doubled)
//   - everything else:      "ident" (embedded " doubled, ANSI SQL)
func quoteIdentByType(dbType string, ident string) string {
	if ident == "" {
		return ident
	}

	switch strings.ToLower(strings.TrimSpace(dbType)) {
	case "mysql", "mariadb", "sphinx":
		return "`" + strings.ReplaceAll(ident, "`", "``") + "`"
	case "sqlserver":
		escaped := strings.ReplaceAll(ident, "]", "]]")
		return "[" + escaped + "]"
	default:
		return `"` + strings.ReplaceAll(ident, `"`, `""`) + `"`
	}
}
|
||||
|
||||
func quoteQualifiedIdentByType(dbType string, ident string) string {
|
||||
raw := strings.TrimSpace(ident)
|
||||
if raw == "" {
|
||||
return raw
|
||||
}
|
||||
|
||||
parts := strings.Split(raw, ".")
|
||||
if len(parts) <= 1 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
|
||||
quotedParts := make([]string, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
part = strings.TrimSpace(part)
|
||||
if part == "" {
|
||||
continue
|
||||
}
|
||||
quotedParts = append(quotedParts, quoteIdentByType(dbType, part))
|
||||
}
|
||||
|
||||
if len(quotedParts) == 0 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
return strings.Join(quotedParts, ".")
|
||||
}
|
||||
|
||||
// normalizeSchemaAndTable resolves the (schema, table) pair for a table
// reference. A name already qualified as "schema.table" wins; otherwise
// PostgreSQL-family databases (postgres/kingbase/vastbase) default to the
// "public" schema and every other type uses the connection's database name
// as the schema. A blank table name returns the trimmed database and an
// empty table.
func normalizeSchemaAndTable(dbType string, dbName string, tableName string) (string, string) {
	table := strings.TrimSpace(tableName)
	database := strings.TrimSpace(dbName)
	if table == "" {
		return database, table
	}

	// Honour an explicit "schema.table" qualification (split at first dot).
	if dot := strings.Index(table, "."); dot >= 0 {
		schema := strings.TrimSpace(table[:dot])
		name := strings.TrimSpace(table[dot+1:])
		if schema != "" && name != "" {
			return schema, name
		}
	}

	switch strings.ToLower(strings.TrimSpace(dbType)) {
	case "postgres", "kingbase", "vastbase":
		return "public", table
	default:
		return database, table
	}
}
|
||||
|
||||
// qualifiedNameForQuery builds the table reference to embed in a SELECT.
// An already-qualified original name (containing a dot) is used verbatim,
// as is a blank original. PostgreSQL-family types qualify with the schema
// (defaulting to "public"), MySQL-family types qualify only when a schema
// is known, and every other type uses the bare table name.
func qualifiedNameForQuery(dbType string, schema string, table string, original string) string {
	raw := strings.TrimSpace(original)
	if raw == "" || strings.Contains(raw, ".") {
		return raw
	}

	trimmedSchema := strings.TrimSpace(schema)
	switch strings.ToLower(strings.TrimSpace(dbType)) {
	case "postgres", "kingbase", "vastbase":
		if table == "" {
			return raw
		}
		if trimmedSchema == "" {
			trimmedSchema = "public"
		}
		return trimmedSchema + "." + table
	case "mysql", "mariadb", "sphinx":
		if trimmedSchema == "" || table == "" {
			return table
		}
		return trimmedSchema + "." + table
	default:
		return table
	}
}
|
||||
@@ -5,15 +5,21 @@ import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// SyncConfig defines the parameters for a synchronization task
|
||||
type SyncConfig struct {
|
||||
SourceConfig connection.ConnectionConfig `json:"sourceConfig"`
|
||||
TargetConfig connection.ConnectionConfig `json:"targetConfig"`
|
||||
Tables []string `json:"tables"` // Tables to sync
|
||||
Mode string `json:"mode"` // "insert_update", "full_overwrite"
|
||||
SourceConfig connection.ConnectionConfig `json:"sourceConfig"`
|
||||
TargetConfig connection.ConnectionConfig `json:"targetConfig"`
|
||||
Tables []string `json:"tables"` // Tables to sync
|
||||
Content string `json:"content,omitempty"` // "data", "schema", "both"
|
||||
Mode string `json:"mode"` // "insert_update", "insert_only", "full_overwrite"
|
||||
JobID string `json:"jobId,omitempty"`
|
||||
AutoAddColumns bool `json:"autoAddColumns,omitempty"` // 自动补齐缺失字段(当前仅 MySQL 目标支持)
|
||||
TableOptions map[string]TableOptions `json:"tableOptions,omitempty"`
|
||||
}
|
||||
|
||||
// SyncResult holds the result of the sync operation
|
||||
@@ -28,21 +34,55 @@ type SyncResult struct {
|
||||
}
|
||||
|
||||
type SyncEngine struct {
|
||||
reporter Reporter
|
||||
}
|
||||
|
||||
func NewSyncEngine() *SyncEngine {
|
||||
return &SyncEngine{}
|
||||
func NewSyncEngine(reporter Reporter) *SyncEngine {
|
||||
return &SyncEngine{reporter: reporter}
|
||||
}
|
||||
|
||||
// CompareAndSync performs the synchronization
|
||||
func (s *SyncEngine) RunSync(config SyncConfig) SyncResult {
|
||||
result := SyncResult{Success: true, Logs: []string{}}
|
||||
logger.Infof("开始数据同步:源=%s 目标=%s 表数量=%d", formatConnSummaryForSync(config.SourceConfig), formatConnSummaryForSync(config.TargetConfig), len(config.Tables))
|
||||
totalTables := len(config.Tables)
|
||||
s.progress(config.JobID, 0, totalTables, "", "开始同步")
|
||||
|
||||
contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
|
||||
syncSchema := false
|
||||
syncData := true
|
||||
switch contentRaw {
|
||||
case "", "data":
|
||||
syncData = true
|
||||
case "schema":
|
||||
syncSchema = true
|
||||
syncData = false
|
||||
case "both":
|
||||
syncSchema = true
|
||||
syncData = true
|
||||
default:
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("未知同步内容 %q,已自动使用仅同步数据", config.Content))
|
||||
syncData = true
|
||||
}
|
||||
|
||||
modeRaw := strings.ToLower(strings.TrimSpace(config.Mode))
|
||||
if modeRaw != "" && modeRaw != "insert_update" && modeRaw != "insert_only" && modeRaw != "full_overwrite" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("未知同步模式 %q,已自动使用 insert_update", config.Mode))
|
||||
}
|
||||
defaultMode := normalizeSyncMode(config.Mode)
|
||||
|
||||
contentLabel := "仅同步数据"
|
||||
if syncSchema && syncData {
|
||||
contentLabel = "同步结构+数据"
|
||||
} else if syncSchema {
|
||||
contentLabel = "仅同步结构"
|
||||
}
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("同步内容:%s;模式:%s;自动补字段:%v", contentLabel, defaultMode, config.AutoAddColumns))
|
||||
|
||||
sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "初始化源数据库驱动失败:类型=%s", config.SourceConfig.Type)
|
||||
return s.fail(result, "初始化源数据库驱动失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "初始化源数据库驱动失败: "+err.Error())
|
||||
}
|
||||
if config.SourceConfig.Type == "custom" {
|
||||
// Custom DB setup would go here if needed
|
||||
@@ -51,133 +91,402 @@ func (s *SyncEngine) RunSync(config SyncConfig) SyncResult {
|
||||
targetDB, err := db.NewDatabase(config.TargetConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "初始化目标数据库驱动失败:类型=%s", config.TargetConfig.Type)
|
||||
return s.fail(result, "初始化目标数据库驱动失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "初始化目标数据库驱动失败: "+err.Error())
|
||||
}
|
||||
|
||||
// Connect Source
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("正在连接源数据库: %s...", config.SourceConfig.Host))
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在连接源数据库: %s...", config.SourceConfig.Host))
|
||||
s.progress(config.JobID, 0, totalTables, "", "连接源数据库")
|
||||
if err := sourceDB.Connect(config.SourceConfig); err != nil {
|
||||
logger.Error(err, "源数据库连接失败:%s", formatConnSummaryForSync(config.SourceConfig))
|
||||
return s.fail(result, "源数据库连接失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "源数据库连接失败: "+err.Error())
|
||||
}
|
||||
defer sourceDB.Close()
|
||||
|
||||
// Connect Target
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("正在连接目标数据库: %s...", config.TargetConfig.Host))
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在连接目标数据库: %s...", config.TargetConfig.Host))
|
||||
s.progress(config.JobID, 0, totalTables, "", "连接目标数据库")
|
||||
if err := targetDB.Connect(config.TargetConfig); err != nil {
|
||||
logger.Error(err, "目标数据库连接失败:%s", formatConnSummaryForSync(config.TargetConfig))
|
||||
return s.fail(result, "目标数据库连接失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "目标数据库连接失败: "+err.Error())
|
||||
}
|
||||
defer targetDB.Close()
|
||||
|
||||
// Iterate Tables
|
||||
for _, tableName := range config.Tables {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("正在同步表: %s", tableName))
|
||||
for i, tableName := range config.Tables {
|
||||
func() {
|
||||
tableMode := defaultMode
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在同步表: %s", tableName))
|
||||
s.progress(config.JobID, i, totalTables, tableName, fmt.Sprintf("同步表(%d/%d)", i+1, totalTables))
|
||||
defer s.progress(config.JobID, i+1, totalTables, tableName, "表处理完成")
|
||||
|
||||
// 1. Get Columns & PKs (Naive approach: assume same schema)
|
||||
cols, err := sourceDB.GetColumns(config.SourceConfig.Database, tableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "获取源表列信息失败:表=%s", tableName)
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("获取表 %s 的列信息失败: %v", tableName, err))
|
||||
continue
|
||||
}
|
||||
|
||||
pkCol := ""
|
||||
for _, col := range cols {
|
||||
if col.Key == "PRI" || col.Key == "PK" {
|
||||
pkCol = col.Name
|
||||
break
|
||||
if syncSchema {
|
||||
s.progress(config.JobID, i, totalTables, tableName, "同步表结构")
|
||||
if err := s.syncTableSchema(config, &result, sourceDB, targetDB, tableName); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("表结构同步失败:表=%s 错误=%v", tableName, err))
|
||||
return
|
||||
}
|
||||
}
|
||||
if !syncData {
|
||||
result.TablesSynced++
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if pkCol == "" {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("跳过表 %s: 未找到主键 (同步需要主键)", tableName))
|
||||
continue
|
||||
}
|
||||
sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
|
||||
targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
|
||||
sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
|
||||
targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)
|
||||
|
||||
// 2. Fetch Data (MEMORY INTENSIVE - PROTOTYPE ONLY)
|
||||
// TODO: Implement paging/streaming
|
||||
sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", tableName))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取源表失败:表=%s", tableName)
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("读取源表 %s 失败: %v", tableName, err))
|
||||
continue
|
||||
}
|
||||
// 1. Get Columns & PKs
|
||||
cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
|
||||
if err != nil {
|
||||
logger.Error(err, "获取源表列信息失败:表=%s", tableName)
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("获取表 %s 的列信息失败: %v", tableName, err))
|
||||
return
|
||||
}
|
||||
sourceColsByLower := make(map[string]connection.ColumnDefinition, len(cols))
|
||||
for _, col := range cols {
|
||||
if strings.TrimSpace(col.Name) == "" {
|
||||
continue
|
||||
}
|
||||
sourceColsByLower[strings.ToLower(strings.TrimSpace(col.Name))] = col
|
||||
}
|
||||
|
||||
targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", tableName))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取目标表失败:表=%s", tableName)
|
||||
// Table might not exist in target?
|
||||
// Check if error is "table not found" -> Try to Create?
|
||||
// For now, assume table exists.
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("读取目标表 %s 失败: %v", tableName, err))
|
||||
continue
|
||||
}
|
||||
pkCols := make([]string, 0, 2)
|
||||
for _, col := range cols {
|
||||
if col.Key == "PRI" || col.Key == "PK" {
|
||||
pkCols = append(pkCols, col.Name)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Compare (In-Memory Hash Map)
|
||||
targetMap := make(map[string]map[string]interface{})
|
||||
for _, row := range targetRows {
|
||||
pkVal := fmt.Sprintf("%v", row[pkCol])
|
||||
targetMap[pkVal] = row
|
||||
}
|
||||
if len(pkCols) == 0 {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("表 %s 未找到主键,已跳过数据同步(避免产生重复数据)", tableName))
|
||||
return
|
||||
}
|
||||
if len(pkCols) > 1 {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("表 %s 为复合主键(%s),当前暂不支持数据同步", tableName, strings.Join(pkCols, ",")))
|
||||
return
|
||||
}
|
||||
pkCol := pkCols[0]
|
||||
|
||||
var inserts []map[string]interface{}
|
||||
var updates []connection.UpdateRow
|
||||
// var deletes []map[string]interface{} // Not implemented in "insert_update" mode usually
|
||||
|
||||
for _, sRow := range sourceRows {
|
||||
pkVal := fmt.Sprintf("%v", sRow[pkCol])
|
||||
|
||||
if tRow, exists := targetMap[pkVal]; exists {
|
||||
// Update? Compare values
|
||||
// Simplified: Compare string representations or iterate keys
|
||||
// For prototype: assume update if exists
|
||||
// Optimization: Check diff
|
||||
changes := make(map[string]interface{})
|
||||
for k, v := range sRow {
|
||||
if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
|
||||
changes[k] = v
|
||||
opts := TableOptions{Insert: true, Update: true, Delete: false}
|
||||
if config.TableOptions != nil {
|
||||
if t, ok := config.TableOptions[tableName]; ok {
|
||||
opts = t
|
||||
// 默认防护:如用户未设置任意一个字段,保持 insert/update 默认 true、delete 默认 false
|
||||
if !t.Insert && !t.Update && !t.Delete {
|
||||
opts = t
|
||||
}
|
||||
}
|
||||
if len(changes) > 0 {
|
||||
updates = append(updates, connection.UpdateRow{
|
||||
Keys: map[string]interface{}{pkCol: pkVal},
|
||||
Values: changes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// Insert
|
||||
inserts = append(inserts, sRow)
|
||||
}
|
||||
}
|
||||
if !opts.Insert && !opts.Update && !opts.Delete {
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("表 %s 未勾选任何操作,已跳过", tableName))
|
||||
return
|
||||
}
|
||||
|
||||
// 4. Apply Changes
|
||||
changeSet := connection.ChangeSet{
|
||||
Inserts: inserts,
|
||||
Updates: updates,
|
||||
}
|
||||
// 2. Fetch Data (MEMORY INTENSIVE - PROTOTYPE ONLY)
|
||||
// TODO: Implement paging/streaming
|
||||
s.progress(config.JobID, i, totalTables, tableName, "读取源表数据")
|
||||
sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取源表失败:表=%s", tableName)
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("读取源表 %s 失败: %v", tableName, err))
|
||||
return
|
||||
}
|
||||
|
||||
if len(inserts) > 0 || len(updates) > 0 {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行", len(inserts), len(updates)))
|
||||
var inserts []map[string]interface{}
|
||||
var updates []connection.UpdateRow
|
||||
|
||||
// We need a BatchApplier interface or assume Database implements ApplyChanges
|
||||
if applier, ok := targetDB.(db.BatchApplier); ok {
|
||||
if err := applier.ApplyChanges(tableName, changeSet); err != nil {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf(" -> 应用变更失败: %v", err))
|
||||
if tableMode == "insert_update" {
|
||||
s.progress(config.JobID, i, totalTables, tableName, "读取目标表数据")
|
||||
targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取目标表失败:表=%s", tableName)
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("读取目标表 %s 失败: %v", tableName, err))
|
||||
return
|
||||
}
|
||||
|
||||
// 3. Compare (In-Memory Hash Map)
|
||||
s.progress(config.JobID, i, totalTables, tableName, "对比差异")
|
||||
targetMap := make(map[string]map[string]interface{})
|
||||
for _, row := range targetRows {
|
||||
if row[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := fmt.Sprintf("%v", row[pkCol])
|
||||
if strings.TrimSpace(pkVal) == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
targetMap[pkVal] = row
|
||||
}
|
||||
sourcePKSet := make(map[string]struct{}, len(sourceRows))
|
||||
|
||||
for _, sRow := range sourceRows {
|
||||
if sRow[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := fmt.Sprintf("%v", sRow[pkCol])
|
||||
if strings.TrimSpace(pkVal) == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
sourcePKSet[pkVal] = struct{}{}
|
||||
|
||||
if tRow, exists := targetMap[pkVal]; exists {
|
||||
changes := make(map[string]interface{})
|
||||
for k, v := range sRow {
|
||||
if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
|
||||
changes[k] = v
|
||||
}
|
||||
}
|
||||
if len(changes) > 0 {
|
||||
updates = append(updates, connection.UpdateRow{
|
||||
Keys: map[string]interface{}{pkCol: sRow[pkCol]},
|
||||
Values: changes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
inserts = append(inserts, sRow)
|
||||
}
|
||||
}
|
||||
|
||||
var deletes []map[string]interface{}
|
||||
if opts.Delete {
|
||||
for pkStr, row := range targetMap {
|
||||
if _, ok := sourcePKSet[pkStr]; ok {
|
||||
continue
|
||||
}
|
||||
deletes = append(deletes, map[string]interface{}{pkCol: row[pkCol]})
|
||||
}
|
||||
}
|
||||
|
||||
// apply operation selection
|
||||
inserts = filterRowsByPKSelection(pkCol, inserts, opts.Insert, opts.SelectedInsertPKs)
|
||||
updates = filterUpdatesByPKSelection(pkCol, updates, opts.Update, opts.SelectedUpdatePKs)
|
||||
deletes = filterRowsByPKSelection(pkCol, deletes, opts.Delete, opts.SelectedDeletePKs)
|
||||
|
||||
changeSet := connection.ChangeSet{
|
||||
Inserts: inserts,
|
||||
Updates: updates,
|
||||
Deletes: deletes,
|
||||
}
|
||||
|
||||
// 4. Align schema (target missing columns)
|
||||
s.progress(config.JobID, i, totalTables, tableName, "检查字段一致性")
|
||||
requiredCols := collectRequiredColumns(changeSet.Inserts, changeSet.Updates)
|
||||
targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err != nil {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 获取目标表字段失败,已跳过字段一致性检查: %v", err))
|
||||
} else {
|
||||
result.RowsInserted += len(inserts)
|
||||
result.RowsUpdated += len(updates)
|
||||
targetColSet := make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
|
||||
missing := make([]string, 0)
|
||||
for lower, original := range requiredCols {
|
||||
if _, ok := targetColSet[lower]; !ok {
|
||||
missing = append(missing, original)
|
||||
}
|
||||
}
|
||||
sort.Strings(missing)
|
||||
|
||||
if len(missing) > 0 {
|
||||
if config.AutoAddColumns && strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个,开始自动补齐: %s", len(missing), strings.Join(missing, ", ")))
|
||||
added := 0
|
||||
for _, colName := range missing {
|
||||
colLower := strings.ToLower(strings.TrimSpace(colName))
|
||||
colType := "TEXT"
|
||||
if strings.ToLower(strings.TrimSpace(config.SourceConfig.Type)) == "mysql" {
|
||||
if srcCol, ok := sourceColsByLower[colLower]; ok {
|
||||
colType = sanitizeMySQLColumnType(srcCol.Type)
|
||||
}
|
||||
}
|
||||
|
||||
alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
|
||||
quoteQualifiedIdentByType("mysql", targetQueryTable),
|
||||
quoteIdentByType("mysql", colName),
|
||||
colType,
|
||||
)
|
||||
if _, err := targetDB.Exec(alterSQL); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 自动补字段失败:字段=%s 错误=%v", colName, err))
|
||||
continue
|
||||
}
|
||||
added++
|
||||
}
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 自动补字段完成:成功=%d 失败=%d", added, len(missing)-added))
|
||||
|
||||
// refresh columns
|
||||
targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err == nil {
|
||||
targetColSet = make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个(未开启自动补齐),将自动忽略:%s", len(missing), strings.Join(missing, ", ")))
|
||||
}
|
||||
|
||||
// filter out still-missing columns to avoid apply failure
|
||||
changeSet.Inserts = filterInsertRows(changeSet.Inserts, targetColSet)
|
||||
changeSet.Updates = filterUpdateRows(changeSet.Updates, targetColSet)
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Apply Changes
|
||||
s.progress(config.JobID, i, totalTables, tableName, "应用变更")
|
||||
|
||||
if len(changeSet.Inserts) > 0 || len(changeSet.Updates) > 0 || len(changeSet.Deletes) > 0 {
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行, 需删除: %d 行", len(changeSet.Inserts), len(changeSet.Updates), len(changeSet.Deletes)))
|
||||
|
||||
if applier, ok := targetDB.(db.BatchApplier); ok {
|
||||
if err := applier.ApplyChanges(targetTable, changeSet); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 应用变更失败: %v", err))
|
||||
} else {
|
||||
result.RowsInserted += len(changeSet.Inserts)
|
||||
result.RowsUpdated += len(changeSet.Updates)
|
||||
result.RowsDeleted += len(changeSet.Deletes)
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "info", " -> 数据一致,无需变更.")
|
||||
}
|
||||
|
||||
result.TablesSynced++
|
||||
return
|
||||
} else {
|
||||
// insert_only / full_overwrite: do not compare target, just insert source rows
|
||||
inserts = sourceRows
|
||||
}
|
||||
|
||||
// full_overwrite: clear target table first
|
||||
if tableMode == "full_overwrite" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 全量覆盖模式:即将清空目标表 %s", tableName))
|
||||
s.progress(config.JobID, i, totalTables, tableName, "清空目标表")
|
||||
clearSQL := ""
|
||||
if strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
|
||||
clearSQL = fmt.Sprintf("TRUNCATE TABLE %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable))
|
||||
} else {
|
||||
clearSQL = fmt.Sprintf("DELETE FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable))
|
||||
}
|
||||
if _, err := targetDB.Exec(clearSQL); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 清空目标表失败: %v", err))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Align schema (target missing columns)
|
||||
s.progress(config.JobID, i, totalTables, tableName, "检查字段一致性")
|
||||
requiredCols := collectRequiredColumns(inserts, updates)
|
||||
targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err != nil {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 获取目标表字段失败,已跳过字段一致性检查: %v", err))
|
||||
} else {
|
||||
targetColSet := make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
|
||||
missing := make([]string, 0)
|
||||
for lower, original := range requiredCols {
|
||||
if _, ok := targetColSet[lower]; !ok {
|
||||
missing = append(missing, original)
|
||||
}
|
||||
}
|
||||
sort.Strings(missing)
|
||||
|
||||
if len(missing) > 0 {
|
||||
if config.AutoAddColumns && strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个,开始自动补齐: %s", len(missing), strings.Join(missing, ", ")))
|
||||
added := 0
|
||||
for _, colName := range missing {
|
||||
colLower := strings.ToLower(strings.TrimSpace(colName))
|
||||
colType := "TEXT"
|
||||
if strings.ToLower(strings.TrimSpace(config.SourceConfig.Type)) == "mysql" {
|
||||
if srcCol, ok := sourceColsByLower[colLower]; ok {
|
||||
colType = sanitizeMySQLColumnType(srcCol.Type)
|
||||
}
|
||||
}
|
||||
|
||||
alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
|
||||
quoteQualifiedIdentByType("mysql", targetQueryTable),
|
||||
quoteIdentByType("mysql", colName),
|
||||
colType,
|
||||
)
|
||||
if _, err := targetDB.Exec(alterSQL); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 自动补字段失败:字段=%s 错误=%v", colName, err))
|
||||
continue
|
||||
}
|
||||
added++
|
||||
}
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 自动补字段完成:成功=%d 失败=%d", added, len(missing)-added))
|
||||
|
||||
// refresh columns
|
||||
targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err == nil {
|
||||
targetColSet = make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个(未开启自动补齐),将自动忽略:%s", len(missing), strings.Join(missing, ", ")))
|
||||
}
|
||||
|
||||
// filter out still-missing columns to avoid apply failure
|
||||
inserts = filterInsertRows(inserts, targetColSet)
|
||||
updates = filterUpdateRows(updates, targetColSet)
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Apply Changes
|
||||
s.progress(config.JobID, i, totalTables, tableName, "应用变更")
|
||||
changeSet := connection.ChangeSet{
|
||||
Inserts: inserts,
|
||||
Updates: updates,
|
||||
}
|
||||
|
||||
if len(changeSet.Inserts) > 0 || len(changeSet.Updates) > 0 {
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行", len(changeSet.Inserts), len(changeSet.Updates)))
|
||||
|
||||
if applier, ok := targetDB.(db.BatchApplier); ok {
|
||||
if err := applier.ApplyChanges(targetTable, changeSet); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 应用变更失败: %v", err))
|
||||
} else {
|
||||
result.RowsInserted += len(changeSet.Inserts)
|
||||
result.RowsUpdated += len(changeSet.Updates)
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
|
||||
}
|
||||
} else {
|
||||
result.Logs = append(result.Logs, " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
|
||||
s.appendLog(config.JobID, &result, "info", " -> 数据一致,无需变更.")
|
||||
}
|
||||
} else {
|
||||
result.Logs = append(result.Logs, " -> 数据一致,无需变更.")
|
||||
}
|
||||
|
||||
result.TablesSynced++
|
||||
result.TablesSynced++
|
||||
}()
|
||||
}
|
||||
|
||||
s.progress(config.JobID, totalTables, totalTables, "", "同步完成")
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -196,9 +505,52 @@ func formatConnSummaryForSync(config connection.ConnectionConfig) string {
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds)
|
||||
}
|
||||
|
||||
func (s *SyncEngine) fail(res SyncResult, msg string) SyncResult {
|
||||
func (s *SyncEngine) appendLog(jobID string, res *SyncResult, level string, msg string) {
|
||||
if res != nil {
|
||||
res.Logs = append(res.Logs, msg)
|
||||
}
|
||||
if s.reporter.OnLog != nil && strings.TrimSpace(jobID) != "" {
|
||||
s.reporter.OnLog(SyncLogEvent{
|
||||
JobID: jobID,
|
||||
Level: level,
|
||||
Message: msg,
|
||||
Ts: time.Now().UnixMilli(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (s *SyncEngine) progress(jobID string, current, total int, table string, stage string) {
|
||||
if s.reporter.OnProgress == nil || strings.TrimSpace(jobID) == "" {
|
||||
return
|
||||
}
|
||||
percent := 0
|
||||
if total <= 0 {
|
||||
if current > 0 {
|
||||
percent = 100
|
||||
}
|
||||
} else {
|
||||
if current < 0 {
|
||||
current = 0
|
||||
}
|
||||
if current > total {
|
||||
current = total
|
||||
}
|
||||
percent = (current * 100) / total
|
||||
}
|
||||
s.reporter.OnProgress(SyncProgressEvent{
|
||||
JobID: jobID,
|
||||
Percent: percent,
|
||||
Current: current,
|
||||
Total: total,
|
||||
Table: table,
|
||||
Stage: stage,
|
||||
})
|
||||
}
|
||||
|
||||
func (s *SyncEngine) fail(jobID string, totalTables int, res SyncResult, msg string) SyncResult {
|
||||
res.Success = false
|
||||
res.Message = msg
|
||||
res.Logs = append(res.Logs, "致命错误: "+msg)
|
||||
s.appendLog(jobID, &res, "error", "致命错误: "+msg)
|
||||
s.progress(jobID, res.TablesSynced, totalTables, "", "同步失败")
|
||||
return res
|
||||
}
|
||||
|
||||
30
internal/sync/sync_events.go
Normal file
30
internal/sync/sync_events.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package sync
|
||||
|
||||
const (
|
||||
EventSyncStart = "sync:start"
|
||||
EventSyncProgress = "sync:progress"
|
||||
EventSyncLog = "sync:log"
|
||||
EventSyncDone = "sync:done"
|
||||
)
|
||||
|
||||
type SyncLogEvent struct {
|
||||
JobID string `json:"jobId"`
|
||||
Level string `json:"level"` // info/warn/error
|
||||
Message string `json:"message"`
|
||||
Ts int64 `json:"ts"` // Unix milli
|
||||
}
|
||||
|
||||
type SyncProgressEvent struct {
|
||||
JobID string `json:"jobId"`
|
||||
Percent int `json:"percent"`
|
||||
Current int `json:"current"` // 已完成表数
|
||||
Total int `json:"total"` // 总表数
|
||||
Table string `json:"table,omitempty"`
|
||||
Stage string `json:"stage,omitempty"`
|
||||
}
|
||||
|
||||
type Reporter struct {
|
||||
OnLog func(event SyncLogEvent)
|
||||
OnProgress func(event SyncProgressEvent)
|
||||
}
|
||||
|
||||
13
internal/sync/table_options.go
Normal file
13
internal/sync/table_options.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package sync
|
||||
|
||||
// TableOptions controls which operations to apply per table, and optional row selection.
|
||||
// 注意:如未指定 Selected*PKs,则表示“同步全部该类型差异数据”;如指定为空数组,则同样表示全部。
|
||||
type TableOptions struct {
|
||||
Insert bool `json:"insert,omitempty"`
|
||||
Update bool `json:"update,omitempty"`
|
||||
Delete bool `json:"delete,omitempty"`
|
||||
|
||||
SelectedInsertPKs []string `json:"selectedInsertPks,omitempty"`
|
||||
SelectedUpdatePKs []string `json:"selectedUpdatePks,omitempty"`
|
||||
SelectedDeletePKs []string `json:"selectedDeletePks,omitempty"`
|
||||
}
|
||||
52
logo.svg
Normal file
52
logo.svg
Normal file
@@ -0,0 +1,52 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512">
|
||||
<defs>
|
||||
<!-- Background: Soft Light Grey -->
|
||||
<linearGradient id="bgSoft" x1="0%" y1="0%" x2="0%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#f5f7fa;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#c3cfe2;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
|
||||
<!-- Hexagon: Solid Tech Pink -->
|
||||
<linearGradient id="solidPink" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#FF5F6D;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#FFC371;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
|
||||
<!-- N: Solid Tech Blue/Cyan -->
|
||||
<linearGradient id="solidCyan" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#00c6ff;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#0072ff;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
|
||||
<filter id="hardShadow" x="-20%" y="-20%" width="140%" height="140%">
|
||||
<feGaussianBlur in="SourceAlpha" stdDeviation="4"/>
|
||||
<feOffset dx="4" dy="4" result="offsetblur"/>
|
||||
<feComponentTransfer>
|
||||
<feFuncA type="linear" slope="0.2"/>
|
||||
</feComponentTransfer>
|
||||
<feMerge>
|
||||
<feMergeNode/>
|
||||
<feMergeNode in="SourceGraphic"/>
|
||||
</feMerge>
|
||||
</filter>
|
||||
</defs>
|
||||
|
||||
<!-- Background -->
|
||||
<rect x="32" y="32" width="448" height="448" rx="100" fill="url(#bgSoft)" />
|
||||
|
||||
<!-- Main Content Centered -->
|
||||
<g transform="translate(106, 106) scale(0.6)" filter="url(#hardShadow)">
|
||||
|
||||
<!-- Hex G -->
|
||||
<path d="M 250 0 L 466 125 L 466 375 L 250 500 L 34 375 L 34 125 Z"
|
||||
fill="none" stroke="url(#solidPink)" stroke-width="45" stroke-linejoin="round"/>
|
||||
|
||||
<!-- G Crossbar -->
|
||||
<path d="M 466 300 L 330 300" stroke="url(#solidPink)" stroke-width="45" stroke-linecap="round"/>
|
||||
|
||||
<!-- Inner N -->
|
||||
<path d="M 160 350 L 160 150 L 340 350 L 340 150"
|
||||
fill="none" stroke="url(#solidCyan)" stroke-width="50" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
22
main.go
22
main.go
@@ -9,6 +9,8 @@ import (
|
||||
"github.com/wailsapp/wails/v2"
|
||||
"github.com/wailsapp/wails/v2/pkg/options"
|
||||
"github.com/wailsapp/wails/v2/pkg/options/assetserver"
|
||||
"github.com/wailsapp/wails/v2/pkg/options/mac"
|
||||
"github.com/wailsapp/wails/v2/pkg/options/windows"
|
||||
)
|
||||
|
||||
//go:embed all:frontend/dist
|
||||
@@ -20,18 +22,30 @@ func main() {
|
||||
|
||||
// Create application with options
|
||||
err := wails.Run(&options.App{
|
||||
Title: "GoNavi",
|
||||
Width: 1024,
|
||||
Height: 768,
|
||||
Title: "GoNavi",
|
||||
Width: 1024,
|
||||
Height: 768,
|
||||
Frameless: true,
|
||||
AssetServer: &assetserver.Options{
|
||||
Assets: assets,
|
||||
},
|
||||
BackgroundColour: &options.RGBA{R: 27, G: 38, B: 54, A: 1},
|
||||
BackgroundColour: &options.RGBA{R: 0, G: 0, B: 0, A: 0},
|
||||
OnStartup: application.Startup,
|
||||
OnShutdown: application.Shutdown,
|
||||
Bind: []interface{}{
|
||||
application,
|
||||
},
|
||||
Windows: &windows.Options{
|
||||
WebviewIsTransparent: true,
|
||||
WindowIsTranslucent: true,
|
||||
BackdropType: windows.Acrylic,
|
||||
DisableWindowIcon: false,
|
||||
DisableFramelessWindowDecorations: false,
|
||||
},
|
||||
Mac: &mac.Options{
|
||||
WebviewIsTransparent: true,
|
||||
WindowIsTranslucent: true,
|
||||
},
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
6
third_party/highgo-pq/.gitignore
vendored
Normal file
6
third_party/highgo-pq/.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
.db
|
||||
*.test
|
||||
*~
|
||||
*.swp
|
||||
.idea
|
||||
.vscode
|
||||
8
third_party/highgo-pq/LICENSE.md
vendored
Normal file
8
third_party/highgo-pq/LICENSE.md
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
Copyright (c) 2011-2013, 'pq' Contributors
|
||||
Portions Copyright (C) 2011 Blake Mizerany
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
36
third_party/highgo-pq/README.md
vendored
Normal file
36
third_party/highgo-pq/README.md
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
# pq - A pure Go postgres driver for Go's database/sql package
|
||||
|
||||
[](https://pkg.go.dev/github.com/lib/pq?tab=doc)
|
||||
|
||||
## Install
|
||||
|
||||
go get github.com/lib/pq
|
||||
|
||||
## Features
|
||||
|
||||
* SSL
|
||||
* Handles bad connections for `database/sql`
|
||||
* Scan `time.Time` correctly (i.e. `timestamp[tz]`, `time[tz]`, `date`)
|
||||
* Scan binary blobs correctly (i.e. `bytea`)
|
||||
* Package for `hstore` support
|
||||
* COPY FROM support
|
||||
* pq.ParseURL for converting urls to connection strings for sql.Open.
|
||||
* Many libpq compatible environment variables
|
||||
* Unix socket support
|
||||
* Notifications: `LISTEN`/`NOTIFY`
|
||||
* pgpass support
|
||||
* GSS (Kerberos) auth
|
||||
|
||||
## Tests
|
||||
|
||||
`go test` is used for testing. See [TESTS.md](TESTS.md) for more details.
|
||||
|
||||
## Status
|
||||
|
||||
This package is currently in maintenance mode, which means:
|
||||
1. It generally does not accept new features.
|
||||
2. It does accept bug fixes and version compatability changes provided by the community.
|
||||
3. Maintainers usually do not resolve reported issues.
|
||||
4. Community members are encouraged to help each other with reported issues.
|
||||
|
||||
For users that require new features or reliable resolution of reported bugs, we recommend using [pgx](https://github.com/jackc/pgx) which is under active development.
|
||||
33
third_party/highgo-pq/TESTS.md
vendored
Normal file
33
third_party/highgo-pq/TESTS.md
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
# Tests
|
||||
|
||||
## Running Tests
|
||||
|
||||
`go test` is used for testing. A running PostgreSQL
|
||||
server is required, with the ability to log in. The
|
||||
database to connect to test with is "pqgotest," on
|
||||
"localhost" but these can be overridden using [environment
|
||||
variables](https://www.postgresql.org/docs/9.3/static/libpq-envars.html).
|
||||
|
||||
Example:
|
||||
|
||||
PGHOST=/run/postgresql go test
|
||||
|
||||
## Benchmarks
|
||||
|
||||
A benchmark suite can be run as part of the tests:
|
||||
|
||||
go test -bench .
|
||||
|
||||
## Example setup (Docker)
|
||||
|
||||
Run a postgres container:
|
||||
|
||||
```
|
||||
docker run --expose 5432:5432 postgres
|
||||
```
|
||||
|
||||
Run tests:
|
||||
|
||||
```
|
||||
PGHOST=localhost PGPORT=5432 PGUSER=postgres PGSSLMODE=disable PGDATABASE=postgres go test
|
||||
```
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user