mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-12 12:19:47 +08:00
Compare commits
72 Commits
fix/table-
...
release/0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d60ac90efe | ||
|
|
20964bcedf | ||
|
|
462ca57907 | ||
|
|
4bfdb2cb6c | ||
|
|
2733596a5d | ||
|
|
6918b56ed9 | ||
|
|
1afb8850ad | ||
|
|
3284eeba17 | ||
|
|
494484eb92 | ||
|
|
6156884455 | ||
|
|
a54b8906a3 | ||
|
|
f477feab2f | ||
|
|
e76e174bfe | ||
|
|
b904c0b107 | ||
|
|
c02e7c12e8 | ||
|
|
a87c801e66 | ||
|
|
7f00139847 | ||
|
|
78c5351399 | ||
|
|
e2acfa51eb | ||
|
|
9a684cd82c | ||
|
|
e3b142053f | ||
|
|
3ca898a950 | ||
|
|
84688e995a | ||
|
|
4d0940636d | ||
|
|
26b79adc5f | ||
|
|
90aa3561be | ||
|
|
ec59023736 | ||
|
|
4a96cb93d2 | ||
|
|
4c322db9d0 | ||
|
|
ed18c8285f | ||
|
|
5f8cedabd8 | ||
|
|
20923989b9 | ||
|
|
210106cde7 | ||
|
|
87aac277ec | ||
|
|
4de3f408c5 | ||
|
|
439625a49c | ||
|
|
884d72f3d3 | ||
|
|
98c1600e13 | ||
|
|
eb594b7741 | ||
|
|
587ed3444b | ||
|
|
e366a61910 | ||
|
|
5986b71c4d | ||
|
|
cb18bc3067 | ||
|
|
d676ac9084 | ||
|
|
7fcbcb2471 | ||
|
|
c680e50e74 | ||
|
|
9685102229 | ||
|
|
3505b4428a | ||
|
|
9ebdf7f053 | ||
|
|
9ad852c10b | ||
|
|
2a8fff4d93 | ||
|
|
eca560b4e5 | ||
|
|
2f475dddc0 | ||
|
|
ad9d8a12be | ||
|
|
095b22951e | ||
|
|
7350a011e3 | ||
|
|
53b5802add | ||
|
|
54e7077317 | ||
|
|
4cb5071b0b | ||
|
|
96de46cf1e | ||
|
|
7d5592d8d9 | ||
|
|
d0ba8822f3 | ||
|
|
140db73ef4 | ||
|
|
7ae5341c1c | ||
|
|
01940e74b7 | ||
|
|
30210bc40e | ||
|
|
e90a3e2db6 | ||
|
|
5df95730d8 | ||
|
|
67a9c454d0 | ||
|
|
c17493952b | ||
|
|
dd258bd46c | ||
|
|
505c89066b |
138
.github/workflows/release.yml
vendored
138
.github/workflows/release.yml
vendored
@@ -131,15 +131,91 @@ jobs:
|
||||
- name: Install Wails
|
||||
run: go install -v github.com/wailsapp/wails/v2/cmd/wails@latest
|
||||
|
||||
- name: Setup MSYS2 Toolchain For DuckDB (Windows AMD64)
|
||||
id: msys2_duckdb
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
continue-on-error: true
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: UCRT64
|
||||
update: true
|
||||
install: >-
|
||||
mingw-w64-ucrt-x86_64-gcc
|
||||
|
||||
- name: Configure DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
function Find-MingwBin([string[]]$candidates) {
|
||||
foreach ($bin in $candidates) {
|
||||
if ([string]::IsNullOrWhiteSpace($bin)) {
|
||||
continue
|
||||
}
|
||||
$gcc = Join-Path $bin 'gcc.exe'
|
||||
$gxx = Join-Path $bin 'g++.exe'
|
||||
if ((Test-Path $gcc) -and (Test-Path $gxx)) {
|
||||
return $bin
|
||||
}
|
||||
}
|
||||
return $null
|
||||
}
|
||||
|
||||
$msys2Outcome = "${{ steps.msys2_duckdb.outcome }}"
|
||||
$msys2Location = "${{ steps.msys2_duckdb.outputs['msys2-location'] }}"
|
||||
$candidateBins = @()
|
||||
if (-not [string]::IsNullOrWhiteSpace($msys2Location)) {
|
||||
$candidateBins += Join-Path $msys2Location 'ucrt64\bin'
|
||||
}
|
||||
$candidateBins += @(
|
||||
'C:\msys64\ucrt64\bin',
|
||||
'D:\a\_temp\msys64\ucrt64\bin'
|
||||
)
|
||||
$candidateBins = @($candidateBins | Select-Object -Unique)
|
||||
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
if (-not $mingwBin) {
|
||||
if ($msys2Outcome -ne 'success') {
|
||||
Write-Warning "⚠️ MSYS2 安装步骤结果为 $msys2Outcome,回退到 UCRT64 本机路径探测"
|
||||
} else {
|
||||
Write-Warning "⚠️ MSYS2 已执行,但未找到 UCRT64 gcc/g++,回退到本机路径探测"
|
||||
}
|
||||
$mingwBin = Find-MingwBin $candidateBins
|
||||
}
|
||||
|
||||
if (-not $mingwBin) {
|
||||
Write-Error "❌ 未找到可用的 DuckDB UCRT64 编译器。已检查:$($candidateBins -join ', ')"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$gcc = (Join-Path $mingwBin 'gcc.exe')
|
||||
$gxx = (Join-Path $mingwBin 'g++.exe')
|
||||
|
||||
if (!(Test-Path $gcc) -or !(Test-Path $gxx)) {
|
||||
Write-Error "❌ DuckDB 编译器缺失:gcc=$gcc g++=$gxx"
|
||||
exit 1
|
||||
}
|
||||
|
||||
"$mingwBin" | Out-File -FilePath $env:GITHUB_PATH -Append -Encoding utf8
|
||||
"CC=$gcc" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
"CXX=$gxx" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
|
||||
Write-Host "✅ 已配置 DuckDB cgo 编译器: gcc=$gcc g++=$gxx"
|
||||
|
||||
- name: Verify DuckDB CGO Toolchain (Windows AMD64)
|
||||
if: ${{ matrix.build_optional_agents && matrix.platform == 'windows/amd64' }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
& "$env:CC" --version
|
||||
& "$env:CXX" --version
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
TAG_ARGS=()
|
||||
if [ -n "${{ matrix.wails_tags }}" ]; then
|
||||
TAG_ARGS+=(-tags "${{ matrix.wails_tags }}")
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} -tags "${{ matrix.wails_tags }}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
else
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
fi
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.build_name }} "${TAG_ARGS[@]}" -ldflags "-s -w -X GoNavi-Wails/internal/app.AppVersion=${{ github.ref_name }}"
|
||||
|
||||
- name: Build Optional Driver Agents
|
||||
if: ${{ matrix.build_optional_agents }}
|
||||
@@ -149,12 +225,20 @@ jobs:
|
||||
TARGET_PLATFORM="${{ matrix.platform }}"
|
||||
GOOS="${TARGET_PLATFORM%%/*}"
|
||||
GOARCH="${TARGET_PLATFORM##*/}"
|
||||
DRIVERS=(mariadb diros sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine)
|
||||
DRIVERS=(mariadb doris sphinx sqlserver sqlite duckdb dameng kingbase highgo vastbase mongodb tdengine clickhouse)
|
||||
OUTDIR="drivers/${{ matrix.os_name }}"
|
||||
mkdir -p "$OUTDIR"
|
||||
|
||||
for DRIVER in "${DRIVERS[@]}"; do
|
||||
TAG="gonavi_${DRIVER}_driver"
|
||||
BUILD_DRIVER="$DRIVER"
|
||||
if [ "$DRIVER" = "doris" ]; then
|
||||
BUILD_DRIVER="diros"
|
||||
fi
|
||||
if [ "$DRIVER" = "duckdb" ] && [ "$GOOS" = "windows" ] && [ "$GOARCH" != "amd64" ]; then
|
||||
echo "⚠️ 跳过 DuckDB driver(当前平台 ${GOOS}/${GOARCH} 不受支持,仅支持 windows/amd64)"
|
||||
continue
|
||||
fi
|
||||
TAG="gonavi_${BUILD_DRIVER}_driver"
|
||||
OUTPUT="${DRIVER}-driver-agent-${GOOS}-${GOARCH}"
|
||||
if [ "$GOOS" = "windows" ]; then
|
||||
OUTPUT="${OUTPUT}.exe"
|
||||
@@ -162,20 +246,12 @@ jobs:
|
||||
OUTPUT_PATH="${OUTDIR}/${OUTPUT}"
|
||||
echo "🔧 构建 ${OUTPUT_PATH} (tag=${TAG})"
|
||||
if [ "$DRIVER" = "duckdb" ]; then
|
||||
set +e
|
||||
CGO_ENABLED=1 GOOS="$GOOS" GOARCH="$GOARCH" go build \
|
||||
-tags "${TAG}" \
|
||||
-trimpath \
|
||||
-ldflags "-s -w" \
|
||||
-o "${OUTPUT_PATH}" \
|
||||
./cmd/optional-driver-agent
|
||||
DUCKDB_RC=$?
|
||||
set -e
|
||||
if [ "${DUCKDB_RC}" -ne 0 ]; then
|
||||
echo "⚠️ DuckDB 代理构建失败(平台 ${GOOS}/${GOARCH}),跳过该资产,不阻断发布"
|
||||
rm -f "${OUTPUT_PATH}"
|
||||
continue
|
||||
fi
|
||||
else
|
||||
CGO_ENABLED=0 GOOS="$GOOS" GOARCH="$GOARCH" go build \
|
||||
-tags "${TAG}" \
|
||||
@@ -203,7 +279,9 @@ jobs:
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
|
||||
echo "🔏 正在进行 Ad-hoc 签名..."
|
||||
codesign --force --options runtime --deep --sign - "$APP_NAME"
|
||||
# 注意:Ad-hoc + hardened runtime(--options runtime)在未配置 entitlements 时,
|
||||
# 可能导致部分 macOS 机型上应用双击无响应。这里保持 Ad-hoc 深签名但禁用 runtime hardened。
|
||||
codesign --force --deep --sign - "$APP_NAME"
|
||||
|
||||
DMG_NAME="${{ matrix.build_name }}.dmg"
|
||||
FINAL_NAME="GoNavi-$VERSION-${{ matrix.os_name }}-${{ matrix.arch_name }}${{ matrix.artifact_suffix }}.dmg"
|
||||
@@ -363,6 +441,38 @@ jobs:
|
||||
- name: List Assets
|
||||
run: ls -R release-assets
|
||||
|
||||
- name: Verify Optional Driver Assets
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cd release-assets
|
||||
|
||||
REQUIRED_FILES=(
|
||||
"drivers/Windows/duckdb-driver-agent-windows-amd64.exe"
|
||||
"drivers/MacOS/duckdb-driver-agent-darwin-amd64"
|
||||
"drivers/MacOS/duckdb-driver-agent-darwin-arm64"
|
||||
"drivers/Linux/duckdb-driver-agent-linux-amd64"
|
||||
"drivers/Windows/clickhouse-driver-agent-windows-amd64.exe"
|
||||
"drivers/MacOS/clickhouse-driver-agent-darwin-amd64"
|
||||
"drivers/MacOS/clickhouse-driver-agent-darwin-arm64"
|
||||
"drivers/Linux/clickhouse-driver-agent-linux-amd64"
|
||||
)
|
||||
|
||||
missing=0
|
||||
for file in "${REQUIRED_FILES[@]}"; do
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "❌ 缺少驱动资产:$file"
|
||||
missing=1
|
||||
else
|
||||
echo "✅ 已找到驱动资产:$file"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$missing" -ne 0 ]; then
|
||||
echo "❌ 可选驱动资产不完整,终止发布"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Package Driver Agents Bundle
|
||||
shell: bash
|
||||
run: |
|
||||
|
||||
228
README.md
228
README.md
@@ -1,4 +1,4 @@
|
||||
# GoNavi - 现代化的轻量级数据库管理工具
|
||||
# GoNavi - A Modern Lightweight Database Client
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
@@ -6,11 +6,51 @@
|
||||
[](LICENSE)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**GoNavi** 是一款基于 **Wails (Go)** 和 **React** 构建的现代化、高性能、跨平台数据库管理客户端。它旨在提供如原生应用般流畅的用户体验,同时保持极低的资源占用。
|
||||
**Language**: English | [简体中文](README.zh-CN.md)
|
||||
|
||||
相比于 Electron 应用,GoNavi 的体积更小(~10MB),启动速度更快,内存占用更低。
|
||||
GoNavi is a modern, high-performance, cross-platform database client built with **Wails (Go)** and **React**.
|
||||
It delivers native-like responsiveness with low resource usage.
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
Compared with many Electron-based clients, GoNavi is typically smaller in binary size (around 10MB class), starts faster, and uses less memory.
|
||||
|
||||
---
|
||||
|
||||
## Project Overview
|
||||
|
||||
GoNavi is designed for developers and DBAs who need a unified desktop experience across multiple databases.
|
||||
|
||||
- **Native-performance architecture**: Wails (Go + WebView) with lightweight runtime overhead.
|
||||
- **Large dataset usability**: virtualized rendering and optimized DataGrid workflows for high-volume tables.
|
||||
- **Unified connectivity**: URI build/parse, SSH tunnel, proxy support, and on-demand driver activation.
|
||||
- **Production-oriented workflow**: SQL editor, object management, batch export/backup, sync tools, execution logs, and update checks.
|
||||
|
||||
## Supported Data Sources
|
||||
|
||||
> `Built-in`: available out of the box.
|
||||
> `Optional driver agent`: install/enable via Driver Manager first.
|
||||
|
||||
| Category | Data Source | Driver Mode | Typical Capabilities |
|
||||
|---|---|---|---|
|
||||
| Relational | MySQL | Built-in | Schema browsing, SQL query, data editing, export/backup |
|
||||
| Relational | PostgreSQL | Built-in | Schema browsing, SQL query, data editing, object management |
|
||||
| Relational | Oracle | Built-in | Query execution, object browsing, data editing |
|
||||
| Cache | Redis | Built-in | Key browsing, command execution, encoding/view switch |
|
||||
| Relational | MariaDB | Optional driver agent | Querying, object management, data editing |
|
||||
| Relational | Doris | Optional driver agent | Querying, object browsing, SQL execution |
|
||||
| Search | Sphinx | Optional driver agent | SphinxQL querying and object browsing |
|
||||
| Relational | SQL Server | Optional driver agent | Schema browsing, SQL query, object management |
|
||||
| File-based | SQLite | Optional driver agent | Local DB browsing, editing, export |
|
||||
| File-based | DuckDB | Optional driver agent | Large-table query, pagination, file-DB workflow |
|
||||
| Domestic DB | Dameng | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | Kingbase | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | HighGo | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Domestic DB | Vastbase | Optional driver agent | Querying, object browsing, data editing |
|
||||
| Document | MongoDB | Optional driver agent | Document query, collection browsing, connection management |
|
||||
| Time-series | TDengine | Optional driver agent | Time-series schema browsing and querying |
|
||||
| Columnar Analytics | ClickHouse | Optional driver agent | Analytical query, object browsing, SQL execution |
|
||||
| Extensibility | Custom Driver/DSN | Custom | Extend to more data sources via Driver + DSN |
|
||||
|
||||
<h2 align="center">📸 Screenshots</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
@@ -24,137 +64,123 @@
|
||||
|
||||
---
|
||||
|
||||
## ✨ 核心特性
|
||||
## Key Features
|
||||
|
||||
### 🚀 极致性能
|
||||
- **零卡顿交互**:采用独创的 "幽灵拖拽" (Ghost Resizing) 技术,在包含数万行数据的表格中调整列宽,依然保持 60fps+ 的丝滑体验。
|
||||
- **虚拟滚动**:轻松处理海量数据展示,拒绝卡顿。
|
||||
### Performance
|
||||
- **Smooth interaction under load**: optimized table interaction (including column resize workflow on large datasets).
|
||||
- **Virtualized rendering**: keeps large result sets responsive.
|
||||
|
||||
### 🔌 多数据库支持
|
||||
- **MySQL**:完整支持,涵盖数据编辑、结构管理与导入导出。
|
||||
- **PostgreSQL**:数据查看与编辑支持,事务提交能力持续完善。
|
||||
- **SQLite**:本地文件数据库支持。
|
||||
- **Oracle**:基础数据访问与编辑支持。
|
||||
- **Dameng(达梦)**:基础数据访问与编辑支持。
|
||||
- **Kingbase(人大金仓)**:基础数据访问与编辑支持。
|
||||
- **TDengine**:时序数据库连接、库表浏览与 SQL 查询支持。
|
||||
- **Redis**:Key/Value 浏览、命令执行、视图与编码切换。
|
||||
- **自定义驱动**:支持配置 Driver/DSN 接入更多数据源。
|
||||
- **SSH 隧道**:内置 SSH 隧道支持,安全连接内网数据库。
|
||||
### Data Management (DataGrid)
|
||||
- In-place cell editing.
|
||||
- Batch insert/update/delete with transaction-oriented submit/rollback.
|
||||
- Large-field popup editor.
|
||||
- Context actions (set NULL, copy/export, etc.).
|
||||
- Smart read/write mode switching based on query context.
|
||||
- Export formats: CSV, Excel (XLSX), JSON, Markdown.
|
||||
|
||||
### 📊 强大的数据管理 (DataGrid)
|
||||
- **所见即所得编辑**:直接在表格中双击单元格修改数据。
|
||||
- **批量事务操作**:支持批量新增、修改、删除,一键提交或回滚事务。
|
||||
- **大字段编辑**:双击大字段自动打开弹窗编辑器,避免卡顿。
|
||||
- **右键上下文菜单**:快速设置 NULL、复制/导出等操作。
|
||||
- **智能上下文**:自动识别单表查询,解锁编辑功能;复杂查询自动切换为只读模式。
|
||||
- **批量导出/备份**:支持表与数据库的批量导出/备份。
|
||||
- **数据导出**:支持 CSV、Excel (XLSX)、JSON、Markdown 等格式。
|
||||
### SQL Editor
|
||||
- Monaco Editor core.
|
||||
- Context-aware completion for databases/tables/columns.
|
||||
- Multi-tab query workflow.
|
||||
|
||||
### 🧰 批量导出/备份
|
||||
- **数据库批量导出**:支持结构导出与结构+数据备份。
|
||||
- **表批量导出**:支持多表一键导出/备份。
|
||||
- **智能上下文检测**:自动判断目标范围,避免误操作。
|
||||
### Batch Export / Backup
|
||||
- Database-level and table-level batch export/backup.
|
||||
- Scope-aware operation flow to reduce mistakes.
|
||||
|
||||
### 🧩 Redis 视图与编码
|
||||
- **视图模式切换**:自动/原始文本/UTF-8/十六进制多模式显示。
|
||||
- **智能解码**:针对二进制值进行 UTF-8 质量判定与中文字符识别。
|
||||
- **命令执行**:内置命令面板快速操作。
|
||||
### Connectivity
|
||||
- URI generation/parsing.
|
||||
- SSH tunnel support.
|
||||
- Proxy support.
|
||||
- Config import/export (JSON).
|
||||
- Optional driver management and activation.
|
||||
|
||||
### 🔄 数据同步与导入导出
|
||||
- **连接配置导入/导出**:支持配置 JSON 导入导出,便于团队共享。
|
||||
- **数据同步**:内置数据同步面板,支持跨库同步任务配置。
|
||||
### Redis Tools
|
||||
- Multi-view value rendering (auto/raw text/UTF-8/hex).
|
||||
- Built-in command execution panel.
|
||||
|
||||
### 🆙 在线更新
|
||||
- **自动更新**:启动/定时/手动检查更新,自动下载并提示重启完成更新。
|
||||
### Observability and Update
|
||||
- SQL execution logs with timing information.
|
||||
- Startup/scheduled/manual update checks.
|
||||
|
||||
### 🧾 可观测性
|
||||
- **SQL 执行日志**:实时查看 SQL 与执行耗时,便于排障与优化。
|
||||
|
||||
### 📝 智能 SQL 编辑器
|
||||
- **Monaco Editor 内核**:集成 VS Code 同款编辑器,体验极佳。
|
||||
- **智能补全**:自动感知当前连接上下文,提供数据库、表名、字段名的实时补全。
|
||||
- **多标签页**:支持多窗口并行操作,像浏览器一样管理你的查询会话。
|
||||
|
||||
### 🎨 现代化 UI
|
||||
- **Ant Design 5**:企业级 UI 设计语言。
|
||||
- **暗黑模式**:内置深色/浅色主题切换,适应不同光照环境。
|
||||
- **响应式布局**:灵活的侧边栏与布局调整。
|
||||
### UI/UX
|
||||
- Ant Design 5 based interface.
|
||||
- Light/Dark themes.
|
||||
- Flexible sidebar and layout behavior.
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ 技术栈
|
||||
## Tech Stack
|
||||
|
||||
* **后端 (Backend)**: Go 1.24 + Wails v2
|
||||
* **前端 (Frontend)**: React 18 + TypeScript + Vite
|
||||
* **UI 框架**: Ant Design 5
|
||||
* **状态管理**: Zustand
|
||||
* **编辑器**: Monaco Editor
|
||||
- **Backend**: Go 1.24 + Wails v2
|
||||
- **Frontend**: React 18 + TypeScript + Vite
|
||||
- **UI**: Ant Design 5
|
||||
- **State Management**: Zustand
|
||||
- **Editor**: Monaco Editor
|
||||
|
||||
---
|
||||
|
||||
## 📦 安装与运行
|
||||
## Installation and Run
|
||||
|
||||
### 前置要求
|
||||
* [Go](https://go.dev/dl/) 1.21+
|
||||
* [Node.js](https://nodejs.org/) 18+
|
||||
* [Wails CLI](https://wails.io/docs/gettingstarted/installation): `go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
### Prerequisites
|
||||
- [Go](https://go.dev/dl/) 1.21+
|
||||
- [Node.js](https://nodejs.org/) 18+
|
||||
- [Wails CLI](https://wails.io/docs/gettingstarted/installation):
|
||||
`go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
|
||||
### 开发模式
|
||||
### Development Mode
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
# Clone
|
||||
git clone https://github.com/Syngnat/GoNavi.git
|
||||
cd GoNavi
|
||||
|
||||
# 启动开发服务器 (支持热重载)
|
||||
# Start development with hot reload
|
||||
wails dev
|
||||
```
|
||||
|
||||
### 编译构建
|
||||
### Build
|
||||
|
||||
```bash
|
||||
# 构建当前平台的可执行文件
|
||||
# Build for current platform
|
||||
wails build
|
||||
|
||||
# 清理并构建 (推荐发布前使用)
|
||||
# Clean build (recommended before release)
|
||||
wails build -clean
|
||||
```
|
||||
|
||||
构建产物将位于 `build/bin` 目录下。
|
||||
Artifacts are generated in `build/bin`.
|
||||
|
||||
### 跨平台编译 (GitHub Actions)
|
||||
### Cross-Platform Release (GitHub Actions)
|
||||
|
||||
本项目内置了 GitHub Actions 流水线,Push `v*` 格式的 Tag 即可自动触发构建并发布 Release。
|
||||
支持构建:
|
||||
* macOS (AMD64 / ARM64)
|
||||
* Windows (AMD64)
|
||||
* Linux (AMD64,提供 WebKitGTK 4.0 与 4.1 变体产物)
|
||||
The repository includes a release workflow.
|
||||
Push a `v*` tag to trigger automated build and release.
|
||||
|
||||
Target artifacts include:
|
||||
- macOS (AMD64 / ARM64)
|
||||
- Windows (AMD64)
|
||||
- Linux (AMD64, WebKitGTK 4.0 and 4.1 variants)
|
||||
|
||||
---
|
||||
|
||||
## ❓ 常见问题 (Troubleshooting)
|
||||
## Troubleshooting
|
||||
|
||||
### macOS 提示 "应用已损坏,无法打开"
|
||||
### macOS: "App is damaged and can’t be opened"
|
||||
|
||||
由于本项目尚未购买 Apple 开发者证书进行签名(Notarization),macOS 的 Gatekeeper 安全机制可能会拦截应用的运行。请按照以下步骤解决:
|
||||
Without Apple notarization, Gatekeeper may block startup.
|
||||
|
||||
1. 将下载的 `GoNavi.app` 拖入 **应用程序** 文件夹。
|
||||
2. 打开 **终端 (Terminal)**。
|
||||
3. 复制并执行以下命令(输入密码时不会显示):
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
4. 或者:在 Finder 中右键点击应用图标,按住 `Control` 键选择 **打开**,然后在弹出的窗口中再次点击 **打开**。
|
||||
1. Move `GoNavi.app` to **Applications**.
|
||||
2. Open **Terminal**.
|
||||
3. Run:
|
||||
|
||||
### Linux 启动报错缺少 `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
|
||||
GoNavi 的 Linux 二进制依赖系统 WebKitGTK 运行库。不同发行版默认版本不同:
|
||||
Or right-click the app in Finder and choose **Open** with Control key flow.
|
||||
|
||||
- Debian 13 / Ubuntu 24.04 及更新版本:通常为 WebKitGTK 4.1
|
||||
- Ubuntu 22.04 / Debian 12 等:通常为 WebKitGTK 4.0
|
||||
### Linux: missing `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
|
||||
如果启动时报错(如 `libwebkit2gtk-4.0.so.37: cannot open shared object file`),请按系统安装对应依赖后重试:
|
||||
GoNavi depends on WebKitGTK runtime libraries.
|
||||
|
||||
```bash
|
||||
# Debian 13 / Ubuntu 24.04+
|
||||
@@ -166,20 +192,20 @@ sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0-18
|
||||
```
|
||||
|
||||
如果你使用的是 Release 中带 `-WebKit41` 后缀的 Linux 产物,请优先在 Debian 13 / Ubuntu 24.04+ 上使用;普通 Linux 产物更适合 WebKitGTK 4.0 运行环境。
|
||||
If you use Linux artifacts with the `-WebKit41` suffix, prefer Debian 13 / Ubuntu 24.04+.
|
||||
|
||||
---
|
||||
|
||||
## 🤝 贡献指南
|
||||
## Contributing
|
||||
|
||||
欢迎提交 Issue 和 Pull Request!
|
||||
Issues and pull requests are welcome.
|
||||
|
||||
1. Fork 本仓库
|
||||
2. 创建你的特性分支 (`git checkout -b feature/AmazingFeature`)
|
||||
3. 提交你的改动 (`git commit -m 'feat: Add some AmazingFeature'`)
|
||||
4. 推送到分支 (`git push origin feature/AmazingFeature`)
|
||||
5. 开启一个 Pull Request
|
||||
1. Fork the repository.
|
||||
2. Create a feature branch.
|
||||
3. Commit your changes.
|
||||
4. Push to your branch.
|
||||
5. Open a pull request.
|
||||
|
||||
## 📄 开源协议
|
||||
## License
|
||||
|
||||
本项目采用 [Apache-2.0 协议](LICENSE) 开源。
|
||||
Licensed under [Apache-2.0](LICENSE).
|
||||
|
||||
194
README.zh-CN.md
Normal file
194
README.zh-CN.md
Normal file
@@ -0,0 +1,194 @@
|
||||
# GoNavi - 现代化轻量级数据库客户端
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
[](https://reactjs.org/)
|
||||
[](LICENSE)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**语言**: [English](README.md) | 简体中文
|
||||
|
||||
GoNavi 是基于 **Wails (Go)** 与 **React** 构建的跨平台数据库管理工具,强调原生性能、低资源占用与多数据源统一工作流。
|
||||
|
||||
相比常见 Electron 客户端,GoNavi 在体积、启动速度和内存占用上更轻量。
|
||||
|
||||
---
|
||||
|
||||
## 项目简介
|
||||
|
||||
GoNavi 面向开发者与 DBA,核心目标是让数据库操作在桌面端做到“快、稳、统一”。
|
||||
|
||||
- **原生性能架构**:Wails(Go + WebView),降低运行时开销。
|
||||
- **大数据可用性**:虚拟滚动 + DataGrid 交互优化,提升大结果集可操作性。
|
||||
- **统一连接能力**:支持 URI 生成/解析、SSH 隧道、代理、驱动按需安装。
|
||||
- **工程化能力完整**:覆盖 SQL 编辑、对象管理、批量导出/备份、数据同步、执行日志、在线更新。
|
||||
|
||||
## 支持的数据源
|
||||
|
||||
> `内置`:主程序开箱即用。
|
||||
> `可选驱动代理`:需在驱动管理中安装启用后可用。
|
||||
|
||||
| 类别 | 数据源 | 驱动模式 | 典型能力 |
|
||||
|---|---|---|---|
|
||||
| 关系型 | MySQL | 内置 | 库表浏览、SQL 查询、数据编辑、导出/备份 |
|
||||
| 关系型 | PostgreSQL | 内置 | 库表浏览、SQL 查询、数据编辑、对象管理 |
|
||||
| 关系型 | Oracle | 内置 | 连接查询、对象浏览、数据编辑 |
|
||||
| 缓存 | Redis | 内置 | Key 浏览、命令执行、编码/视图切换 |
|
||||
| 关系型 | MariaDB | 可选驱动代理 | 连接查询、对象管理、数据编辑 |
|
||||
| 关系型 | Doris | 可选驱动代理 | 连接查询、对象浏览、SQL 执行 |
|
||||
| 搜索 | Sphinx | 可选驱动代理 | SphinxQL 查询与对象浏览 |
|
||||
| 关系型 | SQL Server | 可选驱动代理 | 库表浏览、SQL 查询、对象管理 |
|
||||
| 文件型 | SQLite | 可选驱动代理 | 本地文件库浏览、编辑、导出 |
|
||||
| 文件型 | DuckDB | 可选驱动代理 | 大表查询、分页浏览、文件库管理 |
|
||||
| 国产数据库 | Dameng | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | Kingbase | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | HighGo | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 国产数据库 | Vastbase | 可选驱动代理 | 连接查询、对象浏览、数据编辑 |
|
||||
| 文档型 | MongoDB | 可选驱动代理 | 文档查询、集合浏览、连接管理 |
|
||||
| 时序 | TDengine | 可选驱动代理 | 时序库表浏览、查询分析 |
|
||||
| 列式分析 | ClickHouse | 可选驱动代理 | 分析查询、对象浏览、SQL 执行 |
|
||||
| 扩展接入 | Custom Driver/DSN | 自定义 | 通过 Driver + DSN 接入更多数据源 |
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/224a74e7-65df-4aef-9710-d8e82e3a70c1" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/ec522145-5ceb-4481-ae46-a9251c89bdfc" />
|
||||
<br />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/330ce49b-45f1-4919-ae14-75f7d47e5f73" />
|
||||
<img width="14%" alt="image" src="https://github.com/user-attachments/assets/d15fa9e9-5486-423b-a0e9-53b467e45432" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/f0c57590-d987-4ecf-89b2-64efad60b6d7" />
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## 核心特性
|
||||
|
||||
### 性能与交互
|
||||
- 大数据场景下保持流畅交互(含 DataGrid 列宽拖拽、批量编辑流程优化)。
|
||||
- 虚拟滚动渲染,降低大结果集卡顿风险。
|
||||
|
||||
### 数据管理(DataGrid)
|
||||
- 单元格所见即所得编辑。
|
||||
- 批量新增/修改/删除,支持事务提交与回滚。
|
||||
- 大字段弹窗编辑。
|
||||
- 右键上下文操作(NULL、复制、导出等)。
|
||||
- 根据查询上下文智能切换读写模式。
|
||||
- 支持 CSV / XLSX / JSON / Markdown 导出。
|
||||
|
||||
### SQL 编辑器
|
||||
- 基于 Monaco Editor。
|
||||
- 上下文补全(数据库/表/字段)。
|
||||
- 多标签查询工作流。
|
||||
|
||||
### 连接与驱动
|
||||
- URI 生成与解析。
|
||||
- SSH 隧道、代理支持。
|
||||
- 连接配置 JSON 导入/导出。
|
||||
- 可选驱动安装与启用管理。
|
||||
|
||||
### Redis 工具
|
||||
- 自动/原始文本/UTF-8/十六进制等视图模式。
|
||||
- 内置命令执行面板。
|
||||
|
||||
### 可观测性与更新
|
||||
- SQL 执行日志(含耗时)。
|
||||
- 启动/定时/手动更新检查。
|
||||
|
||||
### UI 体验
|
||||
- Ant Design 5 体系。
|
||||
- 深色/浅色主题切换。
|
||||
- 灵活布局与侧边栏行为。
|
||||
|
||||
---
|
||||
|
||||
## 技术栈
|
||||
|
||||
- **后端**: Go 1.24 + Wails v2
|
||||
- **前端**: React 18 + TypeScript + Vite
|
||||
- **UI 框架**: Ant Design 5
|
||||
- **状态管理**: Zustand
|
||||
- **编辑器**: Monaco Editor
|
||||
|
||||
---
|
||||
|
||||
## 安装与运行
|
||||
|
||||
### 前置要求
|
||||
- [Go](https://go.dev/dl/) 1.21+
|
||||
- [Node.js](https://nodejs.org/) 18+
|
||||
- [Wails CLI](https://wails.io/docs/gettingstarted/installation):
|
||||
`go install github.com/wailsapp/wails/v2/cmd/wails@latest`
|
||||
|
||||
### 开发模式
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
git clone https://github.com/Syngnat/GoNavi.git
|
||||
cd GoNavi
|
||||
|
||||
# 启动开发(热重载)
|
||||
wails dev
|
||||
```
|
||||
|
||||
### 编译构建
|
||||
|
||||
```bash
|
||||
# 构建当前平台
|
||||
wails build
|
||||
|
||||
# 清理后构建(发布前推荐)
|
||||
wails build -clean
|
||||
```
|
||||
|
||||
构建产物位于 `build/bin`。
|
||||
|
||||
### 跨平台发布(GitHub Actions)
|
||||
|
||||
仓库内置发布流水线,推送 `v*` Tag 可自动构建并发布 Release。
|
||||
|
||||
支持目标:
|
||||
- macOS (AMD64 / ARM64)
|
||||
- Windows (AMD64)
|
||||
- Linux (AMD64,含 WebKitGTK 4.0 / 4.1 变体)
|
||||
|
||||
---
|
||||
|
||||
## 常见问题
|
||||
|
||||
### macOS 提示“应用已损坏,无法打开”
|
||||
|
||||
在未进行 Apple Notarization 时,Gatekeeper 可能拦截应用。
|
||||
|
||||
```bash
|
||||
sudo xattr -rd com.apple.quarantine /Applications/GoNavi.app
|
||||
```
|
||||
|
||||
### Linux 缺少 `libwebkit2gtk` / `libjavascriptcoregtk`
|
||||
|
||||
```bash
|
||||
# Debian 13 / Ubuntu 24.04+
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.1-0 libjavascriptcoregtk-4.1-0
|
||||
|
||||
# Ubuntu 22.04 / Debian 12
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libgtk-3-0 libwebkit2gtk-4.0-37 libjavascriptcoregtk-4.0-18
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 贡献指南
|
||||
|
||||
欢迎提交 Issue 与 Pull Request。
|
||||
|
||||
1. Fork 本仓库。
|
||||
2. 创建特性分支。
|
||||
3. 提交改动。
|
||||
4. 推送分支。
|
||||
5. 发起 Pull Request。
|
||||
|
||||
## 开源协议
|
||||
|
||||
本项目采用 [Apache-2.0 协议](LICENSE)。
|
||||
@@ -2,10 +2,13 @@ package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
@@ -16,6 +19,7 @@ type agentRequest struct {
|
||||
Method string `json:"method"`
|
||||
Config *connection.ConnectionConfig `json:"config,omitempty"`
|
||||
Query string `json:"query,omitempty"`
|
||||
TimeoutMs int64 `json:"timeoutMs,omitempty"`
|
||||
DBName string `json:"dbName,omitempty"`
|
||||
TableName string `json:"tableName,omitempty"`
|
||||
Changes *connection.ChangeSet `json:"changes,omitempty"`
|
||||
@@ -47,6 +51,8 @@ const (
|
||||
agentMethodApplyChanges = "applyChanges"
|
||||
)
|
||||
|
||||
const legacyClickHouseDefaultTimeout = 2 * time.Hour
|
||||
|
||||
var (
|
||||
agentDriverType string
|
||||
agentDatabaseFactory func() db.Database
|
||||
@@ -137,14 +143,14 @@ func handleRequest(inst *db.Database, req agentRequest) agentResponse {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
case agentMethodQuery:
|
||||
data, fields, err := (*inst).Query(req.Query)
|
||||
data, fields, err := queryWithOptionalTimeout(*inst, req.Query, req.TimeoutMs)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
resp.Data = data
|
||||
resp.Fields = fields
|
||||
case agentMethodExec:
|
||||
affected, err := (*inst).Exec(req.Query)
|
||||
affected, err := execWithOptionalTimeout(*inst, req.Query, req.TimeoutMs)
|
||||
if err != nil {
|
||||
return fail(resp, err.Error())
|
||||
}
|
||||
@@ -218,7 +224,11 @@ func handleRequest(inst *db.Database, req agentRequest) agentResponse {
|
||||
}
|
||||
|
||||
func writeResponse(writer *bufio.Writer, resp agentResponse) error {
|
||||
payload, err := json.Marshal(resp)
|
||||
// 对响应数据做统一 JSON 安全归一化:
|
||||
// 将 map[any]any(如 duckdb.Map)递归转换为 map[string]any,避免序列化失败导致代理进程退出。
|
||||
safeResp := resp
|
||||
safeResp.Data = normalizeAgentResponseData(resp.Data)
|
||||
payload, err := json.Marshal(safeResp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -234,3 +244,87 @@ func fail(resp agentResponse, errText string) agentResponse {
|
||||
resp.Error = strings.TrimSpace(errText)
|
||||
return resp
|
||||
}
|
||||
|
||||
func normalizeAgentResponseData(v interface{}) interface{} {
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
rv := reflect.ValueOf(v)
|
||||
switch rv.Kind() {
|
||||
case reflect.Pointer, reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
return normalizeAgentResponseData(rv.Elem().Interface())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
out := make(map[string]interface{}, rv.Len())
|
||||
iter := rv.MapRange()
|
||||
for iter.Next() {
|
||||
out[fmt.Sprint(iter.Key().Interface())] = normalizeAgentResponseData(iter.Value().Interface())
|
||||
}
|
||||
return out
|
||||
case reflect.Slice:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
// 保持 []byte 原样,避免改变现有二进制列的 JSON 编码行为(base64)。
|
||||
if rv.Type().Elem().Kind() == reflect.Uint8 {
|
||||
return v
|
||||
}
|
||||
size := rv.Len()
|
||||
items := make([]interface{}, size)
|
||||
for i := 0; i < size; i++ {
|
||||
items[i] = normalizeAgentResponseData(rv.Index(i).Interface())
|
||||
}
|
||||
return items
|
||||
case reflect.Array:
|
||||
size := rv.Len()
|
||||
items := make([]interface{}, size)
|
||||
for i := 0; i < size; i++ {
|
||||
items[i] = normalizeAgentResponseData(rv.Index(i).Interface())
|
||||
}
|
||||
return items
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func queryWithOptionalTimeout(inst db.Database, query string, timeoutMs int64) ([]map[string]interface{}, []string, error) {
|
||||
effectiveTimeoutMs := timeoutMs
|
||||
if effectiveTimeoutMs <= 0 && strings.EqualFold(strings.TrimSpace(agentDriverType), "clickhouse") {
|
||||
effectiveTimeoutMs = int64(legacyClickHouseDefaultTimeout / time.Millisecond)
|
||||
}
|
||||
if effectiveTimeoutMs <= 0 {
|
||||
return inst.Query(query)
|
||||
}
|
||||
if q, ok := inst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(effectiveTimeoutMs)*time.Millisecond)
|
||||
defer cancel()
|
||||
return q.QueryContext(ctx, query)
|
||||
}
|
||||
return inst.Query(query)
|
||||
}
|
||||
|
||||
func execWithOptionalTimeout(inst db.Database, query string, timeoutMs int64) (int64, error) {
|
||||
effectiveTimeoutMs := timeoutMs
|
||||
if effectiveTimeoutMs <= 0 && strings.EqualFold(strings.TrimSpace(agentDriverType), "clickhouse") {
|
||||
effectiveTimeoutMs = int64(legacyClickHouseDefaultTimeout / time.Millisecond)
|
||||
}
|
||||
if effectiveTimeoutMs <= 0 {
|
||||
return inst.Exec(query)
|
||||
}
|
||||
if e, ok := inst.(interface {
|
||||
ExecContext(context.Context, string) (int64, error)
|
||||
}); ok {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(effectiveTimeoutMs)*time.Millisecond)
|
||||
defer cancel()
|
||||
return e.ExecContext(ctx, query)
|
||||
}
|
||||
return inst.Exec(query)
|
||||
}
|
||||
|
||||
172
cmd/optional-driver-agent/main_test.go
Normal file
172
cmd/optional-driver-agent/main_test.go
Normal file
@@ -0,0 +1,172 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type duckMapLike map[any]any
|
||||
|
||||
func TestWriteResponse_NormalizesMapAnyAny(t *testing.T) {
|
||||
resp := agentResponse{
|
||||
ID: 1,
|
||||
Success: true,
|
||||
Data: []map[string]interface{}{
|
||||
{
|
||||
"id": int64(7),
|
||||
"meta": duckMapLike{"k": "v", 2: "two"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
var out bytes.Buffer
|
||||
writer := bufio.NewWriter(&out)
|
||||
if err := writeResponse(writer, resp); err != nil {
|
||||
t.Fatalf("writeResponse 返回错误: %v", err)
|
||||
}
|
||||
|
||||
var decoded struct {
|
||||
Data []map[string]interface{} `json:"data"`
|
||||
}
|
||||
if err := json.Unmarshal(bytes.TrimSpace(out.Bytes()), &decoded); err != nil {
|
||||
t.Fatalf("解码响应失败: %v", err)
|
||||
}
|
||||
|
||||
if len(decoded.Data) != 1 {
|
||||
t.Fatalf("期望 1 行数据,实际 %d", len(decoded.Data))
|
||||
}
|
||||
meta, ok := decoded.Data[0]["meta"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("meta 字段类型异常: %T", decoded.Data[0]["meta"])
|
||||
}
|
||||
if meta["k"] != "v" {
|
||||
t.Fatalf("字符串 key 转换异常: %v", meta["k"])
|
||||
}
|
||||
if meta["2"] != "two" {
|
||||
t.Fatalf("数字 key 未字符串化: %v", meta["2"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeAgentResponseData_KeepByteSlice(t *testing.T) {
|
||||
raw := []byte{0x61, 0x62, 0x63}
|
||||
normalized := normalizeAgentResponseData(raw)
|
||||
out, ok := normalized.([]byte)
|
||||
if !ok {
|
||||
t.Fatalf("期望 []byte,实际 %T", normalized)
|
||||
}
|
||||
if !bytes.Equal(out, raw) {
|
||||
t.Fatalf("[]byte 内容被意外改写: %v", out)
|
||||
}
|
||||
}
|
||||
|
||||
type fakeAgentTimeoutDB struct {
|
||||
queryCalled bool
|
||||
queryContextCalled bool
|
||||
execCalled bool
|
||||
execContextCalled bool
|
||||
deadlineSet bool
|
||||
}
|
||||
|
||||
func (f *fakeAgentTimeoutDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeAgentTimeoutDB) Close() error { return nil }
|
||||
func (f *fakeAgentTimeoutDB) Ping() error { return nil }
|
||||
func (f *fakeAgentTimeoutDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
f.queryCalled = true
|
||||
return nil, nil, errors.New("query should not be called")
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
f.queryContextCalled = true
|
||||
if _, ok := ctx.Deadline(); ok {
|
||||
f.deadlineSet = true
|
||||
}
|
||||
return []map[string]interface{}{{"ok": 1}}, []string{"ok"}, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) Exec(query string) (int64, error) {
|
||||
f.execCalled = true
|
||||
return 0, errors.New("exec should not be called")
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
f.execContextCalled = true
|
||||
if _, ok := ctx.Deadline(); ok {
|
||||
f.deadlineSet = true
|
||||
}
|
||||
return 3, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeAgentTimeoutDB) GetTables(dbName string) ([]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeAgentTimeoutDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestQueryWithOptionalTimeout_UsesQueryContext(t *testing.T) {
|
||||
fake := &fakeAgentTimeoutDB{}
|
||||
data, fields, err := queryWithOptionalTimeout(fake, "SELECT 1", int64((2 * time.Second).Milliseconds()))
|
||||
if err != nil {
|
||||
t.Fatalf("queryWithOptionalTimeout 返回错误: %v", err)
|
||||
}
|
||||
if !fake.queryContextCalled || fake.queryCalled {
|
||||
t.Fatalf("query 调用路径异常,QueryContext=%v Query=%v", fake.queryContextCalled, fake.queryCalled)
|
||||
}
|
||||
if !fake.deadlineSet {
|
||||
t.Fatal("queryWithOptionalTimeout 未设置 deadline")
|
||||
}
|
||||
if len(data) != 1 || len(fields) != 1 || fields[0] != "ok" {
|
||||
t.Fatalf("queryWithOptionalTimeout 返回数据异常: data=%v fields=%v", data, fields)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExecWithOptionalTimeout_UsesExecContext(t *testing.T) {
|
||||
fake := &fakeAgentTimeoutDB{}
|
||||
affected, err := execWithOptionalTimeout(fake, "DELETE FROM t", int64((2 * time.Second).Milliseconds()))
|
||||
if err != nil {
|
||||
t.Fatalf("execWithOptionalTimeout 返回错误: %v", err)
|
||||
}
|
||||
if !fake.execContextCalled || fake.execCalled {
|
||||
t.Fatalf("exec 调用路径异常,ExecContext=%v Exec=%v", fake.execContextCalled, fake.execCalled)
|
||||
}
|
||||
if !fake.deadlineSet {
|
||||
t.Fatal("execWithOptionalTimeout 未设置 deadline")
|
||||
}
|
||||
if affected != 3 {
|
||||
t.Fatalf("受影响行数异常,want=3 got=%d", affected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryWithOptionalTimeout_ClickHouseLegacyModeUsesQueryContext(t *testing.T) {
|
||||
old := agentDriverType
|
||||
agentDriverType = "clickhouse"
|
||||
defer func() { agentDriverType = old }()
|
||||
|
||||
fake := &fakeAgentTimeoutDB{}
|
||||
_, _, err := queryWithOptionalTimeout(fake, "SELECT 1", 0)
|
||||
if err != nil {
|
||||
t.Fatalf("queryWithOptionalTimeout 返回错误: %v", err)
|
||||
}
|
||||
if !fake.queryContextCalled || fake.queryCalled {
|
||||
t.Fatalf("clickhouse legacy query 调用路径异常,QueryContext=%v Query=%v", fake.queryContextCalled, fake.queryCalled)
|
||||
}
|
||||
}
|
||||
12
cmd/optional-driver-agent/provider_clickhouse.go
Normal file
12
cmd/optional-driver-agent/provider_clickhouse.go
Normal file
@@ -0,0 +1,12 @@
|
||||
//go:build gonavi_clickhouse_driver
|
||||
|
||||
package main
|
||||
|
||||
import "GoNavi-Wails/internal/db"
|
||||
|
||||
func init() {
|
||||
agentDriverType = "clickhouse"
|
||||
agentDatabaseFactory = func() db.Database {
|
||||
return &db.ClickHouseDB{}
|
||||
}
|
||||
}
|
||||
@@ -3,79 +3,85 @@
|
||||
"drivers": {
|
||||
"mariadb": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/mariadb"
|
||||
},
|
||||
"diros": {
|
||||
"doris": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/diros"
|
||||
"downloadUrl": "builtin://activate/doris"
|
||||
},
|
||||
"sphinx": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.9.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/sphinx"
|
||||
},
|
||||
"sqlserver": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.9.6",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/sqlserver"
|
||||
},
|
||||
"sqlite": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.44.3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/sqlite"
|
||||
},
|
||||
"duckdb": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "2.5.6",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/duckdb"
|
||||
},
|
||||
"dameng": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.8.22",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/dameng"
|
||||
},
|
||||
"kingbase": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "0.0.0-20201021123113-29bd62a876c3",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/kingbase"
|
||||
},
|
||||
"highgo": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "0.0.0-local",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/highgo"
|
||||
},
|
||||
"vastbase": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.11.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/vastbase"
|
||||
},
|
||||
"mongodb": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "2.5.0",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/mongodb"
|
||||
},
|
||||
"tdengine": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "3.7.8",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/tdengine"
|
||||
},
|
||||
"clickhouse": {
|
||||
"engine": "go",
|
||||
"version": "2.43.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/clickhouse"
|
||||
},
|
||||
"postgres": {
|
||||
"engine": "go",
|
||||
"version": "go-embedded",
|
||||
"version": "1.11.1",
|
||||
"checksumPolicy": "off",
|
||||
"downloadUrl": "builtin://activate/postgres"
|
||||
}
|
||||
|
||||
@@ -57,6 +57,29 @@ body[data-theme='dark'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: #666;
|
||||
}
|
||||
|
||||
/* Scrollbar styling for light mode (transparent-friendly) */
|
||||
body[data-theme='light'] ::-webkit-scrollbar {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-corner {
|
||||
background: transparent;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-thumb {
|
||||
background: rgba(0, 0, 0, 0.18);
|
||||
border-radius: 4px;
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
}
|
||||
body[data-theme='light'] ::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(0, 0, 0, 0.30);
|
||||
border: 2px solid transparent;
|
||||
background-clip: content-box;
|
||||
}
|
||||
|
||||
/* Ensure body background matches theme to avoid white flashes, but kept transparent for window composition */
|
||||
body {
|
||||
transition: color 0.3s;
|
||||
@@ -67,6 +90,51 @@ body[data-theme='dark'] {
|
||||
在透明窗口环境下会显著加剧 GPU 负载 */
|
||||
}
|
||||
|
||||
/* 暗色 + 透明:提升选中/焦点可读性,避免默认蓝色在半透明背景下发灰 */
|
||||
body[data-theme='dark'] .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected,
|
||||
body[data-theme='dark'] .ant-tree .ant-tree-node-content-wrapper.ant-tree-node-selected:hover {
|
||||
background: rgba(246, 196, 83, 0.24) !important;
|
||||
color: rgba(255, 236, 179, 0.98) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox-checked .ant-checkbox-inner {
|
||||
background-color: #f6c453 !important;
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox-indeterminate .ant-checkbox-inner::after {
|
||||
background-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-checkbox:hover .ant-checkbox-inner,
|
||||
body[data-theme='dark'] .ant-checkbox-wrapper:hover .ant-checkbox-inner {
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-radio-checked .ant-radio-inner {
|
||||
border-color: #f6c453 !important;
|
||||
background-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-radio-wrapper:hover .ant-radio-inner,
|
||||
body[data-theme='dark'] .ant-radio:hover .ant-radio-inner {
|
||||
border-color: #f6c453 !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-switch.ant-switch-checked {
|
||||
background: #d8a93b !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-table-tbody > tr.ant-table-row-selected > td,
|
||||
body[data-theme='dark'] .ant-table-tbody .ant-table-row.ant-table-row-selected > .ant-table-cell {
|
||||
background: rgba(246, 196, 83, 0.18) !important;
|
||||
}
|
||||
|
||||
body[data-theme='dark'] .ant-table-tbody > tr.ant-table-row-selected:hover > td,
|
||||
body[data-theme='dark'] .ant-table-tbody .ant-table-row.ant-table-row-selected:hover > .ant-table-cell {
|
||||
background: rgba(246, 196, 83, 0.26) !important;
|
||||
}
|
||||
|
||||
/* 连接配置弹窗:滚动仅在弹窗 body 内部,不使用外层 wrap 滚动条 */
|
||||
.connection-modal-wrap {
|
||||
overflow: hidden !important;
|
||||
@@ -92,3 +160,53 @@ body[data-theme='dark'] {
|
||||
background-color: #ff4d4f !important;
|
||||
color: #fff !important;
|
||||
}
|
||||
|
||||
/* 驱动管理:统一关闭 antd sticky 横向条,仅保留自定义独立横向条 */
|
||||
.driver-manager-table .ant-table-sticky-scroll {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* 仅在独立横向条激活时隐藏表格自身横向滚动条,避免出现双横向条 */
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-content,
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-body {
|
||||
overflow-x: auto !important;
|
||||
-ms-overflow-style: none;
|
||||
scrollbar-width: none;
|
||||
}
|
||||
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-content::-webkit-scrollbar:horizontal,
|
||||
.driver-manager-table-wrap.driver-manager-table-wrap-external-active .driver-manager-table .ant-table-body::-webkit-scrollbar:horizontal {
|
||||
height: 0 !important;
|
||||
}
|
||||
|
||||
.driver-manager-table-wrap {
|
||||
width: 100%;
|
||||
max-width: 100%;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.driver-manager-footer {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.driver-manager-footer-actions {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.driver-manager-hscroll {
|
||||
width: 100%;
|
||||
height: 12px;
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
scrollbar-gutter: stable;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.driver-manager-hscroll-inner {
|
||||
height: 1px;
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,20 +2,28 @@ import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Modal, Form, Input, InputNumber, Button, message, Checkbox, Divider, Select, Alert, Card, Row, Col, Typography, Collapse, Space, Table, Tag } from 'antd';
|
||||
import { DatabaseOutlined, ConsoleSqlOutlined, FileTextOutlined, CloudServerOutlined, AppstoreAddOutlined, CloudOutlined, CheckCircleFilled, CloseCircleFilled } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, GetDriverStatusList, MongoDiscoverMembers, TestConnection, RedisConnect, SelectSSHKeyFile } from '../../wailsjs/go/app/App';
|
||||
import { MongoMemberInfo, SavedConnection } from '../types';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
import { DBGetDatabases, GetDriverStatusList, MongoDiscoverMembers, TestConnection, RedisConnect, SelectDatabaseFile, SelectSSHKeyFile } from '../../wailsjs/go/app/App';
|
||||
import { ConnectionConfig, MongoMemberInfo, SavedConnection } from '../types';
|
||||
|
||||
const { Meta } = Card;
|
||||
const { Text } = Typography;
|
||||
const MAX_URI_LENGTH = 4096;
|
||||
const MAX_URI_HOSTS = 32;
|
||||
const MAX_TIMEOUT_SECONDS = 3600;
|
||||
const STEP1_MODAL_WIDTH = 760;
|
||||
const STEP2_MODAL_WIDTH = 680;
|
||||
const STEP1_MODAL_MIN_BODY_HEIGHT = 460;
|
||||
const STEP1_SIDEBAR_DIVIDER_DARK = 'rgba(255, 255, 255, 0.16)';
|
||||
const STEP1_SIDEBAR_DIVIDER_LIGHT = 'rgba(0, 0, 0, 0.08)';
|
||||
|
||||
const getDefaultPortByType = (type: string) => {
|
||||
switch (type) {
|
||||
case 'mysql': return 3306;
|
||||
case 'doris':
|
||||
case 'diros': return 9030;
|
||||
case 'sphinx': return 9306;
|
||||
case 'clickhouse': return 9000;
|
||||
case 'postgres': return 5432;
|
||||
case 'redis': return 6379;
|
||||
case 'tdengine': return 6041;
|
||||
@@ -33,6 +41,19 @@ const getDefaultPortByType = (type: string) => {
|
||||
}
|
||||
};
|
||||
|
||||
const singleHostUriSchemesByType: Record<string, string[]> = {
|
||||
postgres: ['postgresql', 'postgres'],
|
||||
clickhouse: ['clickhouse'],
|
||||
oracle: ['oracle'],
|
||||
sqlserver: ['sqlserver'],
|
||||
redis: ['redis'],
|
||||
tdengine: ['tdengine'],
|
||||
dameng: ['dameng', 'dm'],
|
||||
kingbase: ['kingbase'],
|
||||
highgo: ['highgo'],
|
||||
vastbase: ['vastbase'],
|
||||
};
|
||||
|
||||
const isFileDatabaseType = (type: string) => type === 'sqlite' || type === 'duckdb';
|
||||
|
||||
type DriverStatusSnapshot = {
|
||||
@@ -58,6 +79,7 @@ const ConnectionModal: React.FC<{
|
||||
const [form] = Form.useForm();
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [useSSH, setUseSSH] = useState(false);
|
||||
const [useProxy, setUseProxy] = useState(false);
|
||||
const [dbType, setDbType] = useState('mysql');
|
||||
const [step, setStep] = useState(1); // 1: Select Type, 2: Configure
|
||||
const [activeGroup, setActiveGroup] = useState(0); // Active category index in step 1
|
||||
@@ -71,14 +93,43 @@ const ConnectionModal: React.FC<{
|
||||
const [typeSelectWarning, setTypeSelectWarning] = useState<{ driverName: string; reason: string } | null>(null);
|
||||
const [driverStatusMap, setDriverStatusMap] = useState<Record<string, DriverStatusSnapshot>>({});
|
||||
const [driverStatusLoaded, setDriverStatusLoaded] = useState(false);
|
||||
const [selectingDbFile, setSelectingDbFile] = useState(false);
|
||||
const [selectingSSHKey, setSelectingSSHKey] = useState(false);
|
||||
const testInFlightRef = useRef(false);
|
||||
const testTimerRef = useRef<number | null>(null);
|
||||
const addConnection = useStore((state) => state.addConnection);
|
||||
const updateConnection = useStore((state) => state.updateConnection);
|
||||
const theme = useStore((state) => state.theme);
|
||||
const appearance = useStore((state) => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const effectiveOpacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const mysqlTopology = Form.useWatch('mysqlTopology', form) || 'single';
|
||||
const mongoTopology = Form.useWatch('mongoTopology', form) || 'single';
|
||||
const mongoSrv = Form.useWatch('mongoSrv', form) || false;
|
||||
const redisTopology = Form.useWatch('redisTopology', form) || 'single';
|
||||
|
||||
const getSectionBg = (darkHex: string) => {
|
||||
if (!darkMode) {
|
||||
return `rgba(245, 245, 245, ${Math.max(effectiveOpacity, 0.92)})`;
|
||||
}
|
||||
const hex = darkHex.replace('#', '');
|
||||
const r = parseInt(hex.substring(0, 2), 16);
|
||||
const g = parseInt(hex.substring(2, 4), 16);
|
||||
const b = parseInt(hex.substring(4, 6), 16);
|
||||
return `rgba(${r}, ${g}, ${b}, ${Math.max(effectiveOpacity, 0.82)})`;
|
||||
};
|
||||
|
||||
const step1SidebarDividerColor = darkMode ? STEP1_SIDEBAR_DIVIDER_DARK : STEP1_SIDEBAR_DIVIDER_LIGHT;
|
||||
const step1SidebarActiveBg = darkMode ? 'rgba(246, 196, 83, 0.20)' : '#e6f4ff';
|
||||
const step1SidebarActiveColor = darkMode ? '#ffd666' : '#1677ff';
|
||||
|
||||
const tunnelSectionStyle: React.CSSProperties = {
|
||||
padding: '12px',
|
||||
background: getSectionBg('#2a2a2a'),
|
||||
borderRadius: 6,
|
||||
marginTop: 12,
|
||||
border: darkMode ? '1px solid rgba(255, 255, 255, 0.16)' : '1px solid rgba(0, 0, 0, 0.06)',
|
||||
};
|
||||
|
||||
const fetchDriverStatusMap = async (): Promise<Record<string, DriverStatusSnapshot>> => {
|
||||
const result: Record<string, DriverStatusSnapshot> = {};
|
||||
@@ -309,6 +360,41 @@ const ConnectionModal: React.FC<{
|
||||
};
|
||||
};
|
||||
|
||||
const parseSingleHostUri = (
|
||||
uriText: string,
|
||||
expectedSchemes: string[],
|
||||
defaultPort: number,
|
||||
): { host: string; port: number; username: string; password: string; database: string } | null => {
|
||||
let parsed: ReturnType<typeof parseMultiHostUri> | null = null;
|
||||
for (const scheme of expectedSchemes) {
|
||||
parsed = parseMultiHostUri(uriText, scheme);
|
||||
if (parsed) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
if (!parsed.hosts.length || parsed.hosts.length > MAX_URI_HOSTS) {
|
||||
return null;
|
||||
}
|
||||
if (parsed.hosts.some((entry) => !isValidUriHostEntry(entry))) {
|
||||
return null;
|
||||
}
|
||||
const hostList = normalizeAddressList(parsed.hosts, defaultPort);
|
||||
if (!hostList.length) {
|
||||
return null;
|
||||
}
|
||||
const primary = parseHostPort(hostList[0] || `localhost:${defaultPort}`, defaultPort);
|
||||
return {
|
||||
host: primary?.host || 'localhost',
|
||||
port: primary?.port || defaultPort,
|
||||
username: parsed.username,
|
||||
password: parsed.password,
|
||||
database: parsed.database || '',
|
||||
};
|
||||
};
|
||||
|
||||
const parseUriToValues = (uriText: string, type: string): Record<string, any> | null => {
|
||||
const trimmedUri = String(uriText || '').trim();
|
||||
if (!trimmedUri) {
|
||||
@@ -364,6 +450,35 @@ const ConnectionModal: React.FC<{
|
||||
return { host: normalizeFileDbPath(safeDecode(rawPath)) };
|
||||
}
|
||||
|
||||
if (type === 'redis') {
|
||||
const parsed = parseMultiHostUri(trimmedUri, 'redis');
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
if (!parsed.hosts.length || parsed.hosts.length > MAX_URI_HOSTS) {
|
||||
return null;
|
||||
}
|
||||
if (parsed.hosts.some((entry) => !isValidUriHostEntry(entry))) {
|
||||
return null;
|
||||
}
|
||||
const hostList = normalizeAddressList(parsed.hosts, 6379);
|
||||
if (!hostList.length) {
|
||||
return null;
|
||||
}
|
||||
const primary = parseHostPort(hostList[0] || 'localhost:6379', 6379);
|
||||
const topologyParam = String(parsed.params.get('topology') || '').toLowerCase();
|
||||
const dbText = String(parsed.database || '').trim().replace(/^\//, '');
|
||||
const dbIndex = Number(dbText);
|
||||
return {
|
||||
host: primary?.host || 'localhost',
|
||||
port: primary?.port || 6379,
|
||||
password: parsed.password || '',
|
||||
redisTopology: hostList.length > 1 || topologyParam === 'cluster' ? 'cluster' : 'single',
|
||||
redisHosts: hostList.slice(1),
|
||||
redisDB: Number.isFinite(dbIndex) && dbIndex >= 0 && dbIndex <= 15 ? Math.trunc(dbIndex) : 0,
|
||||
};
|
||||
}
|
||||
|
||||
if (type === 'mongodb') {
|
||||
const parsed = parseMultiHostUri(trimmedUri, 'mongodb') || parseMultiHostUri(trimmedUri, 'mongodb+srv');
|
||||
if (!parsed) {
|
||||
@@ -406,6 +521,25 @@ const ConnectionModal: React.FC<{
|
||||
};
|
||||
}
|
||||
|
||||
const singleHostSchemes = singleHostUriSchemesByType[type];
|
||||
if (singleHostSchemes && singleHostSchemes.length > 0) {
|
||||
const parsed = parseSingleHostUri(trimmedUri, singleHostSchemes, getDefaultPortByType(type));
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
if (type === 'oracle' && !String(parsed.database || '').trim()) {
|
||||
// Oracle 需要显式 service name,避免 URI 解析后放过必填校验。
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
host: parsed.host,
|
||||
port: parsed.port,
|
||||
user: parsed.username,
|
||||
password: parsed.password,
|
||||
database: parsed.database,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
@@ -429,7 +563,7 @@ const ConnectionModal: React.FC<{
|
||||
const getUriPlaceholder = () => {
|
||||
if (dbType === 'mysql' || dbType === 'mariadb' || dbType === 'diros' || dbType === 'sphinx') {
|
||||
const defaultPort = getDefaultPortByType(dbType);
|
||||
const scheme = dbType === 'diros' ? 'diros' : 'mysql';
|
||||
const scheme = dbType === 'diros' ? 'doris' : 'mysql';
|
||||
return `${scheme}://user:pass@127.0.0.1:${defaultPort},127.0.0.2:${defaultPort}/db_name?topology=replica`;
|
||||
}
|
||||
if (isFileDatabaseType(dbType)) {
|
||||
@@ -440,6 +574,15 @@ const ConnectionModal: React.FC<{
|
||||
if (dbType === 'mongodb') {
|
||||
return 'mongodb+srv://user:pass@cluster0.example.com/db_name?authSource=admin&authMechanism=SCRAM-SHA-256';
|
||||
}
|
||||
if (dbType === 'clickhouse') {
|
||||
return 'clickhouse://default:pass@127.0.0.1:9000/default';
|
||||
}
|
||||
if (dbType === 'redis') {
|
||||
return 'redis://:pass@127.0.0.1:6379,127.0.0.2:6379/0?topology=cluster';
|
||||
}
|
||||
if (dbType === 'oracle') {
|
||||
return 'oracle://user:pass@127.0.0.1:1521/ORCLPDB1';
|
||||
}
|
||||
return '例如: postgres://user:pass@127.0.0.1:5432/db_name';
|
||||
};
|
||||
|
||||
@@ -471,10 +614,30 @@ const ConnectionModal: React.FC<{
|
||||
}
|
||||
const dbPath = database ? `/${encodeURIComponent(database)}` : '/';
|
||||
const query = params.toString();
|
||||
const scheme = type === 'diros' ? 'diros' : 'mysql';
|
||||
const scheme = type === 'diros' ? 'doris' : 'mysql';
|
||||
return `${scheme}://${encodedAuth}${hosts.join(',')}${dbPath}${query ? `?${query}` : ''}`;
|
||||
}
|
||||
|
||||
if (type === 'redis') {
|
||||
const primary = toAddress(host, port, 6379);
|
||||
const clusterHosts = values.redisTopology === 'cluster'
|
||||
? normalizeAddressList(values.redisHosts, 6379)
|
||||
: [];
|
||||
const hosts = normalizeAddressList([primary, ...clusterHosts], 6379);
|
||||
const params = new URLSearchParams();
|
||||
if (hosts.length > 1 || values.redisTopology === 'cluster') {
|
||||
params.set('topology', 'cluster');
|
||||
}
|
||||
const redisPassword = String(values.password || '');
|
||||
const redisAuth = redisPassword ? `:${encodeURIComponent(redisPassword)}@` : '';
|
||||
const redisDB = Number.isFinite(Number(values.redisDB))
|
||||
? Math.max(0, Math.min(15, Math.trunc(Number(values.redisDB))))
|
||||
: 0;
|
||||
const dbPath = `/${redisDB}`;
|
||||
const query = params.toString();
|
||||
return `redis://${redisAuth}${hosts.join(',')}${dbPath}${query ? `?${query}` : ''}`;
|
||||
}
|
||||
|
||||
if (isFileDatabaseType(type)) {
|
||||
const pathText = normalizeFileDbPath(String(values.host || '').trim());
|
||||
if (!pathText) {
|
||||
@@ -603,6 +766,30 @@ const ConnectionModal: React.FC<{
|
||||
}
|
||||
};
|
||||
|
||||
const handleSelectDatabaseFile = async () => {
|
||||
if (selectingDbFile) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
setSelectingDbFile(true);
|
||||
const currentPath = String(form.getFieldValue('host') || '').trim();
|
||||
const res = await SelectDatabaseFile(currentPath, dbType);
|
||||
if (res?.success) {
|
||||
const data = res.data || {};
|
||||
const selectedPath = typeof data === 'string' ? data : String(data.path || '').trim();
|
||||
if (selectedPath) {
|
||||
form.setFieldValue('host', normalizeFileDbPath(selectedPath));
|
||||
}
|
||||
} else if (res?.message !== 'Cancelled') {
|
||||
message.error(`选择数据库文件失败: ${res?.message || '未知错误'}`);
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error(`选择数据库文件失败: ${e?.message || String(e)}`);
|
||||
} finally {
|
||||
setSelectingDbFile(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
setTestResult(null); // Reset test result
|
||||
@@ -636,8 +823,10 @@ const ConnectionModal: React.FC<{
|
||||
: (primaryAddress?.port || Number(config.port || defaultPort));
|
||||
const mysqlReplicaHosts = (configType === 'mysql' || configType === 'mariadb' || configType === 'diros' || configType === 'sphinx') ? normalizedHosts.slice(1) : [];
|
||||
const mongoHosts = configType === 'mongodb' ? normalizedHosts.slice(1) : [];
|
||||
const redisHosts = configType === 'redis' ? normalizedHosts.slice(1) : [];
|
||||
const mysqlIsReplica = String(config.topology || '').toLowerCase() === 'replica' || mysqlReplicaHosts.length > 0;
|
||||
const mongoIsReplica = String(config.topology || '').toLowerCase() === 'replica' || mongoHosts.length > 0 || !!config.replicaSet;
|
||||
const redisIsCluster = String(config.topology || '').toLowerCase() === 'cluster' || redisHosts.length > 0;
|
||||
form.setFieldsValue({
|
||||
type: configType,
|
||||
name: initialValues.name,
|
||||
@@ -655,6 +844,12 @@ const ConnectionModal: React.FC<{
|
||||
sshUser: config.ssh?.user,
|
||||
sshPassword: config.ssh?.password,
|
||||
sshKeyPath: config.ssh?.keyPath,
|
||||
useProxy: config.useProxy,
|
||||
proxyType: config.proxy?.type || 'socks5',
|
||||
proxyHost: config.proxy?.host,
|
||||
proxyPort: config.proxy?.port,
|
||||
proxyUser: config.proxy?.user,
|
||||
proxyPassword: config.proxy?.password,
|
||||
driver: config.driver,
|
||||
dsn: config.dsn,
|
||||
timeout: config.timeout || 30,
|
||||
@@ -664,16 +859,20 @@ const ConnectionModal: React.FC<{
|
||||
mysqlReplicaPassword: config.mysqlReplicaPassword || '',
|
||||
mongoTopology: mongoIsReplica ? 'replica' : 'single',
|
||||
mongoHosts: mongoHosts,
|
||||
redisTopology: redisIsCluster ? 'cluster' : 'single',
|
||||
redisHosts: redisHosts,
|
||||
mongoSrv: !!config.mongoSrv,
|
||||
mongoReplicaSet: config.replicaSet || '',
|
||||
mongoAuthSource: config.authSource || '',
|
||||
mongoReadPreference: config.readPreference || 'primary',
|
||||
mongoAuthMechanism: config.mongoAuthMechanism || '',
|
||||
savePassword: config.savePassword !== false,
|
||||
redisDB: Number.isFinite(Number(config.redisDB)) ? Number(config.redisDB) : 0,
|
||||
mongoReplicaUser: config.mongoReplicaUser || '',
|
||||
mongoReplicaPassword: config.mongoReplicaPassword || ''
|
||||
});
|
||||
setUseSSH(config.useSSH || false);
|
||||
setUseProxy(config.useProxy || false);
|
||||
setDbType(configType);
|
||||
// 如果是 Redis 编辑模式,设置已保存的 Redis 数据库列表
|
||||
if (configType === 'redis') {
|
||||
@@ -684,6 +883,7 @@ const ConnectionModal: React.FC<{
|
||||
setStep(1);
|
||||
form.resetFields();
|
||||
setUseSSH(false);
|
||||
setUseProxy(false);
|
||||
setDbType('mysql');
|
||||
setActiveGroup(0);
|
||||
}
|
||||
@@ -733,6 +933,7 @@ const ConnectionModal: React.FC<{
|
||||
setLoading(false);
|
||||
form.resetFields();
|
||||
setUseSSH(false);
|
||||
setUseProxy(false);
|
||||
setDbType('mysql');
|
||||
setStep(1);
|
||||
onClose();
|
||||
@@ -781,7 +982,6 @@ const ConnectionModal: React.FC<{
|
||||
if (res.success) {
|
||||
setTestResult({ type: 'success', message: res.message });
|
||||
if (isRedisType) {
|
||||
// Redis: generate database list 0-15
|
||||
setRedisDbList(Array.from({ length: 16 }, (_, i) => i));
|
||||
} else {
|
||||
// Other databases: fetch database list
|
||||
@@ -852,7 +1052,7 @@ const ConnectionModal: React.FC<{
|
||||
}
|
||||
};
|
||||
|
||||
const buildConfig = async (values: any, forPersist: boolean) => {
|
||||
const buildConfig = async (values: any, forPersist: boolean): Promise<ConnectionConfig> => {
|
||||
const mergedValues = { ...values };
|
||||
const parsedUriValues = parseUriToValues(mergedValues.uri, mergedValues.type);
|
||||
const isEmptyField = (value: unknown) => (
|
||||
@@ -890,7 +1090,7 @@ const ConnectionModal: React.FC<{
|
||||
}
|
||||
|
||||
let hosts: string[] = [];
|
||||
let topology: 'single' | 'replica' | undefined;
|
||||
let topology: 'single' | 'replica' | 'cluster' | undefined;
|
||||
let replicaSet = '';
|
||||
let authSource = '';
|
||||
let readPreference = '';
|
||||
@@ -944,6 +1144,22 @@ const ConnectionModal: React.FC<{
|
||||
mongoAuthMechanism = String(mergedValues.mongoAuthMechanism || '').trim().toUpperCase();
|
||||
}
|
||||
|
||||
if (type === 'redis') {
|
||||
const clusterNodes = mergedValues.redisTopology === 'cluster'
|
||||
? normalizeAddressList(mergedValues.redisHosts, defaultPort)
|
||||
: [];
|
||||
const allHosts = normalizeAddressList([`${primaryHost}:${primaryPort}`, ...clusterNodes], defaultPort);
|
||||
if (mergedValues.redisTopology === 'cluster' || allHosts.length > 1) {
|
||||
hosts = allHosts;
|
||||
topology = 'cluster';
|
||||
} else {
|
||||
topology = 'single';
|
||||
}
|
||||
mergedValues.redisDB = Number.isFinite(Number(mergedValues.redisDB))
|
||||
? Math.max(0, Math.min(15, Math.trunc(Number(mergedValues.redisDB))))
|
||||
: 0;
|
||||
}
|
||||
|
||||
const sshConfig = mergedValues.useSSH ? {
|
||||
host: mergedValues.sshHost,
|
||||
port: Number(mergedValues.sshPort),
|
||||
@@ -951,6 +1167,22 @@ const ConnectionModal: React.FC<{
|
||||
password: mergedValues.sshPassword || "",
|
||||
keyPath: mergedValues.sshKeyPath || ""
|
||||
} : { host: "", port: 22, user: "", password: "", keyPath: "" };
|
||||
const effectiveUseProxy = !isFileDbType && !!mergedValues.useProxy;
|
||||
const proxyTypeRaw = String(mergedValues.proxyType || 'socks5').toLowerCase();
|
||||
const proxyType: 'socks5' | 'http' = proxyTypeRaw === 'http' ? 'http' : 'socks5';
|
||||
const proxyConfig: NonNullable<ConnectionConfig['proxy']> = effectiveUseProxy ? {
|
||||
type: proxyType,
|
||||
host: String(mergedValues.proxyHost || '').trim(),
|
||||
port: Number(mergedValues.proxyPort || (proxyTypeRaw === 'http' ? 8080 : 1080)),
|
||||
user: String(mergedValues.proxyUser || '').trim(),
|
||||
password: mergedValues.proxyPassword || "",
|
||||
} : {
|
||||
type: 'socks5',
|
||||
host: '',
|
||||
port: 1080,
|
||||
user: '',
|
||||
password: '',
|
||||
};
|
||||
|
||||
const keepPassword = !forPersist || savePassword;
|
||||
|
||||
@@ -964,9 +1196,14 @@ const ConnectionModal: React.FC<{
|
||||
database: mergedValues.database || "",
|
||||
useSSH: !!mergedValues.useSSH,
|
||||
ssh: sshConfig,
|
||||
useProxy: effectiveUseProxy,
|
||||
proxy: proxyConfig,
|
||||
driver: mergedValues.driver,
|
||||
dsn: mergedValues.dsn,
|
||||
timeout: Number(mergedValues.timeout || 30),
|
||||
redisDB: Number.isFinite(Number(mergedValues.redisDB))
|
||||
? Math.max(0, Math.min(15, Math.trunc(Number(mergedValues.redisDB))))
|
||||
: 0,
|
||||
uri: String(mergedValues.uri || '').trim(),
|
||||
hosts: hosts,
|
||||
topology: topology,
|
||||
@@ -997,6 +1234,7 @@ const ConnectionModal: React.FC<{
|
||||
const defaultPort = getDefaultPortByType(type);
|
||||
if (isFileDatabaseType(type)) {
|
||||
setUseSSH(false);
|
||||
setUseProxy(false);
|
||||
form.setFieldsValue({
|
||||
host: '',
|
||||
port: 0,
|
||||
@@ -1009,7 +1247,14 @@ const ConnectionModal: React.FC<{
|
||||
sshUser: '',
|
||||
sshPassword: '',
|
||||
sshKeyPath: '',
|
||||
useProxy: false,
|
||||
proxyType: 'socks5',
|
||||
proxyHost: '',
|
||||
proxyPort: 1080,
|
||||
proxyUser: '',
|
||||
proxyPassword: '',
|
||||
mysqlTopology: 'single',
|
||||
redisTopology: 'single',
|
||||
mongoTopology: 'single',
|
||||
mongoSrv: false,
|
||||
mongoReadPreference: 'primary',
|
||||
@@ -1018,16 +1263,22 @@ const ConnectionModal: React.FC<{
|
||||
mongoAuthMechanism: '',
|
||||
savePassword: true,
|
||||
mysqlReplicaHosts: [],
|
||||
redisHosts: [],
|
||||
mongoHosts: [],
|
||||
mysqlReplicaUser: '',
|
||||
mysqlReplicaPassword: '',
|
||||
mongoReplicaUser: '',
|
||||
mongoReplicaPassword: '',
|
||||
redisDB: 0,
|
||||
});
|
||||
} else if (type !== 'custom') {
|
||||
const defaultUser = type === 'clickhouse' ? 'default' : 'root';
|
||||
form.setFieldsValue({
|
||||
user: defaultUser,
|
||||
database: '',
|
||||
port: defaultPort,
|
||||
mysqlTopology: 'single',
|
||||
redisTopology: 'single',
|
||||
mongoTopology: 'single',
|
||||
mongoSrv: false,
|
||||
mongoReadPreference: 'primary',
|
||||
@@ -1036,11 +1287,13 @@ const ConnectionModal: React.FC<{
|
||||
mongoAuthMechanism: '',
|
||||
savePassword: true,
|
||||
mysqlReplicaHosts: [],
|
||||
redisHosts: [],
|
||||
mongoHosts: [],
|
||||
mysqlReplicaUser: '',
|
||||
mysqlReplicaPassword: '',
|
||||
mongoReplicaUser: '',
|
||||
mongoReplicaPassword: '',
|
||||
redisDB: 0,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1064,8 +1317,9 @@ const ConnectionModal: React.FC<{
|
||||
{ label: '关系型数据库', items: [
|
||||
{ key: 'mysql', name: 'MySQL', icon: <ConsoleSqlOutlined style={{ fontSize: 24, color: '#00758F' }} /> },
|
||||
{ key: 'mariadb', name: 'MariaDB', icon: <ConsoleSqlOutlined style={{ fontSize: 24, color: '#003545' }} /> },
|
||||
{ key: 'diros', name: 'Diros', icon: <ConsoleSqlOutlined style={{ fontSize: 24, color: '#0050b3' }} /> },
|
||||
{ key: 'diros', name: 'Doris', icon: <ConsoleSqlOutlined style={{ fontSize: 24, color: '#0050b3' }} /> },
|
||||
{ key: 'sphinx', name: 'Sphinx', icon: <ConsoleSqlOutlined style={{ fontSize: 24, color: '#2F5D62' }} /> },
|
||||
{ key: 'clickhouse', name: 'ClickHouse', icon: <DatabaseOutlined style={{ fontSize: 24, color: '#FFCC01' }} /> },
|
||||
{ key: 'postgres', name: 'PostgreSQL', icon: <DatabaseOutlined style={{ fontSize: 24, color: '#336791' }} /> },
|
||||
{ key: 'sqlserver', name: 'SQL Server', icon: <DatabaseOutlined style={{ fontSize: 24, color: '#CC2927' }} /> },
|
||||
{ key: 'sqlite', name: 'SQLite', icon: <FileTextOutlined style={{ fontSize: 24, color: '#003B57' }} /> },
|
||||
@@ -1113,7 +1367,7 @@ const ConnectionModal: React.FC<{
|
||||
)}
|
||||
<div style={{ display: 'flex', height: 360 }}>
|
||||
{/* 左侧分类导航 */}
|
||||
<div style={{ width: 120, borderRight: '1px solid #f0f0f0', paddingRight: 8, flexShrink: 0 }}>
|
||||
<div style={{ width: 120, borderRight: `1px solid ${step1SidebarDividerColor}`, paddingRight: 8, flexShrink: 0 }}>
|
||||
{dbTypeGroups.map((group, idx) => (
|
||||
<div
|
||||
key={group.label}
|
||||
@@ -1123,8 +1377,8 @@ const ConnectionModal: React.FC<{
|
||||
cursor: 'pointer',
|
||||
borderRadius: 6,
|
||||
marginBottom: 4,
|
||||
background: activeGroup === idx ? '#e6f4ff' : 'transparent',
|
||||
color: activeGroup === idx ? '#1677ff' : undefined,
|
||||
background: activeGroup === idx ? step1SidebarActiveBg : 'transparent',
|
||||
color: activeGroup === idx ? step1SidebarActiveColor : undefined,
|
||||
fontWeight: activeGroup === idx ? 500 : 400,
|
||||
transition: 'all 0.2s',
|
||||
fontSize: 13,
|
||||
@@ -1164,23 +1418,30 @@ const ConnectionModal: React.FC<{
|
||||
type: 'mysql',
|
||||
host: 'localhost',
|
||||
port: 3306,
|
||||
database: '',
|
||||
user: 'root',
|
||||
useSSH: false,
|
||||
sshPort: 22,
|
||||
useProxy: false,
|
||||
proxyType: 'socks5',
|
||||
proxyPort: 1080,
|
||||
timeout: 30,
|
||||
uri: '',
|
||||
mysqlTopology: 'single',
|
||||
redisTopology: 'single',
|
||||
mongoTopology: 'single',
|
||||
mongoSrv: false,
|
||||
mongoReadPreference: 'primary',
|
||||
mongoAuthMechanism: '',
|
||||
savePassword: true,
|
||||
mysqlReplicaHosts: [],
|
||||
redisHosts: [],
|
||||
mongoHosts: [],
|
||||
mysqlReplicaUser: '',
|
||||
mysqlReplicaPassword: '',
|
||||
mongoReplicaUser: '',
|
||||
mongoReplicaPassword: '',
|
||||
redisDB: 0,
|
||||
}}
|
||||
onValuesChange={(changed) => {
|
||||
if (testResult) {
|
||||
@@ -1191,8 +1452,34 @@ const ConnectionModal: React.FC<{
|
||||
setUriFeedback(null);
|
||||
}
|
||||
if (changed.useSSH !== undefined) setUseSSH(changed.useSSH);
|
||||
if (changed.useProxy !== undefined) setUseProxy(changed.useProxy);
|
||||
if (changed.proxyType !== undefined) {
|
||||
const nextType = String(changed.proxyType || 'socks5').toLowerCase();
|
||||
if (nextType === 'http') {
|
||||
const currentPort = Number(form.getFieldValue('proxyPort') || 0);
|
||||
if (!currentPort || currentPort === 1080) {
|
||||
form.setFieldValue('proxyPort', 8080);
|
||||
}
|
||||
} else {
|
||||
const currentPort = Number(form.getFieldValue('proxyPort') || 0);
|
||||
if (!currentPort || currentPort === 8080) {
|
||||
form.setFieldValue('proxyPort', 1080);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Type change handled by step 1, but keep sync if select changes (hidden now)
|
||||
if (changed.type !== undefined) setDbType(changed.type);
|
||||
if (changed.redisTopology !== undefined) {
|
||||
const supportedDbs = Array.from({ length: 16 }, (_, i) => i);
|
||||
setRedisDbList(supportedDbs);
|
||||
const selectedDbsRaw = form.getFieldValue('includeRedisDatabases');
|
||||
const selectedDbs = Array.isArray(selectedDbsRaw) ? selectedDbsRaw.map((entry: any) => Number(entry)) : [];
|
||||
const validDbs = selectedDbs
|
||||
.filter((entry: number) => Number.isFinite(entry))
|
||||
.map((entry: number) => Math.trunc(entry))
|
||||
.filter((entry: number) => supportedDbs.includes(entry));
|
||||
form.setFieldValue('includeRedisDatabases', validDbs.length > 0 ? validDbs : undefined);
|
||||
}
|
||||
if (
|
||||
changed.type !== undefined
|
||||
|| changed.host !== undefined
|
||||
@@ -1273,6 +1560,13 @@ const ConnectionModal: React.FC<{
|
||||
onDoubleClick={requestTest}
|
||||
/>
|
||||
</Form.Item>
|
||||
{isFileDb && (
|
||||
<Form.Item label=" " style={{ width: 120 }}>
|
||||
<Button style={{ width: '100%' }} onClick={handleSelectDatabaseFile} loading={selectingDbFile}>
|
||||
浏览...
|
||||
</Button>
|
||||
</Form.Item>
|
||||
)}
|
||||
{!isFileDb && (
|
||||
<Form.Item
|
||||
name="port"
|
||||
@@ -1285,6 +1579,27 @@ const ConnectionModal: React.FC<{
|
||||
)}
|
||||
</div>
|
||||
|
||||
{(dbType === 'postgres' || dbType === 'kingbase' || dbType === 'highgo' || dbType === 'vastbase') && (
|
||||
<Form.Item
|
||||
name="database"
|
||||
label="默认连接数据库(可选)"
|
||||
help="留空会自动尝试 postgres、template1、与当前用户名同名数据库"
|
||||
>
|
||||
<Input placeholder="例如:appdb" />
|
||||
</Form.Item>
|
||||
)}
|
||||
|
||||
{dbType === 'oracle' && (
|
||||
<Form.Item
|
||||
name="database"
|
||||
label="服务名 (Service Name)"
|
||||
rules={[createUriAwareRequiredRule('请输入 Oracle 服务名(例如 ORCLPDB1)')]}
|
||||
help="请填写监听器注册的 SERVICE_NAME(不是用户名)。例如:ORCLPDB1"
|
||||
>
|
||||
<Input placeholder="例如:ORCLPDB1" />
|
||||
</Form.Item>
|
||||
)}
|
||||
|
||||
{(dbType === 'mysql' || dbType === 'mariadb' || dbType === 'diros' || dbType === 'sphinx') && (
|
||||
<>
|
||||
<Form.Item name="mysqlTopology" label="连接模式">
|
||||
@@ -1438,11 +1753,36 @@ const ConnectionModal: React.FC<{
|
||||
{/* Redis specific: password only, no username */}
|
||||
{isRedis && (
|
||||
<>
|
||||
<Form.Item name="redisTopology" label="连接模式">
|
||||
<Select
|
||||
options={[
|
||||
{ value: 'single', label: '单机模式' },
|
||||
{ value: 'cluster', label: '集群模式(Redis Cluster)' },
|
||||
]}
|
||||
/>
|
||||
</Form.Item>
|
||||
{redisTopology === 'cluster' && (
|
||||
<Form.Item
|
||||
name="redisHosts"
|
||||
label="集群附加节点地址"
|
||||
help="主节点使用上方主机地址;这里填写其他种子节点,格式:host:port"
|
||||
>
|
||||
<Select mode="tags" placeholder="例如:10.10.0.12:6379、10.10.0.13:6379" tokenSeparators={[',', ';', ' ']} />
|
||||
</Form.Item>
|
||||
)}
|
||||
<Form.Item name="password" label="密码 (可选)">
|
||||
<Input.Password placeholder="Redis 密码(如果设置了 requirepass)" />
|
||||
</Form.Item>
|
||||
<Form.Item name="includeRedisDatabases" label="显示数据库 (留空显示全部)" help="连接测试成功后可选择">
|
||||
<Select mode="multiple" placeholder="选择显示的数据库 (0-15)" allowClear>
|
||||
<Form.Item
|
||||
name="includeRedisDatabases"
|
||||
label="显示数据库 (留空显示全部)"
|
||||
help="连接测试成功后可选择"
|
||||
>
|
||||
<Select
|
||||
mode="multiple"
|
||||
placeholder="选择显示的数据库 (0-15)"
|
||||
allowClear
|
||||
>
|
||||
{redisDbList.map(db => <Select.Option key={db} value={db}>db{db}</Select.Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
@@ -1501,7 +1841,7 @@ const ConnectionModal: React.FC<{
|
||||
</Form.Item>
|
||||
|
||||
{useSSH && (
|
||||
<div style={{ padding: '12px', background: '#f5f5f5', borderRadius: 6, marginTop: 12 }}>
|
||||
<div style={tunnelSectionStyle}>
|
||||
<div style={{ display: 'flex', gap: 16 }}>
|
||||
<Form.Item name="sshHost" label="SSH 主机 (域名或IP)" rules={[{ required: useSSH, message: '请输入SSH主机' }]} style={{ flex: 1 }}>
|
||||
<Input placeholder="例如: ssh.example.com 或 192.168.1.100" />
|
||||
@@ -1531,6 +1871,38 @@ const ConnectionModal: React.FC<{
|
||||
</div>
|
||||
)}
|
||||
|
||||
<Divider style={{ margin: '12px 0' }} />
|
||||
<Form.Item name="useProxy" valuePropName="checked" style={{ marginBottom: 0 }}>
|
||||
<Checkbox>使用代理 (SOCKS5 / HTTP CONNECT)</Checkbox>
|
||||
</Form.Item>
|
||||
|
||||
{useProxy && (
|
||||
<div style={tunnelSectionStyle}>
|
||||
<div style={{ display: 'flex', gap: 16 }}>
|
||||
<Form.Item name="proxyType" label="代理类型" rules={[{ required: useProxy, message: '请选择代理类型' }]} style={{ width: 180 }}>
|
||||
<Select options={[
|
||||
{ value: 'socks5', label: 'SOCKS5' },
|
||||
{ value: 'http', label: 'HTTP CONNECT' },
|
||||
]} />
|
||||
</Form.Item>
|
||||
<Form.Item name="proxyHost" label="代理主机" rules={[{ required: useProxy, message: '请输入代理主机' }]} style={{ flex: 1 }}>
|
||||
<Input placeholder="例如: 127.0.0.1 或 proxy.company.com" />
|
||||
</Form.Item>
|
||||
<Form.Item name="proxyPort" label="端口" rules={[{ required: useProxy, message: '请输入代理端口' }]} style={{ width: 120 }}>
|
||||
<InputNumber style={{ width: '100%' }} min={1} max={65535} />
|
||||
</Form.Item>
|
||||
</div>
|
||||
<div style={{ display: 'flex', gap: 16 }}>
|
||||
<Form.Item name="proxyUser" label="代理用户名(可选)" style={{ flex: 1 }}>
|
||||
<Input placeholder="留空表示无认证" />
|
||||
</Form.Item>
|
||||
<Form.Item name="proxyPassword" label="代理密码(可选)" style={{ flex: 1 }}>
|
||||
<Input.Password placeholder="留空表示无认证" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<Divider style={{ margin: '12px 0' }} />
|
||||
|
||||
<Collapse
|
||||
@@ -1627,7 +1999,7 @@ const ConnectionModal: React.FC<{
|
||||
};
|
||||
|
||||
const modalBodyStyle = step === 1
|
||||
? { padding: '16px 24px', overflow: 'hidden' as const }
|
||||
? { padding: '16px 24px', overflow: 'hidden' as const, minHeight: STEP1_MODAL_MIN_BODY_HEIGHT }
|
||||
: {
|
||||
padding: '16px 24px',
|
||||
overflowY: 'auto' as const,
|
||||
@@ -1643,7 +2015,7 @@ const ConnectionModal: React.FC<{
|
||||
footer={getFooter()}
|
||||
centered
|
||||
wrapClassName="connection-modal-wrap"
|
||||
width={step === 1 ? 650 : 600}
|
||||
width={step === 1 ? STEP1_MODAL_WIDTH : STEP2_MODAL_WIDTH}
|
||||
zIndex={10001}
|
||||
destroyOnHidden
|
||||
maskClosable={false}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,152 @@ import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { buildOrderBySQL, buildWhereSQL, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
import { buildOrderBySQL, buildWhereSQL, quoteIdentPart, quoteQualifiedIdent, withSortBufferTuningSQL, type FilterCondition } from '../utils/sql';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
|
||||
type ViewerPaginationState = {
|
||||
current: number;
|
||||
pageSize: number;
|
||||
total: number;
|
||||
totalKnown: boolean;
|
||||
totalApprox: boolean;
|
||||
totalCountLoading: boolean;
|
||||
totalCountCancelled: boolean;
|
||||
};
|
||||
|
||||
const JS_MAX_SAFE_INTEGER_BIGINT = BigInt(Number.MAX_SAFE_INTEGER);
|
||||
|
||||
const isIntegerText = (text: string): boolean => /^[+-]?\d+$/.test(text);
|
||||
|
||||
const toNonNegativeFiniteNumber = (value: unknown): number | null => {
|
||||
if (typeof value === 'number') {
|
||||
return Number.isFinite(value) && value >= 0 && value <= Number.MAX_SAFE_INTEGER ? value : null;
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return value >= 0n && value <= JS_MAX_SAFE_INTEGER_BIGINT ? Number(value) : null;
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
const text = value.trim();
|
||||
if (!text) return null;
|
||||
if (isIntegerText(text)) {
|
||||
try {
|
||||
const parsedBigInt = BigInt(text);
|
||||
if (parsedBigInt < 0n || parsedBigInt > JS_MAX_SAFE_INTEGER_BIGINT) {
|
||||
return null;
|
||||
}
|
||||
return Number(parsedBigInt);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
const parsed = Number(text);
|
||||
return Number.isFinite(parsed) && parsed >= 0 && parsed <= Number.MAX_SAFE_INTEGER ? parsed : null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const parseTotalFromCountRow = (row: any): number | null => {
|
||||
if (!row || typeof row !== 'object') return null;
|
||||
const entries = Object.entries(row as Record<string, unknown>);
|
||||
if (entries.length === 0) return null;
|
||||
|
||||
for (const [key, raw] of entries) {
|
||||
const normalized = String(key || '').trim().toLowerCase();
|
||||
if (normalized === 'total' || normalized === 'count' || normalized.includes('count')) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [, raw] of entries) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
const parseDuckDBApproxTotalRow = (row: any): number | null => {
|
||||
if (!row || typeof row !== 'object') return null;
|
||||
const entries = Object.entries(row as Record<string, unknown>);
|
||||
if (entries.length === 0) return null;
|
||||
|
||||
const preferredKeys = ['approx_total', 'estimated_size', 'estimated_rows', 'row_count', 'count', 'total'];
|
||||
for (const preferred of preferredKeys) {
|
||||
for (const [key, raw] of entries) {
|
||||
if (String(key || '').trim().toLowerCase() !== preferred) continue;
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [key, raw] of entries) {
|
||||
const normalized = String(key || '').trim().toLowerCase();
|
||||
if (normalized.includes('estimate') || normalized.includes('row') || normalized.includes('count') || normalized.includes('total')) {
|
||||
const parsed = toNonNegativeFiniteNumber(raw);
|
||||
if (parsed !== null) return parsed;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const normalizeDuckDBIdentifier = (raw: string): string => {
|
||||
const text = String(raw || '').trim();
|
||||
if (text.length >= 2) {
|
||||
const first = text[0];
|
||||
const last = text[text.length - 1];
|
||||
if ((first === '"' && last === '"') || (first === '`' && last === '`')) {
|
||||
return text.slice(1, -1).trim();
|
||||
}
|
||||
}
|
||||
return text;
|
||||
};
|
||||
|
||||
const resolveDuckDBSchemaAndTable = (dbName: string, tableName: string) => {
|
||||
const rawTable = String(tableName || '').trim();
|
||||
if (!rawTable) return { schemaName: 'main', pureTableName: '' };
|
||||
|
||||
const parts = rawTable.split('.');
|
||||
if (parts.length >= 2) {
|
||||
const pureTableName = normalizeDuckDBIdentifier(parts[parts.length - 1]);
|
||||
const schemaName = normalizeDuckDBIdentifier(parts[parts.length - 2]);
|
||||
if (schemaName && pureTableName) {
|
||||
return { schemaName, pureTableName };
|
||||
}
|
||||
}
|
||||
|
||||
const fallbackSchema = normalizeDuckDBIdentifier(String(dbName || '').trim()) || 'main';
|
||||
return { schemaName: fallbackSchema, pureTableName: normalizeDuckDBIdentifier(rawTable) };
|
||||
};
|
||||
|
||||
const escapeSQLLiteral = (value: string): string => String(value || '').replace(/'/g, "''");
|
||||
|
||||
const isDuckDBUnsupportedTypeError = (msg: string): boolean => /unsupported\s*type:\s*duckdb\./i.test(String(msg || ''));
|
||||
|
||||
const isDuckDBComplexColumnType = (columnType?: string): boolean => {
|
||||
const raw = String(columnType || '').trim().toLowerCase();
|
||||
if (!raw) return false;
|
||||
return raw.includes('map') || raw.includes('struct') || raw.includes('union') || raw.includes('array') || raw.includes('list');
|
||||
};
|
||||
|
||||
const reverseOrderBySQL = (orderBySQL: string): string => {
|
||||
const raw = String(orderBySQL || '').trim();
|
||||
if (!raw) return '';
|
||||
const body = raw.replace(/^order\s+by\s+/i, '').trim();
|
||||
if (!body) return '';
|
||||
|
||||
const parts = body
|
||||
.split(',')
|
||||
.map((part) => part.trim())
|
||||
.filter(Boolean)
|
||||
.map((part) => {
|
||||
if (/\s+asc$/i.test(part)) return part.replace(/\s+asc$/i, ' DESC');
|
||||
if (/\s+desc$/i.test(part)) return part.replace(/\s+desc$/i, ' ASC');
|
||||
return `${part} DESC`;
|
||||
});
|
||||
if (parts.length === 0) return '';
|
||||
return ` ORDER BY ${parts.join(', ')}`;
|
||||
};
|
||||
|
||||
const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
@@ -16,30 +161,135 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const fetchSeqRef = useRef(0);
|
||||
const countSeqRef = useRef(0);
|
||||
const countKeyRef = useRef<string>('');
|
||||
const duckdbApproxSeqRef = useRef(0);
|
||||
const duckdbApproxKeyRef = useRef<string>('');
|
||||
const manualCountSeqRef = useRef(0);
|
||||
const manualCountKeyRef = useRef<string>('');
|
||||
const pkSeqRef = useRef(0);
|
||||
const pkKeyRef = useRef<string>('');
|
||||
const latestConfigRef = useRef<any>(null);
|
||||
const latestDbTypeRef = useRef<string>('');
|
||||
const latestDbNameRef = useRef<string>('');
|
||||
const latestCountSqlRef = useRef<string>('');
|
||||
const latestCountKeyRef = useRef<string>('');
|
||||
|
||||
const [pagination, setPagination] = useState({
|
||||
const [pagination, setPagination] = useState<ViewerPaginationState>({
|
||||
current: 1,
|
||||
pageSize: 100,
|
||||
total: 0,
|
||||
totalKnown: false
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
});
|
||||
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(null);
|
||||
|
||||
const [showFilter, setShowFilter] = useState(false);
|
||||
const [filterConditions, setFilterConditions] = useState<FilterCondition[]>([]);
|
||||
const currentConnType = (connections.find(c => c.id === tab.connectionId)?.config?.type || '').toLowerCase();
|
||||
const forceReadOnly = currentConnType === 'tdengine';
|
||||
const duckdbSafeSelectCacheRef = useRef<Record<string, string>>({});
|
||||
const currentConnConfig = connections.find(c => c.id === tab.connectionId)?.config;
|
||||
const currentConnCaps = getDataSourceCapabilities(currentConnConfig);
|
||||
const currentConnType = currentConnCaps.type;
|
||||
const forceReadOnly = currentConnCaps.forceReadOnlyQueryResult;
|
||||
|
||||
useEffect(() => {
|
||||
setPkColumns([]);
|
||||
pkKeyRef.current = '';
|
||||
countKeyRef.current = '';
|
||||
setPagination(prev => ({ ...prev, current: 1, total: 0, totalKnown: false }));
|
||||
duckdbApproxKeyRef.current = '';
|
||||
manualCountKeyRef.current = '';
|
||||
duckdbSafeSelectCacheRef.current = {};
|
||||
latestConfigRef.current = null;
|
||||
latestDbTypeRef.current = '';
|
||||
latestDbNameRef.current = '';
|
||||
latestCountSqlRef.current = '';
|
||||
latestCountKeyRef.current = '';
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
current: 1,
|
||||
total: 0,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
}, [tab.connectionId, tab.dbName, tab.tableName]);
|
||||
|
||||
const handleDuckDBManualCount = useCallback(async () => {
|
||||
if (latestDbTypeRef.current !== 'duckdb') {
|
||||
return;
|
||||
}
|
||||
const config = latestConfigRef.current;
|
||||
const dbName = latestDbNameRef.current;
|
||||
const countSql = latestCountSqlRef.current;
|
||||
const countKey = latestCountKeyRef.current;
|
||||
|
||||
if (!config || !countSql || !countKey) {
|
||||
message.warning('当前结果集尚未就绪,请先执行一次加载');
|
||||
return;
|
||||
}
|
||||
|
||||
manualCountKeyRef.current = countKey;
|
||||
const countSeq = ++manualCountSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: true, totalCountCancelled: false }));
|
||||
const countConfig: any = { ...(config as any), timeout: 120 };
|
||||
|
||||
try {
|
||||
const resCount = await DBQuery(countConfig as any, dbName, countSql);
|
||||
const countDuration = Date.now() - countStart;
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-duckdb-manual-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount?.success ? 'success' : 'error',
|
||||
duration: countDuration,
|
||||
message: resCount?.success ? '' : String(resCount?.message || '统计失败'),
|
||||
dbName
|
||||
});
|
||||
|
||||
if (manualCountSeqRef.current !== countSeq) return;
|
||||
if (manualCountKeyRef.current !== countKey) return;
|
||||
|
||||
if (!resCount?.success) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error(String(resCount?.message || '统计总数失败'));
|
||||
return;
|
||||
}
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
return;
|
||||
}
|
||||
|
||||
const total = parseTotalFromCountRow(resCount.data[0]);
|
||||
if (total === null) {
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error('统计结果解析失败');
|
||||
return;
|
||||
}
|
||||
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
total,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
} catch (e: any) {
|
||||
if (manualCountSeqRef.current !== countSeq) return;
|
||||
if (manualCountKeyRef.current !== countKey) return;
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false }));
|
||||
message.error(`统计总数失败: ${String(e?.message || e)}`);
|
||||
}
|
||||
}, [addSqlLog]);
|
||||
|
||||
const handleDuckDBCancelManualCount = useCallback(() => {
|
||||
manualCountSeqRef.current++;
|
||||
setPagination(prev => ({ ...prev, totalCountLoading: false, totalCountCancelled: true }));
|
||||
}, []);
|
||||
|
||||
const fetchData = useCallback(async (page = pagination.current, size = pagination.pageSize) => {
|
||||
const seq = ++fetchSeqRef.current;
|
||||
setLoading(true);
|
||||
@@ -70,35 +320,112 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
sql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
const offset = (page - 1) * size;
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
const baseSql = `SELECT * FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
const orderBySQL = buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
let sql = `${baseSql}${orderBySQL}`;
|
||||
const totalRows = Number(pagination.total);
|
||||
const hasFiniteTotal = Number.isFinite(totalRows) && totalRows >= 0;
|
||||
const totalKnown = pagination.totalKnown && hasFiniteTotal;
|
||||
const totalPages = hasFiniteTotal ? Math.max(1, Math.ceil(totalRows / size)) : 0;
|
||||
const currentPage = totalPages > 0 ? Math.min(Math.max(1, page), totalPages) : Math.max(1, page);
|
||||
const offset = (currentPage - 1) * size;
|
||||
const isClickHouse = dbTypeLower === 'clickhouse';
|
||||
const reverseOrderSQL = isClickHouse ? reverseOrderBySQL(orderBySQL) : '';
|
||||
let useClickHouseReversePagination = false;
|
||||
let clickHouseReverseLimit = 0;
|
||||
let clickHouseReverseHasMore = false;
|
||||
// ClickHouse 深分页在超大 OFFSET 下容易超时。对于总数已知且存在 ORDER BY 的场景,
|
||||
// 当“尾部偏移”小于“头部偏移”时,改为反向 ORDER BY + 小 OFFSET,并在前端翻转结果。
|
||||
if (isClickHouse && totalKnown && offset > 0 && reverseOrderSQL) {
|
||||
const pageRowCount = Math.max(0, Math.min(size, totalRows - offset));
|
||||
if (pageRowCount > 0) {
|
||||
const tailOffset = Math.max(0, totalRows - (offset + pageRowCount));
|
||||
if (tailOffset < offset) {
|
||||
sql = `${baseSql}${reverseOrderSQL} LIMIT ${pageRowCount} OFFSET ${tailOffset}`;
|
||||
useClickHouseReversePagination = true;
|
||||
clickHouseReverseLimit = pageRowCount;
|
||||
clickHouseReverseHasMore = currentPage < totalPages;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!useClickHouseReversePagination) {
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
}
|
||||
|
||||
const requestStartTime = Date.now();
|
||||
let executedSql = sql;
|
||||
try {
|
||||
const executeDataQuery = async (querySql: string, attemptLabel: string) => {
|
||||
const startTime = Date.now();
|
||||
const result = await DBQuery(config as any, dbName, querySql);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: result.success ? 'success' : 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: result.success ? '' : `${attemptLabel}: ${result.message}`,
|
||||
affectedRows: Array.isArray(result.data) ? result.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
return result;
|
||||
try {
|
||||
const result = await DBQuery(config as any, dbName, querySql);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: result.success ? 'success' : 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: result.success ? '' : `${attemptLabel}: ${result.message}`,
|
||||
affectedRows: Array.isArray(result.data) ? result.data.length : undefined,
|
||||
dbName
|
||||
});
|
||||
return result;
|
||||
} catch (e: any) {
|
||||
const errMessage = String(e?.message || e || 'query failed');
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
sql: querySql,
|
||||
status: 'error',
|
||||
duration: Date.now() - startTime,
|
||||
message: `${attemptLabel}: ${errMessage}`,
|
||||
dbName
|
||||
});
|
||||
return { success: false, message: errMessage, data: [], fields: [] };
|
||||
}
|
||||
};
|
||||
|
||||
const hasSort = !!sortInfo?.columnKey && (sortInfo?.order === 'ascend' || sortInfo?.order === 'descend');
|
||||
const isSortMemoryErr = (msg: string) => /error\s*1038|out of sort memory/i.test(String(msg || ''));
|
||||
let resData = await executeDataQuery(sql, '主查询');
|
||||
|
||||
if (!resData.success && dbTypeLower === 'duckdb' && isDuckDBUnsupportedTypeError(String(resData.message || ''))) {
|
||||
const cacheKey = `${tab.connectionId}|${dbName}|${tableName}`;
|
||||
let safeSelect = duckdbSafeSelectCacheRef.current[cacheKey] || '';
|
||||
if (!safeSelect) {
|
||||
try {
|
||||
const resCols = await DBGetColumns(config as any, dbName, tableName);
|
||||
if (resCols?.success && Array.isArray(resCols.data)) {
|
||||
const columnDefs = resCols.data as ColumnDefinition[];
|
||||
const selectParts = columnDefs.map((col) => {
|
||||
const colName = String(col?.name || '').trim();
|
||||
if (!colName) return '';
|
||||
const quotedCol = quoteIdentPart(dbType, colName);
|
||||
if (isDuckDBComplexColumnType(col?.type)) {
|
||||
return `CAST(${quotedCol} AS VARCHAR) AS ${quotedCol}`;
|
||||
}
|
||||
return quotedCol;
|
||||
}).filter(Boolean);
|
||||
if (selectParts.length > 0) {
|
||||
safeSelect = selectParts.join(', ');
|
||||
duckdbSafeSelectCacheRef.current[cacheKey] = safeSelect;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// ignore and keep original error path
|
||||
}
|
||||
}
|
||||
|
||||
if (safeSelect) {
|
||||
let fallbackSql = `SELECT ${safeSelect} FROM ${quoteQualifiedIdent(dbType, tableName)} ${whereSQL}`;
|
||||
fallbackSql += buildOrderBySQL(dbType, sortInfo, pkColumns);
|
||||
fallbackSql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
executedSql = fallbackSql;
|
||||
resData = await executeDataQuery(fallbackSql, '复杂类型降级重试');
|
||||
}
|
||||
}
|
||||
|
||||
if (!resData.success && isMySQLFamily && hasSort && isSortMemoryErr(resData.message)) {
|
||||
const retrySql32MB = withSortBufferTuningSQL(dbType, sql, 32 * 1024 * 1024);
|
||||
if (retrySql32MB !== sql) {
|
||||
@@ -141,7 +468,12 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
let resultData = resData.data as any[];
|
||||
if (!Array.isArray(resultData)) resultData = [];
|
||||
|
||||
const hasMore = resultData.length > size;
|
||||
if (useClickHouseReversePagination) {
|
||||
// 反向查询后恢复为原排序方向,保证用户看到的仍是“最后一页正序数据”。
|
||||
resultData = resultData.slice(0, clickHouseReverseLimit).reverse();
|
||||
}
|
||||
|
||||
const hasMore = useClickHouseReversePagination ? clickHouseReverseHasMore : resultData.length > size;
|
||||
if (hasMore) resultData = resultData.slice(0, size);
|
||||
|
||||
let fieldNames = resData.fields || [];
|
||||
@@ -156,26 +488,71 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setData(resultData);
|
||||
const countKey = `${tab.connectionId}|${dbName}|${tableName}|${whereSQL}`;
|
||||
const derivedTotalKnown = !hasMore;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : page * size + 1;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : currentPage * size + 1;
|
||||
const isDuckDB = dbTypeLower === 'duckdb';
|
||||
const minExpectedTotal = hasMore ? offset + resultData.length + 1 : offset + resultData.length;
|
||||
if (derivedTotalKnown) countKeyRef.current = countKey;
|
||||
latestConfigRef.current = config;
|
||||
latestDbTypeRef.current = dbTypeLower;
|
||||
latestDbNameRef.current = dbName;
|
||||
latestCountSqlRef.current = countSql;
|
||||
latestCountKeyRef.current = countKey;
|
||||
|
||||
setPagination(prev => {
|
||||
if (derivedTotalKnown) {
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: true };
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
total: derivedTotal,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
}
|
||||
if (prev.totalKnown && countKeyRef.current === countKey) {
|
||||
return { ...prev, current: page, pageSize: size };
|
||||
if (!isDuckDB) {
|
||||
return { ...prev, current: currentPage, pageSize: size };
|
||||
}
|
||||
// 当当前页存在“下一页”信号时,已知总数至少应大于当前页末尾。
|
||||
// 若旧总数不满足该条件(例如历史统计值为 0),降级为未知总数并回退到 derivedTotal。
|
||||
if (Number.isFinite(prev.total) && prev.total >= minExpectedTotal) {
|
||||
return { ...prev, current: currentPage, pageSize: size };
|
||||
}
|
||||
}
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: false };
|
||||
const keepManualCounting = prev.totalCountLoading && manualCountKeyRef.current === countKey;
|
||||
if (isDuckDB && prev.totalApprox && duckdbApproxKeyRef.current === countKey && Number.isFinite(prev.total) && prev.total >= minExpectedTotal) {
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
totalKnown: false,
|
||||
totalApprox: true,
|
||||
totalCountLoading: keepManualCounting,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
}
|
||||
return {
|
||||
...prev,
|
||||
current: currentPage,
|
||||
pageSize: size,
|
||||
total: derivedTotal,
|
||||
totalKnown: false,
|
||||
totalApprox: false,
|
||||
totalCountLoading: keepManualCounting,
|
||||
totalCountCancelled: keepManualCounting ? false : prev.totalCountCancelled,
|
||||
};
|
||||
});
|
||||
|
||||
if (!derivedTotalKnown) {
|
||||
const shouldRunAsyncCount = !derivedTotalKnown && !isDuckDB;
|
||||
if (shouldRunAsyncCount) {
|
||||
if (countKeyRef.current !== countKey) {
|
||||
countKeyRef.current = countKey;
|
||||
const countSeq = ++countSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
// 大表 COUNT(*) 可能非常慢,且在部分运行时环境下会影响后续操作响应;
|
||||
// 这里为统计请求设置更短的超时,避免“后台统计”长期占用资源。
|
||||
// DuckDB 大文件场景下该统计会显著拖慢翻页,已禁用后台 COUNT。
|
||||
const countConfig: any = { ...(config as any), timeout: 5 };
|
||||
|
||||
DBQuery(countConfig, dbName, countSql)
|
||||
@@ -198,10 +575,17 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (!resCount.success) return;
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) return;
|
||||
|
||||
const total = Number(resCount.data[0]?.['total']);
|
||||
if (!Number.isFinite(total) || total < 0) return;
|
||||
const total = parseTotalFromCountRow(resCount.data[0]);
|
||||
if (total === null) return;
|
||||
|
||||
setPagination(prev => ({ ...prev, total, totalKnown: true }));
|
||||
setPagination(prev => ({
|
||||
...prev,
|
||||
total,
|
||||
totalKnown: true,
|
||||
totalApprox: false,
|
||||
totalCountLoading: false,
|
||||
totalCountCancelled: false,
|
||||
}));
|
||||
})
|
||||
.catch(() => {
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
@@ -210,6 +594,50 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (isDuckDB && !derivedTotalKnown && whereSQL.trim() === '' && duckdbApproxKeyRef.current !== countKey) {
|
||||
duckdbApproxKeyRef.current = countKey;
|
||||
const approxSeq = ++duckdbApproxSeqRef.current;
|
||||
const { schemaName, pureTableName } = resolveDuckDBSchemaAndTable(dbName, tableName);
|
||||
const escapedSchema = escapeSQLLiteral(schemaName);
|
||||
const escapedTable = escapeSQLLiteral(pureTableName);
|
||||
const approxConfig: any = { ...(config as any), timeout: 3 };
|
||||
const approxSqlCandidates = [
|
||||
`SELECT estimated_size AS approx_total FROM duckdb_tables() WHERE schema_name='${escapedSchema}' AND table_name='${escapedTable}' LIMIT 1`,
|
||||
`SELECT estimated_size AS approx_total FROM duckdb_tables() WHERE table_name='${escapedTable}' ORDER BY CASE WHEN schema_name='${escapedSchema}' THEN 0 ELSE 1 END LIMIT 1`,
|
||||
];
|
||||
|
||||
(async () => {
|
||||
for (const approxSql of approxSqlCandidates) {
|
||||
try {
|
||||
const approxRes = await DBQuery(approxConfig as any, dbName, approxSql);
|
||||
if (duckdbApproxSeqRef.current !== approxSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
if (!approxRes?.success || !Array.isArray(approxRes.data) || approxRes.data.length === 0) continue;
|
||||
|
||||
const approxTotal = parseDuckDBApproxTotalRow(approxRes.data[0]);
|
||||
if (approxTotal === null) continue;
|
||||
if (!Number.isFinite(approxTotal) || approxTotal < minExpectedTotal) continue;
|
||||
|
||||
setPagination(prev => {
|
||||
if (countKeyRef.current !== countKey) return prev;
|
||||
if (prev.totalKnown) return prev;
|
||||
return {
|
||||
...prev,
|
||||
total: approxTotal,
|
||||
totalKnown: false,
|
||||
totalApprox: true,
|
||||
totalCountCancelled: false,
|
||||
};
|
||||
});
|
||||
return;
|
||||
} catch {
|
||||
if (duckdbApproxSeqRef.current !== approxSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
}
|
||||
}
|
||||
})();
|
||||
}
|
||||
} else {
|
||||
message.error(String(resData.message || '查询失败'));
|
||||
}
|
||||
@@ -227,7 +655,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
});
|
||||
}
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns]);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns, pagination.total, pagination.totalKnown]);
|
||||
// 依赖 pkColumns:在无手动排序时可回退到主键稳定排序。
|
||||
// 主键信息只会在首次加载后更新一次,避免循环查询。
|
||||
|
||||
@@ -259,6 +687,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
columnNames={columnNames}
|
||||
loading={loading}
|
||||
tableName={tab.tableName}
|
||||
exportScope="table"
|
||||
dbName={tab.dbName}
|
||||
connectionId={tab.connectionId}
|
||||
pkColumns={pkColumns}
|
||||
@@ -266,6 +695,8 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
onSort={handleSort}
|
||||
onPageChange={handlePageChange}
|
||||
pagination={pagination}
|
||||
onRequestTotalCount={currentConnType === 'duckdb' ? handleDuckDBManualCount : undefined}
|
||||
onCancelTotalCount={currentConnType === 'duckdb' ? handleDuckDBCancelManualCount : undefined}
|
||||
showFilter={showFilter}
|
||||
onToggleFilter={handleToggleFilter}
|
||||
onApplyFilter={handleApplyFilter}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -27,8 +27,9 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
const b = parseInt(hex.substring(4, 6), 16);
|
||||
return `rgba(${r}, ${g}, ${b}, ${opacity})`;
|
||||
};
|
||||
const bgMain = getBg('#1f1f1f');
|
||||
const bgToolbar = getBg('#2a2a2a');
|
||||
const bgMain = getBg('#1d1d1d');
|
||||
const panelDividerColor = darkMode ? 'rgba(255,255,255,0.08)' : 'rgba(0,0,0,0.08)';
|
||||
const panelMutedTextColor = darkMode ? 'rgba(255,255,255,0.62)' : 'rgba(0,0,0,0.58)';
|
||||
const logScrollbarThumb = darkMode ? 'rgba(255, 255, 255, 0.34)' : 'rgba(0, 0, 0, 0.26)';
|
||||
const logScrollbarThumbHover = darkMode ? 'rgba(255, 255, 255, 0.5)' : 'rgba(0, 0, 0, 0.36)';
|
||||
|
||||
@@ -37,7 +38,7 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
title: 'Time',
|
||||
dataIndex: 'timestamp',
|
||||
width: 80,
|
||||
render: (ts: number) => <span style={{ color: '#888', fontSize: '12px' }}>{new Date(ts).toLocaleTimeString()}</span>
|
||||
render: (ts: number) => <span style={{ color: panelMutedTextColor, fontSize: '12px' }}>{new Date(ts).toLocaleTimeString()}</span>
|
||||
},
|
||||
{
|
||||
title: 'Status',
|
||||
@@ -62,7 +63,7 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
<div style={{ fontFamily: 'monospace', wordBreak: 'break-all', fontSize: '12px', lineHeight: '1.2' }}>
|
||||
<div style={{ color: darkMode ? '#a6e22e' : '#005cc5' }}>{text}</div>
|
||||
{record.message && <div style={{ color: '#ff4d4f', marginTop: 2 }}>{record.message}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: '#888', marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
{record.affectedRows !== undefined && <div style={{ color: panelMutedTextColor, marginTop: 1 }}>Affected: {record.affectedRows}</div>}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -71,7 +72,7 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
return (
|
||||
<div style={{
|
||||
height,
|
||||
borderTop: 'none',
|
||||
borderTop: `1px solid ${panelDividerColor}`,
|
||||
background: bgMain,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
@@ -95,7 +96,7 @@ const LogPanel: React.FC<LogPanelProps> = ({ height, onClose, onResizeStart }) =
|
||||
{/* Toolbar */}
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
borderBottom: 'none',
|
||||
borderBottom: `1px solid ${panelDividerColor}`,
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center',
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import React, { useState, useEffect, useRef, useMemo } from 'react';
|
||||
import Editor, { OnMount } from '@monaco-editor/react';
|
||||
import { Button, message, Modal, Input, Form, Dropdown, MenuProps, Tooltip, Select, Tabs } from 'antd';
|
||||
import { PlayCircleOutlined, SaveOutlined, FormatPainterOutlined, SettingOutlined, CloseOutlined } from '@ant-design/icons';
|
||||
@@ -7,6 +7,7 @@ import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBQuery, DBGetTables, DBGetAllColumns, DBGetDatabases, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
import { getDataSourceCapabilities } from '../utils/dataSourceCapabilities';
|
||||
|
||||
const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [query, setQuery] = useState(tab.query || 'SELECT * FROM ');
|
||||
@@ -14,6 +15,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
type ResultSet = {
|
||||
key: string;
|
||||
sql: string;
|
||||
exportSql?: string;
|
||||
rows: any[];
|
||||
columns: string[];
|
||||
tableName?: string;
|
||||
@@ -47,6 +49,10 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const visibleDbsRef = useRef<string[]>([]); // Store visible databases for cross-db intellisense
|
||||
|
||||
const connections = useStore(state => state.connections);
|
||||
const queryCapableConnections = useMemo(
|
||||
() => connections.filter(c => getDataSourceCapabilities(c.config).supportsQueryEditor),
|
||||
[connections]
|
||||
);
|
||||
const addSqlLog = useStore(state => state.addSqlLog);
|
||||
const currentConnectionIdRef = useRef(currentConnectionId);
|
||||
const currentDbRef = useRef(currentDb);
|
||||
@@ -64,6 +70,16 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
currentConnectionIdRef.current = currentConnectionId;
|
||||
}, [currentConnectionId]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!queryCapableConnections.some(c => c.id === currentConnectionId)) {
|
||||
const fallback = queryCapableConnections[0]?.id || '';
|
||||
if (fallback && fallback !== currentConnectionId) {
|
||||
setCurrentConnectionId(fallback);
|
||||
setCurrentDb('');
|
||||
}
|
||||
}
|
||||
}, [queryCapableConnections, currentConnectionId]);
|
||||
|
||||
useEffect(() => {
|
||||
currentDbRef.current = currentDb;
|
||||
}, [currentDb]);
|
||||
@@ -922,7 +938,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const applyAutoLimit = (sql: string, dbType: string, maxRows: number): { sql: string; applied: boolean; maxRows: number } => {
|
||||
const normalizedType = (dbType || 'mysql').toLowerCase();
|
||||
const supportsLimit = normalizedType === 'mysql' || normalizedType === 'mariadb' || normalizedType === 'diros' || normalizedType === 'sphinx' || normalizedType === 'postgres' || normalizedType === 'kingbase' || normalizedType === 'sqlite' || normalizedType === 'duckdb' || normalizedType === 'tdengine' || normalizedType === '';
|
||||
const supportsLimit = normalizedType === 'mysql' || normalizedType === 'mariadb' || normalizedType === 'diros' || normalizedType === 'sphinx' || normalizedType === 'postgres' || normalizedType === 'kingbase' || normalizedType === 'sqlite' || normalizedType === 'duckdb' || normalizedType === 'tdengine' || normalizedType === 'clickhouse' || normalizedType === '';
|
||||
if (!supportsLimit) return { sql, applied: false, maxRows };
|
||||
if (!Number.isFinite(maxRows) || maxRows <= 0) return { sql, applied: false, maxRows };
|
||||
|
||||
@@ -977,6 +993,12 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (runSeqRef.current === runSeq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
const connCaps = getDataSourceCapabilities(conn.config);
|
||||
if (!connCaps.supportsQueryEditor) {
|
||||
message.error("当前数据源不支持 SQL 查询编辑器,请使用对应专用页面。");
|
||||
if (runSeqRef.current === runSeq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const config = {
|
||||
...conn.config,
|
||||
@@ -1000,8 +1022,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const nextResultSets: ResultSet[] = [];
|
||||
const maxRows = Number(queryOptions?.maxRows) || 0;
|
||||
const dbType = String((config as any).type || 'mysql');
|
||||
const normalizedDbType = dbType.toLowerCase();
|
||||
const forceReadOnlyResult = normalizedDbType === 'tdengine';
|
||||
const forceReadOnlyResult = connCaps.forceReadOnlyQueryResult;
|
||||
const wantsLimitProbe = Number.isFinite(maxRows) && maxRows > 0;
|
||||
const probeLimit = wantsLimitProbe ? (maxRows + 1) : 0;
|
||||
let anyTruncated = false;
|
||||
@@ -1066,6 +1087,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
nextResultSets.push({
|
||||
key: `result-${idx + 1}`,
|
||||
sql: rawStatement,
|
||||
exportSql: limited.applied ? applyAutoLimit(rawStatement, dbType, Math.max(1, Number(maxRows) || 1)).sql : rawStatement,
|
||||
rows,
|
||||
columns: cols,
|
||||
tableName: simpleTableName,
|
||||
@@ -1082,6 +1104,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
nextResultSets.push({
|
||||
key: `result-${idx + 1}`,
|
||||
sql: rawStatement,
|
||||
exportSql: rawStatement,
|
||||
rows: [row],
|
||||
columns: ['affectedRows'],
|
||||
pkColumns: [],
|
||||
@@ -1223,7 +1246,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
setCurrentConnectionId(val);
|
||||
setCurrentDb('');
|
||||
}}
|
||||
options={connections.map(c => ({ label: c.name, value: c.id }))}
|
||||
options={queryCapableConnections.map(c => ({ label: c.name, value: c.id }))}
|
||||
showSearch
|
||||
/>
|
||||
<Select
|
||||
@@ -1333,6 +1356,8 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
columnNames={rs.columns}
|
||||
loading={loading}
|
||||
tableName={rs.tableName}
|
||||
exportScope="queryResult"
|
||||
resultSql={rs.exportSql || rs.sql}
|
||||
dbName={currentDb}
|
||||
connectionId={currentConnectionId}
|
||||
pkColumns={rs.pkColumns}
|
||||
|
||||
@@ -5,6 +5,7 @@ import { useStore } from '../store';
|
||||
import { RedisKeyInfo, RedisValue, StreamEntry } from '../types';
|
||||
import Editor from '@monaco-editor/react';
|
||||
import type { DataNode } from 'antd/es/tree';
|
||||
import { normalizeOpacityForPlatform } from '../utils/appearance';
|
||||
|
||||
const { Search } = Input;
|
||||
|
||||
@@ -14,6 +15,12 @@ const REDIS_TREE_KEY_TYPE_WIDTH = 92;
|
||||
const REDIS_TREE_KEY_TYPE_WIDTH_NARROW = 84;
|
||||
const REDIS_TREE_KEY_TTL_WIDTH = 92;
|
||||
const REDIS_TREE_HIDE_TTL_THRESHOLD = 460;
|
||||
const REDIS_KEY_INITIAL_LOAD_COUNT = 2000;
|
||||
const REDIS_KEY_LOAD_MORE_COUNT = 2000;
|
||||
const REDIS_KEY_SEARCH_INITIAL_LOAD_COUNT = 600;
|
||||
const REDIS_KEY_SEARCH_LOAD_MORE_COUNT = 1000;
|
||||
const REDIS_LARGE_KEYSPACE_THRESHOLD = 10000;
|
||||
const REDIS_LARGE_KEYSPACE_MAX_EXPANDED_GROUPS = 200;
|
||||
|
||||
interface RedisViewerProps {
|
||||
connectionId: string;
|
||||
@@ -212,18 +219,17 @@ const ResizableDivider: React.FC<{
|
||||
<div
|
||||
onMouseDown={handleMouseDown}
|
||||
style={{
|
||||
width: 6,
|
||||
width: 5,
|
||||
cursor: 'col-resize',
|
||||
background: '#f0f0f0',
|
||||
background: 'transparent',
|
||||
flexShrink: 0,
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center'
|
||||
justifyContent: 'center',
|
||||
zIndex: 10,
|
||||
}}
|
||||
onMouseEnter={(e) => (e.currentTarget.style.background = '#d9d9d9')}
|
||||
onMouseLeave={(e) => (e.currentTarget.style.background = '#f0f0f0')}
|
||||
title="拖动调整宽度"
|
||||
>
|
||||
<div style={{ width: 2, height: 30, background: '#bfbfbf', borderRadius: 1 }} />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -239,36 +245,79 @@ type RedisKeyTreeGroup = {
|
||||
path: string;
|
||||
children: Map<string, RedisKeyTreeGroup>;
|
||||
leaves: RedisKeyTreeLeaf[];
|
||||
leafCount: number;
|
||||
};
|
||||
|
||||
type RedisKeyTreeResult = {
|
||||
treeData: DataNode[];
|
||||
rawKeyByNodeKey: Map<string, string>;
|
||||
leafNodeKeyByRawKey: Map<string, string>;
|
||||
treeData: RedisTreeDataNode[];
|
||||
groupKeys: string[];
|
||||
};
|
||||
|
||||
type RedisTreeDataNode = DataNode & {
|
||||
nodeType: 'group' | 'leaf';
|
||||
groupName?: string;
|
||||
groupLeafCount?: number;
|
||||
leafLabel?: string;
|
||||
rawKey?: string;
|
||||
keyType?: string;
|
||||
ttl?: number;
|
||||
};
|
||||
|
||||
const buildLeafNodeKey = (rawKey: string): string => `key:${rawKey}`;
|
||||
|
||||
const parseRawKeyFromNodeKey = (nodeKey: React.Key): string | null => {
|
||||
const keyText = String(nodeKey);
|
||||
if (!keyText.startsWith('key:')) {
|
||||
return null;
|
||||
}
|
||||
return keyText.slice(4);
|
||||
};
|
||||
|
||||
const getRedisScanLoadCount = (pattern: string, append: boolean): number => {
|
||||
const normalizedPattern = pattern.trim() || '*';
|
||||
if (normalizedPattern === '*') {
|
||||
return append ? REDIS_KEY_LOAD_MORE_COUNT : REDIS_KEY_INITIAL_LOAD_COUNT;
|
||||
}
|
||||
return append ? REDIS_KEY_SEARCH_LOAD_MORE_COUNT : REDIS_KEY_SEARCH_INITIAL_LOAD_COUNT;
|
||||
};
|
||||
|
||||
const normalizeRedisCursor = (value: unknown): string => {
|
||||
if (typeof value === 'string') {
|
||||
const trimmed = value.trim();
|
||||
return trimmed === '' ? '0' : trimmed;
|
||||
}
|
||||
if (typeof value === 'number') {
|
||||
if (!Number.isFinite(value)) {
|
||||
return '0';
|
||||
}
|
||||
return Math.trunc(value).toString();
|
||||
}
|
||||
if (typeof value === 'bigint') {
|
||||
return value.toString();
|
||||
}
|
||||
return '0';
|
||||
};
|
||||
|
||||
const normalizeKeySegment = (segment: string): string => {
|
||||
return segment === '' ? EMPTY_SEGMENT_LABEL : segment;
|
||||
};
|
||||
|
||||
const createTreeGroup = (name: string, path: string): RedisKeyTreeGroup => {
|
||||
return { name, path, children: new Map(), leaves: [] };
|
||||
return { name, path, children: new Map(), leaves: [], leafCount: 0 };
|
||||
};
|
||||
|
||||
const countGroupLeafNodes = (group: RedisKeyTreeGroup): number => {
|
||||
const calculateGroupLeafCount = (group: RedisKeyTreeGroup): number => {
|
||||
let count = group.leaves.length;
|
||||
group.children.forEach((child) => {
|
||||
count += countGroupLeafNodes(child);
|
||||
count += calculateGroupLeafCount(child);
|
||||
});
|
||||
group.leafCount = count;
|
||||
return count;
|
||||
};
|
||||
|
||||
const buildRedisKeyTree = (
|
||||
keys: RedisKeyInfo[],
|
||||
formatTTL: (ttl: number) => string,
|
||||
getTypeColor: (type: string) => string,
|
||||
showTTL: boolean
|
||||
sortLeafNodes: boolean
|
||||
): RedisKeyTreeResult => {
|
||||
const root = createTreeGroup('__root__', '__root__');
|
||||
|
||||
@@ -298,105 +347,41 @@ const buildRedisKeyTree = (
|
||||
|
||||
current.leaves.push({ keyInfo, label: leafLabel });
|
||||
});
|
||||
calculateGroupLeafCount(root);
|
||||
|
||||
const rawKeyByNodeKey = new Map<string, string>();
|
||||
const leafNodeKeyByRawKey = new Map<string, string>();
|
||||
const groupKeys: string[] = [];
|
||||
|
||||
const toTreeNodes = (group: RedisKeyTreeGroup): DataNode[] => {
|
||||
const toTreeNodes = (group: RedisKeyTreeGroup): RedisTreeDataNode[] => {
|
||||
const childGroups = Array.from(group.children.values()).sort((a, b) => a.name.localeCompare(b.name));
|
||||
const childLeaves = [...group.leaves].sort((a, b) => a.keyInfo.key.localeCompare(b.keyInfo.key));
|
||||
const childLeaves = sortLeafNodes
|
||||
? [...group.leaves].sort((a, b) => a.keyInfo.key.localeCompare(b.keyInfo.key))
|
||||
: group.leaves;
|
||||
|
||||
const groupNodes: DataNode[] = childGroups.map((child) => {
|
||||
const groupNodes: RedisTreeDataNode[] = childGroups.map((child) => {
|
||||
const groupNodeKey = `group:${child.path}`;
|
||||
groupKeys.push(groupNodeKey);
|
||||
return {
|
||||
key: groupNodeKey,
|
||||
title: (
|
||||
<Space size={6}>
|
||||
<FolderOpenOutlined style={{ color: '#8c8c8c' }} />
|
||||
<span>{child.name}</span>
|
||||
<span style={{ fontSize: 12, color: '#999' }}>({countGroupLeafNodes(child)})</span>
|
||||
</Space>
|
||||
),
|
||||
title: child.name,
|
||||
nodeType: 'group',
|
||||
groupName: child.name,
|
||||
groupLeafCount: child.leafCount,
|
||||
selectable: false,
|
||||
disableCheckbox: true,
|
||||
children: toTreeNodes(child),
|
||||
};
|
||||
});
|
||||
|
||||
const leafNodes: DataNode[] = childLeaves.map((leaf) => {
|
||||
const nodeKey = `key:${leaf.keyInfo.key}`;
|
||||
rawKeyByNodeKey.set(nodeKey, leaf.keyInfo.key);
|
||||
leafNodeKeyByRawKey.set(leaf.keyInfo.key, nodeKey);
|
||||
const leafNodes: RedisTreeDataNode[] = childLeaves.map((leaf) => {
|
||||
return {
|
||||
key: nodeKey,
|
||||
key: buildLeafNodeKey(leaf.keyInfo.key),
|
||||
isLeaf: true,
|
||||
title: (
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 8,
|
||||
minWidth: 0,
|
||||
width: '100%',
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 6,
|
||||
minWidth: 0,
|
||||
flex: 1,
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
>
|
||||
<KeyOutlined style={{ color: '#1677ff', flexShrink: 0 }} />
|
||||
<Tooltip title={leaf.keyInfo.key}>
|
||||
<span
|
||||
style={{
|
||||
minWidth: 0,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
whiteSpace: 'nowrap',
|
||||
display: 'block',
|
||||
}}
|
||||
>
|
||||
{leaf.label}
|
||||
</span>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Tag
|
||||
color={getTypeColor(leaf.keyInfo.type)}
|
||||
style={{
|
||||
marginInlineEnd: 0,
|
||||
width: showTTL ? REDIS_TREE_KEY_TYPE_WIDTH : REDIS_TREE_KEY_TYPE_WIDTH_NARROW,
|
||||
textAlign: 'center',
|
||||
flexShrink: 0
|
||||
}}
|
||||
>
|
||||
{leaf.keyInfo.type}
|
||||
</Tag>
|
||||
{showTTL && (
|
||||
<span
|
||||
style={{
|
||||
width: REDIS_TREE_KEY_TTL_WIDTH,
|
||||
fontSize: 12,
|
||||
color: '#999',
|
||||
textAlign: 'left',
|
||||
whiteSpace: 'nowrap',
|
||||
flexShrink: 0,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{formatTTL(leaf.keyInfo.ttl)}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
title: leaf.label,
|
||||
nodeType: 'leaf',
|
||||
leafLabel: leaf.label,
|
||||
rawKey: leaf.keyInfo.key,
|
||||
keyType: leaf.keyInfo.type,
|
||||
ttl: leaf.keyInfo.ttl,
|
||||
};
|
||||
});
|
||||
|
||||
@@ -405,20 +390,31 @@ const buildRedisKeyTree = (
|
||||
|
||||
return {
|
||||
treeData: toTreeNodes(root),
|
||||
rawKeyByNodeKey,
|
||||
leafNodeKeyByRawKey,
|
||||
groupKeys,
|
||||
};
|
||||
};
|
||||
|
||||
const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
const { connections } = useStore();
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const appearance = useStore(state => state.appearance);
|
||||
const darkMode = theme === 'dark';
|
||||
const opacity = normalizeOpacityForPlatform(appearance.opacity);
|
||||
const connection = connections.find(c => c.id === connectionId);
|
||||
const keyAccentColor = darkMode ? '#ffd666' : '#1677ff';
|
||||
const jsonAccentColor = darkMode ? '#f6c453' : '#1890ff';
|
||||
const valueToolbarBg = darkMode
|
||||
? `rgba(38, 38, 38, ${opacity})`
|
||||
: `rgba(245, 245, 245, ${opacity})`;
|
||||
const valueToolbarBorder = darkMode
|
||||
? `1px solid rgba(255, 255, 255, ${Math.max(0.12, Math.min(0.24, opacity * 0.22))})`
|
||||
: `1px solid rgba(0, 0, 0, ${Math.max(0.08, Math.min(0.2, opacity * 0.12))})`;
|
||||
const valueToolbarText = darkMode ? 'rgba(255, 255, 255, 0.78)' : '#666';
|
||||
|
||||
const [keys, setKeys] = useState<RedisKeyInfo[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [searchPattern, setSearchPattern] = useState('*');
|
||||
const [cursor, setCursor] = useState<number>(0);
|
||||
const [cursor, setCursor] = useState<string>('0');
|
||||
const [hasMore, setHasMore] = useState(false);
|
||||
const [selectedKey, setSelectedKey] = useState<string | null>(null);
|
||||
const [keyValue, setKeyValue] = useState<RedisValue | null>(null);
|
||||
@@ -443,11 +439,14 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
onSave: (newValue: string) => Promise<void>;
|
||||
} | null>(null);
|
||||
const jsonEditValueRef = useRef<string>('');
|
||||
const latestLoadRequestIdRef = useRef(0);
|
||||
|
||||
// 面板宽度状态和 ref - 默认占据 50% 宽度
|
||||
const [leftPanelWidth, setLeftPanelWidth] = useState<number | string>('50%');
|
||||
const leftPanelRef = useRef<HTMLDivElement>(null);
|
||||
const treeContainerRef = useRef<HTMLDivElement>(null);
|
||||
const [showTreeKeyTTL, setShowTreeKeyTTL] = useState(true);
|
||||
const [treeHeight, setTreeHeight] = useState(500);
|
||||
const [expandedGroupKeys, setExpandedGroupKeys] = useState<string[]>([]);
|
||||
|
||||
const getConfig = useCallback(() => {
|
||||
@@ -462,55 +461,78 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
};
|
||||
}, [connection, redisDB]);
|
||||
|
||||
const loadKeys = useCallback(async (pattern: string = '*', fromCursor: number = 0, append: boolean = false) => {
|
||||
const loadKeys = useCallback(async (
|
||||
pattern: string = '*',
|
||||
fromCursor: string = '0',
|
||||
append: boolean = false,
|
||||
targetCount?: number
|
||||
) => {
|
||||
const config = getConfig();
|
||||
if (!config) return;
|
||||
|
||||
const normalizedPattern = pattern.trim() || '*';
|
||||
const effectiveTargetCount = targetCount ?? getRedisScanLoadCount(normalizedPattern, append);
|
||||
const requestId = latestLoadRequestIdRef.current + 1;
|
||||
latestLoadRequestIdRef.current = requestId;
|
||||
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await (window as any).go.app.App.RedisScanKeys(config, pattern, fromCursor, 100);
|
||||
const res = await (window as any).go.app.App.RedisScanKeys(config, normalizedPattern, fromCursor, effectiveTargetCount);
|
||||
if (requestId !== latestLoadRequestIdRef.current) {
|
||||
return;
|
||||
}
|
||||
if (res.success) {
|
||||
const result = res.data;
|
||||
const scannedKeys = Array.isArray(result?.keys) ? result.keys : [];
|
||||
const nextCursor = normalizeRedisCursor(result?.cursor);
|
||||
if (append) {
|
||||
setKeys(prev => {
|
||||
const keyMap = new Map<string, RedisKeyInfo>();
|
||||
prev.forEach(item => keyMap.set(item.key, item));
|
||||
result.keys.forEach((item: RedisKeyInfo) => keyMap.set(item.key, item));
|
||||
scannedKeys.forEach((item: RedisKeyInfo) => keyMap.set(item.key, item));
|
||||
return Array.from(keyMap.values());
|
||||
});
|
||||
} else {
|
||||
setKeys(result.keys);
|
||||
setKeys(scannedKeys);
|
||||
}
|
||||
setCursor(result.cursor);
|
||||
setHasMore(result.cursor !== 0);
|
||||
setCursor(nextCursor);
|
||||
setHasMore(nextCursor !== '0');
|
||||
} else {
|
||||
message.error('加载 Key 失败: ' + res.message);
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (requestId !== latestLoadRequestIdRef.current) {
|
||||
return;
|
||||
}
|
||||
message.error('加载 Key 失败: ' + (e?.message || String(e)));
|
||||
} finally {
|
||||
setLoading(false);
|
||||
if (requestId === latestLoadRequestIdRef.current) {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
}, [getConfig]);
|
||||
|
||||
useEffect(() => {
|
||||
loadKeys(searchPattern, 0, false);
|
||||
loadKeys(searchPattern, '0', false, getRedisScanLoadCount(searchPattern, false));
|
||||
}, [redisDB]);
|
||||
|
||||
const handleSearch = (value: string) => {
|
||||
const pattern = value.trim() || '*';
|
||||
setSearchPattern(pattern);
|
||||
setCursor(0);
|
||||
loadKeys(pattern, 0, false);
|
||||
setCursor('0');
|
||||
loadKeys(pattern, '0', false, getRedisScanLoadCount(pattern, false));
|
||||
};
|
||||
|
||||
const handleLoadMore = () => {
|
||||
loadKeys(searchPattern, cursor, true);
|
||||
if (!hasMore || loading) {
|
||||
return;
|
||||
}
|
||||
loadKeys(searchPattern, cursor, true, getRedisScanLoadCount(searchPattern, true));
|
||||
};
|
||||
|
||||
const handleRefresh = () => {
|
||||
setCursor(0);
|
||||
loadKeys(searchPattern, 0, false);
|
||||
setCursor('0');
|
||||
loadKeys(searchPattern, '0', false, getRedisScanLoadCount(searchPattern, false));
|
||||
};
|
||||
|
||||
const loadKeyValue = async (key: string) => {
|
||||
@@ -666,23 +688,51 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
return () => window.removeEventListener('resize', handleWindowResize);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const target = treeContainerRef.current;
|
||||
if (!target) return;
|
||||
|
||||
const updateTreeHeight = (nextHeight: number) => {
|
||||
if (nextHeight <= 0) return;
|
||||
setTreeHeight((prev) => (prev === nextHeight ? prev : nextHeight));
|
||||
};
|
||||
|
||||
updateTreeHeight(Math.round(target.getBoundingClientRect().height));
|
||||
|
||||
if (typeof ResizeObserver !== 'undefined') {
|
||||
const observer = new ResizeObserver((entries) => {
|
||||
const nextHeight = Math.round(entries[0]?.contentRect.height || target.getBoundingClientRect().height);
|
||||
updateTreeHeight(nextHeight);
|
||||
});
|
||||
observer.observe(target);
|
||||
return () => observer.disconnect();
|
||||
}
|
||||
|
||||
const handleWindowResize = () => {
|
||||
updateTreeHeight(Math.round(target.getBoundingClientRect().height));
|
||||
};
|
||||
window.addEventListener('resize', handleWindowResize);
|
||||
return () => window.removeEventListener('resize', handleWindowResize);
|
||||
}, []);
|
||||
|
||||
const isLargeKeyspace = keys.length >= REDIS_LARGE_KEYSPACE_THRESHOLD;
|
||||
|
||||
const keyTree = useMemo(() => {
|
||||
return buildRedisKeyTree(keys, formatTTL, getTypeColor, showTreeKeyTTL);
|
||||
}, [keys, showTreeKeyTTL]);
|
||||
return buildRedisKeyTree(keys, !isLargeKeyspace);
|
||||
}, [isLargeKeyspace, keys]);
|
||||
|
||||
const groupKeySet = useMemo(() => new Set(keyTree.groupKeys), [keyTree.groupKeys]);
|
||||
|
||||
const selectedTreeNodeKeys = useMemo(() => {
|
||||
if (!selectedKey) {
|
||||
return [] as string[];
|
||||
}
|
||||
const nodeKey = keyTree.leafNodeKeyByRawKey.get(selectedKey);
|
||||
return nodeKey ? [nodeKey] : [];
|
||||
}, [selectedKey, keyTree]);
|
||||
return [buildLeafNodeKey(selectedKey)];
|
||||
}, [selectedKey]);
|
||||
|
||||
const checkedTreeNodeKeys = useMemo(() => {
|
||||
return selectedKeys
|
||||
.map(rawKey => keyTree.leafNodeKeyByRawKey.get(rawKey))
|
||||
.filter((nodeKey): nodeKey is string => Boolean(nodeKey));
|
||||
}, [selectedKeys, keyTree]);
|
||||
return selectedKeys.map(rawKey => buildLeafNodeKey(rawKey));
|
||||
}, [selectedKeys]);
|
||||
|
||||
useEffect(() => {
|
||||
const existingKeySet = new Set(keys.map(item => item.key));
|
||||
@@ -691,16 +741,19 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
|
||||
useEffect(() => {
|
||||
setExpandedGroupKeys((prev) => {
|
||||
const validKeys = prev.filter(nodeKey => keyTree.groupKeys.includes(nodeKey));
|
||||
return validKeys;
|
||||
const validKeys = prev.filter(nodeKey => groupKeySet.has(nodeKey));
|
||||
if (!isLargeKeyspace) {
|
||||
return validKeys;
|
||||
}
|
||||
return validKeys.slice(0, REDIS_LARGE_KEYSPACE_MAX_EXPANDED_GROUPS);
|
||||
});
|
||||
}, [keyTree]);
|
||||
}, [groupKeySet, isLargeKeyspace]);
|
||||
|
||||
const handleTreeSelect = (nodeKeys: React.Key[]) => {
|
||||
if (nodeKeys.length === 0) {
|
||||
return;
|
||||
}
|
||||
const rawKey = keyTree.rawKeyByNodeKey.get(String(nodeKeys[0]));
|
||||
const rawKey = parseRawKeyFromNodeKey(nodeKeys[0]);
|
||||
if (!rawKey) {
|
||||
return;
|
||||
}
|
||||
@@ -710,11 +763,119 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
const handleTreeCheck = (checked: React.Key[] | { checked: React.Key[]; halfChecked: React.Key[] }) => {
|
||||
const checkedNodeKeys = Array.isArray(checked) ? checked : checked.checked;
|
||||
const rawKeys = checkedNodeKeys
|
||||
.map(nodeKey => keyTree.rawKeyByNodeKey.get(String(nodeKey)))
|
||||
.map(nodeKey => parseRawKeyFromNodeKey(nodeKey))
|
||||
.filter((rawKey): rawKey is string => Boolean(rawKey));
|
||||
setSelectedKeys(rawKeys);
|
||||
};
|
||||
|
||||
const renderTreeNodeTitle = useCallback((nodeData: DataNode) => {
|
||||
const treeNode = nodeData as RedisTreeDataNode;
|
||||
|
||||
if (treeNode.nodeType === 'group') {
|
||||
return (
|
||||
<Space size={6}>
|
||||
<FolderOpenOutlined style={{ color: '#8c8c8c' }} />
|
||||
<span>{treeNode.groupName}</span>
|
||||
<span style={{ fontSize: 12, color: '#999' }}>({treeNode.groupLeafCount ?? 0})</span>
|
||||
</Space>
|
||||
);
|
||||
}
|
||||
|
||||
const leafLabel = treeNode.leafLabel ?? '';
|
||||
const rawKey = treeNode.rawKey ?? parseRawKeyFromNodeKey(treeNode.key ?? '') ?? '';
|
||||
const keyType = treeNode.keyType ?? 'unknown';
|
||||
const ttl = typeof treeNode.ttl === 'number' ? treeNode.ttl : -1;
|
||||
|
||||
if (isLargeKeyspace) {
|
||||
return (
|
||||
<div style={{ minWidth: 0, overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap' }}>
|
||||
<span>{leafLabel}</span>
|
||||
<span style={{ marginLeft: 8, color: '#999', fontSize: 12 }}>[{keyType}]</span>
|
||||
{showTreeKeyTTL && (
|
||||
<span style={{ marginLeft: 8, color: '#999', fontSize: 12 }}>{formatTTL(ttl)}</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 8,
|
||||
minWidth: 0,
|
||||
width: '100%',
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: 6,
|
||||
minWidth: 0,
|
||||
flex: 1,
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
>
|
||||
<KeyOutlined style={{ color: keyAccentColor, flexShrink: 0 }} />
|
||||
<Tooltip title={rawKey}>
|
||||
<span
|
||||
style={{
|
||||
minWidth: 0,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
whiteSpace: 'nowrap',
|
||||
display: 'block',
|
||||
}}
|
||||
>
|
||||
{leafLabel}
|
||||
</span>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Tag
|
||||
color={getTypeColor(keyType)}
|
||||
style={{
|
||||
marginInlineEnd: 0,
|
||||
width: showTreeKeyTTL ? REDIS_TREE_KEY_TYPE_WIDTH : REDIS_TREE_KEY_TYPE_WIDTH_NARROW,
|
||||
textAlign: 'center',
|
||||
flexShrink: 0
|
||||
}}
|
||||
>
|
||||
{keyType}
|
||||
</Tag>
|
||||
{showTreeKeyTTL && (
|
||||
<span
|
||||
style={{
|
||||
width: REDIS_TREE_KEY_TTL_WIDTH,
|
||||
fontSize: 12,
|
||||
color: '#999',
|
||||
textAlign: 'left',
|
||||
whiteSpace: 'nowrap',
|
||||
flexShrink: 0,
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
}}
|
||||
>
|
||||
{formatTTL(ttl)}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}, [formatTTL, getTypeColor, isLargeKeyspace, showTreeKeyTTL]);
|
||||
|
||||
const handleTreeExpand = (nextExpandedKeys: React.Key[]) => {
|
||||
const validGroupKeys = nextExpandedKeys
|
||||
.map(key => String(key))
|
||||
.filter(nodeKey => groupKeySet.has(nodeKey));
|
||||
if (isLargeKeyspace) {
|
||||
setExpandedGroupKeys(validGroupKeys.slice(0, REDIS_LARGE_KEYSPACE_MAX_EXPANDED_GROUPS));
|
||||
return;
|
||||
}
|
||||
setExpandedGroupKeys(validGroupKeys);
|
||||
};
|
||||
|
||||
const renderValueEditor = () => {
|
||||
if (!keyValue || !selectedKey) {
|
||||
return <div style={{ padding: 20, textAlign: 'center', color: '#999' }}>选择一个 Key 查看详情</div>;
|
||||
@@ -754,13 +915,13 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
<div style={{ display: 'flex', flexDirection: 'column', height: '100%' }}>
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
background: '#f5f5f5',
|
||||
borderBottom: '1px solid #d9d9d9',
|
||||
background: valueToolbarBg,
|
||||
borderBottom: valueToolbarBorder,
|
||||
display: 'flex',
|
||||
justifyContent: 'space-between',
|
||||
alignItems: 'center'
|
||||
}}>
|
||||
<span style={{ fontSize: 12, color: '#666' }}>
|
||||
<span style={{ fontSize: 12, color: valueToolbarText }}>
|
||||
{encoding && `编码: ${encoding}`}
|
||||
</span>
|
||||
<Radio.Group size="small" value={viewMode} onChange={(e) => setViewMode(e.target.value)}>
|
||||
@@ -773,6 +934,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
<Editor
|
||||
height="calc(100% - 72px)"
|
||||
language={isJson ? 'json' : 'plaintext'}
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={displayValue}
|
||||
options={{
|
||||
readOnly: true,
|
||||
@@ -922,7 +1084,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
return (
|
||||
<Tooltip title={<pre style={{ maxHeight: 300, overflow: 'auto', margin: 0, fontSize: 12 }}>{tooltipContent}</pre>} styles={{ root: { maxWidth: 600 } }}>
|
||||
<span style={{
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? '#1890ff' : undefined),
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? jsonAccentColor : undefined),
|
||||
fontFamily: record.isBinary ? 'monospace' : undefined,
|
||||
fontSize: record.isBinary ? 11 : undefined
|
||||
}}>
|
||||
@@ -1101,7 +1263,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
return (
|
||||
<Tooltip title={<pre style={{ maxHeight: 300, overflow: 'auto', margin: 0, fontSize: 12 }}>{tooltipContent}</pre>} styles={{ root: { maxWidth: 600 } }}>
|
||||
<span style={{
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? '#1890ff' : undefined),
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? jsonAccentColor : undefined),
|
||||
fontFamily: record.isBinary ? 'monospace' : undefined,
|
||||
fontSize: record.isBinary ? 11 : undefined
|
||||
}}>
|
||||
@@ -1256,7 +1418,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
return (
|
||||
<Tooltip title={<pre style={{ maxHeight: 300, overflow: 'auto', margin: 0, fontSize: 12 }}>{tooltipContent}</pre>} styles={{ root: { maxWidth: 600 } }}>
|
||||
<span style={{
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? '#1890ff' : undefined),
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? jsonAccentColor : undefined),
|
||||
fontFamily: record.isBinary ? 'monospace' : undefined,
|
||||
fontSize: record.isBinary ? 11 : undefined
|
||||
}}>
|
||||
@@ -1410,7 +1572,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
return (
|
||||
<Tooltip title={<pre style={{ maxHeight: 300, overflow: 'auto', margin: 0, fontSize: 12 }}>{tooltipContent}</pre>} styles={{ root: { maxWidth: 600 } }}>
|
||||
<span style={{
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? '#1890ff' : undefined),
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? jsonAccentColor : undefined),
|
||||
fontFamily: record.isBinary ? 'monospace' : undefined,
|
||||
fontSize: record.isBinary ? 11 : undefined
|
||||
}}>
|
||||
@@ -1624,7 +1786,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
return (
|
||||
<Tooltip title={<pre style={{ maxHeight: 300, overflow: 'auto', margin: 0, fontSize: 12 }}>{tooltipContent}</pre>} styles={{ root: { maxWidth: 720 } }}>
|
||||
<span style={{
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? '#1890ff' : undefined),
|
||||
color: record.isBinary ? '#d46b08' : (record.isJson ? jsonAccentColor : undefined),
|
||||
fontFamily: record.isBinary ? 'monospace' : undefined,
|
||||
fontSize: record.isBinary ? 11 : undefined
|
||||
}}>
|
||||
@@ -1757,27 +1919,37 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
</Popconfirm>
|
||||
</div>
|
||||
</div>
|
||||
<div style={{ flex: 1, overflow: 'auto' }}>
|
||||
<Spin spinning={loading} size="small">
|
||||
<Tree
|
||||
blockNode
|
||||
showIcon={false}
|
||||
checkable
|
||||
checkStrictly
|
||||
selectable
|
||||
treeData={keyTree.treeData}
|
||||
selectedKeys={selectedTreeNodeKeys}
|
||||
checkedKeys={checkedTreeNodeKeys}
|
||||
expandedKeys={expandedGroupKeys}
|
||||
onExpand={(nextExpandedKeys) => setExpandedGroupKeys(nextExpandedKeys as string[])}
|
||||
onSelect={(nodeKeys) => handleTreeSelect(nodeKeys)}
|
||||
onCheck={(checked) => handleTreeCheck(checked)}
|
||||
style={{ padding: '8px 6px' }}
|
||||
/>
|
||||
</Spin>
|
||||
<div style={{ flex: 1, minHeight: 0, display: 'flex', flexDirection: 'column' }}>
|
||||
{isLargeKeyspace && (
|
||||
<div style={{ padding: '6px 8px', fontSize: 12, color: '#8c8c8c', borderBottom: '1px solid #f0f0f0' }}>
|
||||
已启用大数据量性能模式(简化节点渲染,最多保留 {REDIS_LARGE_KEYSPACE_MAX_EXPANDED_GROUPS} 个展开分组)
|
||||
</div>
|
||||
)}
|
||||
<div ref={treeContainerRef} style={{ flex: 1, minHeight: 0, overflow: 'hidden' }}>
|
||||
<Spin spinning={loading} size="small" style={{ width: '100%' }}>
|
||||
<Tree
|
||||
blockNode
|
||||
showIcon={false}
|
||||
checkable
|
||||
checkStrictly
|
||||
selectable
|
||||
virtual
|
||||
height={Math.max(treeHeight - 8, 220)}
|
||||
treeData={keyTree.treeData}
|
||||
titleRender={renderTreeNodeTitle}
|
||||
selectedKeys={selectedTreeNodeKeys}
|
||||
checkedKeys={checkedTreeNodeKeys}
|
||||
expandedKeys={expandedGroupKeys}
|
||||
onExpand={handleTreeExpand}
|
||||
onSelect={(nodeKeys) => handleTreeSelect(nodeKeys)}
|
||||
onCheck={(checked) => handleTreeCheck(checked)}
|
||||
style={{ padding: '8px 6px' }}
|
||||
/>
|
||||
</Spin>
|
||||
</div>
|
||||
{hasMore && (
|
||||
<div style={{ padding: 8, textAlign: 'center' }}>
|
||||
<Button onClick={handleLoadMore} loading={loading}>加载更多</Button>
|
||||
<Button onClick={handleLoadMore} loading={loading} disabled={!hasMore || loading}>加载更多</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
@@ -1807,6 +1979,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
<Editor
|
||||
height="450px"
|
||||
language={tryFormatJson(editValue).isJson ? 'json' : 'plaintext'}
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
value={editValue}
|
||||
onChange={(value) => setEditValue(value || '')}
|
||||
options={{
|
||||
@@ -1871,6 +2044,7 @@ const RedisViewer: React.FC<RedisViewerProps> = ({ connectionId, redisDB }) => {
|
||||
<Editor
|
||||
height="450px"
|
||||
language={jsonEditConfig?.isJson ? 'json' : 'plaintext'}
|
||||
theme={darkMode ? 'transparent-dark' : 'transparent-light'}
|
||||
defaultValue={jsonEditConfig?.value || ''}
|
||||
onChange={(value) => { jsonEditValueRef.current = value || ''; }}
|
||||
onMount={(editor) => { jsonEditValueRef.current = jsonEditConfig?.value || ''; }}
|
||||
|
||||
@@ -47,6 +47,8 @@ interface TreeNode {
|
||||
|
||||
type BatchTableExportMode = 'schema' | 'backup' | 'dataOnly';
|
||||
type BatchObjectType = 'table' | 'view';
|
||||
type BatchObjectFilterType = 'all' | BatchObjectType;
|
||||
type BatchSelectionScope = 'filtered' | 'all';
|
||||
|
||||
interface BatchObjectItem {
|
||||
title: string;
|
||||
@@ -133,11 +135,47 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
const [selectedConnection, setSelectedConnection] = useState<string>('');
|
||||
const [selectedDatabase, setSelectedDatabase] = useState<string>('');
|
||||
const [availableDatabases, setAvailableDatabases] = useState<any[]>([]);
|
||||
const [batchFilterKeyword, setBatchFilterKeyword] = useState<string>('');
|
||||
const [batchFilterType, setBatchFilterType] = useState<BatchObjectFilterType>('all');
|
||||
const [batchSelectionScope, setBatchSelectionScope] = useState<BatchSelectionScope>('filtered');
|
||||
const filteredBatchObjects = useMemo(() => {
|
||||
const keyword = batchFilterKeyword.trim().toLowerCase();
|
||||
return batchTables.filter((item) => {
|
||||
if (batchFilterType !== 'all' && item.objectType !== batchFilterType) {
|
||||
return false;
|
||||
}
|
||||
if (!keyword) {
|
||||
return true;
|
||||
}
|
||||
return item.title.toLowerCase().includes(keyword) || item.objectName.toLowerCase().includes(keyword);
|
||||
});
|
||||
}, [batchFilterKeyword, batchFilterType, batchTables]);
|
||||
const groupedBatchObjects = useMemo(() => {
|
||||
const tables = batchTables.filter(item => item.objectType === 'table');
|
||||
const views = batchTables.filter(item => item.objectType === 'view');
|
||||
const tables = filteredBatchObjects.filter(item => item.objectType === 'table');
|
||||
const views = filteredBatchObjects.filter(item => item.objectType === 'view');
|
||||
return { tables, views };
|
||||
}, [batchTables]);
|
||||
}, [filteredBatchObjects]);
|
||||
const allBatchObjectKeys = useMemo(() => batchTables.map(item => item.key), [batchTables]);
|
||||
const allBatchObjectKeysByType = useMemo(() => {
|
||||
if (batchFilterType === 'all') {
|
||||
return allBatchObjectKeys;
|
||||
}
|
||||
return batchTables
|
||||
.filter((item) => item.objectType === batchFilterType)
|
||||
.map((item) => item.key);
|
||||
}, [allBatchObjectKeys, batchFilterType, batchTables]);
|
||||
const filteredBatchObjectKeys = useMemo(() => filteredBatchObjects.map(item => item.key), [filteredBatchObjects]);
|
||||
const selectionScopeTargetKeys = useMemo(
|
||||
() => (batchSelectionScope === 'filtered' ? filteredBatchObjectKeys : allBatchObjectKeysByType),
|
||||
[allBatchObjectKeysByType, batchSelectionScope, filteredBatchObjectKeys]
|
||||
);
|
||||
useEffect(() => {
|
||||
if (batchFilterType === 'all') {
|
||||
return;
|
||||
}
|
||||
const allowed = new Set(allBatchObjectKeysByType);
|
||||
setCheckedTableKeys((prev) => prev.filter((key) => allowed.has(key)));
|
||||
}, [allBatchObjectKeysByType, batchFilterType]);
|
||||
|
||||
// Batch Database Operations Modal
|
||||
const [isBatchDbModalOpen, setIsBatchDbModalOpen] = useState(false);
|
||||
@@ -1313,6 +1351,9 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
setBatchTables([]);
|
||||
setCheckedTableKeys([]);
|
||||
setAvailableDatabases([]);
|
||||
setBatchFilterKeyword('');
|
||||
setBatchFilterType('all');
|
||||
setBatchSelectionScope('filtered');
|
||||
|
||||
if (connId) {
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
@@ -1413,6 +1454,9 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
setSelectedDatabase('');
|
||||
setBatchTables([]);
|
||||
setCheckedTableKeys([]);
|
||||
setBatchFilterKeyword('');
|
||||
setBatchFilterType('all');
|
||||
setBatchSelectionScope('filtered');
|
||||
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
@@ -1422,6 +1466,9 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
|
||||
const handleDatabaseChange = async (dbName: string) => {
|
||||
setSelectedDatabase(dbName);
|
||||
setBatchFilterKeyword('');
|
||||
setBatchFilterType('all');
|
||||
setBatchSelectionScope('filtered');
|
||||
|
||||
const conn = connections.find(c => c.id === selectedConnection);
|
||||
if (conn && dbName) {
|
||||
@@ -1470,17 +1517,44 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
};
|
||||
|
||||
const handleCheckAll = (checked: boolean) => {
|
||||
if (checked) {
|
||||
setCheckedTableKeys(batchTables.map(t => t.key));
|
||||
} else {
|
||||
setCheckedTableKeys([]);
|
||||
if (batchSelectionScope === 'all') {
|
||||
setCheckedTableKeys(checked ? allBatchObjectKeys : []);
|
||||
return;
|
||||
}
|
||||
if (filteredBatchObjectKeys.length === 0) {
|
||||
return;
|
||||
}
|
||||
if (checked) {
|
||||
setCheckedTableKeys(prev => {
|
||||
const nextSet = new Set(prev);
|
||||
filteredBatchObjectKeys.forEach((key) => nextSet.add(key));
|
||||
return allBatchObjectKeys.filter((key) => nextSet.has(key));
|
||||
});
|
||||
return;
|
||||
}
|
||||
const filteredKeySet = new Set(filteredBatchObjectKeys);
|
||||
setCheckedTableKeys(prev => prev.filter((key) => !filteredKeySet.has(key)));
|
||||
};
|
||||
|
||||
const handleInvertSelection = () => {
|
||||
const allKeys = batchTables.map(t => t.key);
|
||||
const newChecked = allKeys.filter(k => !checkedTableKeys.includes(k));
|
||||
setCheckedTableKeys(newChecked);
|
||||
if (batchSelectionScope === 'all') {
|
||||
setCheckedTableKeys(prev => allBatchObjectKeys.filter((key) => !prev.includes(key)));
|
||||
return;
|
||||
}
|
||||
if (filteredBatchObjectKeys.length === 0) {
|
||||
return;
|
||||
}
|
||||
setCheckedTableKeys(prev => {
|
||||
const nextSet = new Set(prev);
|
||||
filteredBatchObjectKeys.forEach((key) => {
|
||||
if (nextSet.has(key)) {
|
||||
nextSet.delete(key);
|
||||
} else {
|
||||
nextSet.add(key);
|
||||
}
|
||||
});
|
||||
return allBatchObjectKeys.filter((key) => nextSet.has(key));
|
||||
});
|
||||
};
|
||||
|
||||
const openBatchDatabaseModal = async () => {
|
||||
@@ -2874,6 +2948,43 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{batchTables.length > 0 && (
|
||||
<div style={{ marginBottom: 16 }}>
|
||||
<Space wrap size={8} style={{ width: '100%' }}>
|
||||
<Input
|
||||
allowClear
|
||||
value={batchFilterKeyword}
|
||||
onChange={(e) => setBatchFilterKeyword(e.target.value)}
|
||||
placeholder="筛选表/视图名称"
|
||||
prefix={<SearchOutlined />}
|
||||
style={{ width: 260 }}
|
||||
/>
|
||||
<Select
|
||||
value={batchFilterType}
|
||||
onChange={(value) => setBatchFilterType(value as BatchObjectFilterType)}
|
||||
style={{ width: 140 }}
|
||||
options={[
|
||||
{ label: '全部对象', value: 'all' },
|
||||
{ label: '仅表', value: 'table' },
|
||||
{ label: '仅视图', value: 'view' },
|
||||
]}
|
||||
/>
|
||||
<Select
|
||||
value={batchSelectionScope}
|
||||
onChange={(value) => setBatchSelectionScope(value as BatchSelectionScope)}
|
||||
style={{ width: 220 }}
|
||||
options={[
|
||||
{ label: '勾选作用于:当前筛选结果', value: 'filtered' },
|
||||
{ label: '勾选作用于:全部对象', value: 'all' },
|
||||
]}
|
||||
/>
|
||||
</Space>
|
||||
<div style={{ marginTop: 6, color: '#999', fontSize: 12 }}>
|
||||
当前筛选命中 {filteredBatchObjects.length} / {batchTables.length} 个对象
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{batchTables.length > 0 && (
|
||||
<>
|
||||
<div style={{ marginBottom: 16 }}>
|
||||
@@ -2881,18 +2992,21 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
<Button
|
||||
size="small"
|
||||
onClick={() => handleCheckAll(true)}
|
||||
disabled={selectionScopeTargetKeys.length === 0}
|
||||
>
|
||||
全选
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
onClick={() => handleCheckAll(false)}
|
||||
disabled={selectionScopeTargetKeys.length === 0}
|
||||
>
|
||||
取消全选
|
||||
</Button>
|
||||
<Button
|
||||
size="small"
|
||||
onClick={handleInvertSelection}
|
||||
disabled={selectionScopeTargetKeys.length === 0}
|
||||
>
|
||||
反选
|
||||
</Button>
|
||||
@@ -2938,6 +3052,11 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
</Space>
|
||||
</div>
|
||||
)}
|
||||
{groupedBatchObjects.tables.length === 0 && groupedBatchObjects.views.length === 0 && (
|
||||
<div style={{ color: '#999', padding: '8px 0' }}>
|
||||
无匹配对象
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Checkbox.Group>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import React, { useMemo, useRef, useState } from 'react';
|
||||
import { Tabs, Dropdown } from 'antd';
|
||||
import type { MenuProps } from 'antd';
|
||||
import type { MenuProps, TabsProps } from 'antd';
|
||||
import { DndContext, PointerSensor, closestCenter, useSensor, useSensors } from '@dnd-kit/core';
|
||||
import type { DragStartEvent, DragEndEvent } from '@dnd-kit/core';
|
||||
import { SortableContext, useSortable, horizontalListSortingStrategy } from '@dnd-kit/sortable';
|
||||
import { CSS } from '@dnd-kit/utilities';
|
||||
import { restrictToHorizontalAxis } from '@dnd-kit/modifiers';
|
||||
import { useStore } from '../store';
|
||||
import DataViewer from './DataViewer';
|
||||
import QueryEditor from './QueryEditor';
|
||||
@@ -29,9 +34,58 @@ const buildTabDisplayTitle = (tab: TabData, connectionName: string | undefined):
|
||||
return `[${prefix}] ${tab.title}`;
|
||||
};
|
||||
|
||||
type SortableTabLabelProps = {
|
||||
displayTitle: string;
|
||||
menuItems: MenuProps['items'];
|
||||
};
|
||||
|
||||
const SortableTabLabel: React.FC<SortableTabLabelProps> = ({
|
||||
displayTitle,
|
||||
menuItems,
|
||||
}) => {
|
||||
return (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span
|
||||
className="tab-dnd-label"
|
||||
onContextMenu={(e) => e.preventDefault()}
|
||||
title="拖拽调整标签顺序"
|
||||
>
|
||||
{displayTitle}
|
||||
</span>
|
||||
</Dropdown>
|
||||
);
|
||||
};
|
||||
|
||||
type DraggableTabNodeProps = {
|
||||
node: React.ReactElement;
|
||||
};
|
||||
|
||||
const DraggableTabNode: React.FC<DraggableTabNodeProps> = ({ node }) => {
|
||||
const tabId = String(node.key || '').trim();
|
||||
const { attributes, listeners, setNodeRef, transform, transition, isDragging } = useSortable({ id: tabId });
|
||||
const style: React.CSSProperties = {
|
||||
...(node.props.style || {}),
|
||||
transform: CSS.Transform.toString(transform),
|
||||
transition: transition || 'transform 180ms cubic-bezier(0.22, 1, 0.36, 1)',
|
||||
opacity: isDragging ? 0.88 : 1,
|
||||
cursor: isDragging ? 'grabbing' : 'grab',
|
||||
touchAction: 'none',
|
||||
zIndex: isDragging ? 2 : node.props.style?.zIndex,
|
||||
};
|
||||
|
||||
return React.cloneElement(node, {
|
||||
ref: setNodeRef,
|
||||
style,
|
||||
...attributes,
|
||||
...listeners,
|
||||
className: `${node.props.className || ''} tab-dnd-node${isDragging ? ' is-dragging' : ''}`,
|
||||
});
|
||||
};
|
||||
|
||||
const TabManager: React.FC = () => {
|
||||
const tabs = useStore(state => state.tabs);
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const activeTabId = useStore(state => state.activeTabId);
|
||||
const setActiveTab = useStore(state => state.setActiveTab);
|
||||
const closeTab = useStore(state => state.closeTab);
|
||||
@@ -39,6 +93,15 @@ const TabManager: React.FC = () => {
|
||||
const closeTabsToLeft = useStore(state => state.closeTabsToLeft);
|
||||
const closeTabsToRight = useStore(state => state.closeTabsToRight);
|
||||
const closeAllTabs = useStore(state => state.closeAllTabs);
|
||||
const moveTab = useStore(state => state.moveTab);
|
||||
const tabsNavBorderColor = theme === 'dark' ? 'rgba(255, 255, 255, 0.09)' : 'rgba(0, 0, 0, 0.08)';
|
||||
const [draggingTabId, setDraggingTabId] = useState<string | null>(null);
|
||||
const suppressClickUntilRef = useRef<number>(0);
|
||||
const sensors = useSensors(
|
||||
useSensor(PointerSensor, {
|
||||
activationConstraint: { distance: 8 },
|
||||
})
|
||||
);
|
||||
|
||||
const onChange = (newActiveKey: string) => {
|
||||
setActiveTab(newActiveKey);
|
||||
@@ -50,11 +113,43 @@ const TabManager: React.FC = () => {
|
||||
}
|
||||
};
|
||||
|
||||
const handleDragStart = (event: DragStartEvent) => {
|
||||
const sourceId = String(event.active.id || '').trim();
|
||||
setDraggingTabId(sourceId || null);
|
||||
};
|
||||
|
||||
const handleDragEnd = (event: DragEndEvent) => {
|
||||
const sourceId = String(event.active.id || '').trim();
|
||||
const targetId = String(event.over?.id || '').trim();
|
||||
setDraggingTabId(null);
|
||||
if (!sourceId || !targetId || sourceId === targetId) {
|
||||
return;
|
||||
}
|
||||
suppressClickUntilRef.current = Date.now() + 120;
|
||||
moveTab(sourceId, targetId);
|
||||
};
|
||||
|
||||
const handleDragCancel = () => {
|
||||
setDraggingTabId(null);
|
||||
};
|
||||
|
||||
const tabIds = useMemo(() => tabs.map((tab) => tab.id), [tabs]);
|
||||
|
||||
const renderTabBar: TabsProps['renderTabBar'] = (tabBarProps, DefaultTabBar) => (
|
||||
<DefaultTabBar {...tabBarProps}>
|
||||
{(node) => <DraggableTabNode key={node.key} node={node} />}
|
||||
</DefaultTabBar>
|
||||
);
|
||||
|
||||
const items = useMemo(() => tabs.map((tab, index) => {
|
||||
const connectionName = connections.find((conn) => conn.id === tab.connectionId)?.name;
|
||||
const displayTitle = buildTabDisplayTitle(tab, connectionName);
|
||||
const keepMountedWhenInactive = tab.type === 'query' || tab.type === 'redis-command';
|
||||
const shouldRenderContent = activeTabId === tab.id || keepMountedWhenInactive;
|
||||
let content;
|
||||
if (tab.type === 'query') {
|
||||
if (!shouldRenderContent) {
|
||||
content = null;
|
||||
} else if (tab.type === 'query') {
|
||||
content = <QueryEditor tab={tab} />;
|
||||
} else if (tab.type === 'table') {
|
||||
content = <DataViewer tab={tab} />;
|
||||
@@ -100,14 +195,15 @@ const TabManager: React.FC = () => {
|
||||
|
||||
return {
|
||||
label: (
|
||||
<Dropdown menu={{ items: menuItems }} trigger={['contextMenu']}>
|
||||
<span onContextMenu={(e) => e.preventDefault()}>{displayTitle}</span>
|
||||
</Dropdown>
|
||||
<SortableTabLabel
|
||||
displayTitle={displayTitle}
|
||||
menuItems={menuItems}
|
||||
/>
|
||||
),
|
||||
key: tab.id,
|
||||
children: content,
|
||||
};
|
||||
}), [tabs, connections, closeOtherTabs, closeTabsToLeft, closeTabsToRight, closeAllTabs]);
|
||||
}), [tabs, connections, activeTabId, closeOtherTabs, closeTabsToLeft, closeTabsToRight, closeAllTabs]);
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -156,18 +252,63 @@ const TabManager: React.FC = () => {
|
||||
display: none !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-nav::before {
|
||||
border-bottom: none !important;
|
||||
border-bottom: 1px solid ${tabsNavBorderColor} !important;
|
||||
}
|
||||
.main-tabs .ant-tabs-tab {
|
||||
transition: transform 180ms cubic-bezier(0.22, 1, 0.36, 1), background-color 120ms ease;
|
||||
}
|
||||
.main-tabs .tab-dnd-label {
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
max-width: 100%;
|
||||
}
|
||||
.main-tabs .tab-dnd-node.is-dragging,
|
||||
.main-tabs .tab-dnd-node.is-dragging .tab-dnd-label {
|
||||
cursor: grabbing !important;
|
||||
}
|
||||
body[data-theme='dark'] .main-tabs .ant-tabs-tab-btn:focus-visible {
|
||||
outline: none !important;
|
||||
border-radius: 6px;
|
||||
box-shadow: 0 0 0 2px rgba(255, 214, 102, 0.72);
|
||||
background: rgba(255, 214, 102, 0.16);
|
||||
}
|
||||
body[data-theme='light'] .main-tabs .ant-tabs-tab-btn:focus-visible {
|
||||
outline: none !important;
|
||||
border-radius: 6px;
|
||||
box-shadow: 0 0 0 2px rgba(9, 109, 217, 0.32);
|
||||
background: rgba(9, 109, 217, 0.08);
|
||||
}
|
||||
body[data-theme='dark'] .main-tabs .ant-tabs-tab.ant-tabs-tab-active {
|
||||
background: rgba(255, 214, 102, 0.12) !important;
|
||||
border-color: rgba(255, 214, 102, 0.4) !important;
|
||||
}
|
||||
`}</style>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
onChange={onChange}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
hideAdd
|
||||
/>
|
||||
<DndContext
|
||||
sensors={sensors}
|
||||
collisionDetection={closestCenter}
|
||||
modifiers={[restrictToHorizontalAxis]}
|
||||
onDragStart={handleDragStart}
|
||||
onDragEnd={handleDragEnd}
|
||||
onDragCancel={handleDragCancel}
|
||||
>
|
||||
<SortableContext items={tabIds} strategy={horizontalListSortingStrategy}>
|
||||
<Tabs
|
||||
className="main-tabs"
|
||||
type="editable-card"
|
||||
onChange={(newActiveKey) => {
|
||||
if (Date.now() < suppressClickUntilRef.current) return;
|
||||
onChange(newActiveKey);
|
||||
}}
|
||||
activeKey={activeTabId || undefined}
|
||||
onEdit={onEdit}
|
||||
items={items}
|
||||
hideAdd
|
||||
renderTabBar={renderTabBar}
|
||||
/>
|
||||
</SortableContext>
|
||||
</DndContext>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -259,6 +259,7 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const connections = useStore(state => state.connections);
|
||||
const theme = useStore(state => state.theme);
|
||||
const darkMode = theme === 'dark';
|
||||
const resizeGuideColor = darkMode ? '#f6c453' : '#1890ff';
|
||||
const readOnly = !!tab.readOnly;
|
||||
|
||||
const [tableHeight, setTableHeight] = useState(500);
|
||||
@@ -1973,7 +1974,7 @@ END;`;
|
||||
bottom: 0,
|
||||
left: 0,
|
||||
width: '2px',
|
||||
background: '#1890ff',
|
||||
background: resizeGuideColor,
|
||||
zIndex: 9999,
|
||||
display: 'none',
|
||||
pointerEvents: 'none',
|
||||
|
||||
@@ -6,21 +6,7 @@ import App from './App'
|
||||
// 全局配置 Monaco Editor 使用本地打包的文件,避免从 CDN (jsdelivr) 加载。
|
||||
// Windows WebView2 环境下访问外部 CDN 可能失败,导致编辑器一直显示 Loading。
|
||||
import { loader } from '@monaco-editor/react'
|
||||
import * as monaco from 'monaco-editor/esm/vs/editor/editor.api.js'
|
||||
import EditorWorker from 'monaco-editor/esm/vs/editor/editor.worker.js?worker'
|
||||
import JsonWorker from 'monaco-editor/esm/vs/language/json/json.worker.js?worker'
|
||||
import 'monaco-editor/esm/vs/basic-languages/sql/sql.contribution.js'
|
||||
import 'monaco-editor/esm/vs/language/json/monaco.contribution.js'
|
||||
|
||||
(self as any).MonacoEnvironment = {
|
||||
getWorker(_: unknown, label: string) {
|
||||
if (label === 'json') {
|
||||
return new JsonWorker()
|
||||
}
|
||||
return new EditorWorker()
|
||||
},
|
||||
}
|
||||
|
||||
import * as monaco from 'monaco-editor'
|
||||
loader.config({ monaco })
|
||||
|
||||
// 全局注册透明主题,避免每个 Editor 组件 beforeMount 中重复定义
|
||||
|
||||
@@ -1,8 +1,15 @@
|
||||
import { create } from 'zustand';
|
||||
import { persist } from 'zustand/middleware';
|
||||
import { ConnectionConfig, SavedConnection, TabData, SavedQuery } from './types';
|
||||
import { ConnectionConfig, ProxyConfig, SavedConnection, TabData, SavedQuery } from './types';
|
||||
|
||||
const DEFAULT_APPEARANCE = { opacity: 1.0, blur: 0 };
|
||||
const DEFAULT_UI_SCALE = 1.0;
|
||||
const MIN_UI_SCALE = 0.8;
|
||||
const MAX_UI_SCALE = 1.25;
|
||||
const DEFAULT_FONT_SIZE = 14;
|
||||
const MIN_FONT_SIZE = 12;
|
||||
const MAX_FONT_SIZE = 20;
|
||||
const DEFAULT_STARTUP_FULLSCREEN = false;
|
||||
const LEGACY_DEFAULT_OPACITY = 0.95;
|
||||
const OPACITY_EPSILON = 1e-6;
|
||||
const MAX_URI_LENGTH = 4096;
|
||||
@@ -10,12 +17,23 @@ const MAX_HOST_ENTRY_LENGTH = 512;
|
||||
const MAX_HOST_ENTRIES = 64;
|
||||
const DEFAULT_TIMEOUT_SECONDS = 30;
|
||||
const MAX_TIMEOUT_SECONDS = 3600;
|
||||
const PERSIST_VERSION = 4;
|
||||
const DEFAULT_CONNECTION_TYPE = 'mysql';
|
||||
const DEFAULT_GLOBAL_PROXY: GlobalProxyConfig = {
|
||||
enabled: false,
|
||||
type: 'socks5',
|
||||
host: '',
|
||||
port: 1080,
|
||||
user: '',
|
||||
password: '',
|
||||
};
|
||||
const SUPPORTED_CONNECTION_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'doris',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'clickhouse',
|
||||
'postgres',
|
||||
'redis',
|
||||
'tdengine',
|
||||
@@ -36,12 +54,15 @@ const getDefaultPortByType = (type: string): number => {
|
||||
case 'mysql':
|
||||
case 'mariadb':
|
||||
return 3306;
|
||||
case 'doris':
|
||||
case 'diros':
|
||||
return 9030;
|
||||
case 'duckdb':
|
||||
return 0;
|
||||
case 'sphinx':
|
||||
return 9306;
|
||||
case 'clickhouse':
|
||||
return 9000;
|
||||
case 'postgres':
|
||||
case 'vastbase':
|
||||
return 5432;
|
||||
@@ -92,6 +113,13 @@ const normalizeIntegerInRange = (value: unknown, fallbackValue: number, min: num
|
||||
return normalized;
|
||||
};
|
||||
|
||||
const normalizeFloatInRange = (value: unknown, fallbackValue: number, min: number, max: number): number => {
|
||||
const parsed = Number(value);
|
||||
if (!Number.isFinite(parsed)) return fallbackValue;
|
||||
if (parsed < min || parsed > max) return fallbackValue;
|
||||
return parsed;
|
||||
};
|
||||
|
||||
const isValidHostEntry = (entry: string): boolean => {
|
||||
if (!entry) return false;
|
||||
if (entry.length > MAX_HOST_ENTRY_LENGTH) return false;
|
||||
@@ -137,6 +165,9 @@ const sanitizeAddressList = (value: unknown): string[] => {
|
||||
|
||||
const normalizeConnectionType = (value: unknown): string => {
|
||||
const type = toTrimmedString(value).toLowerCase();
|
||||
if (type === 'doris') {
|
||||
return 'diros';
|
||||
}
|
||||
return SUPPORTED_CONNECTION_TYPES.has(type) ? type : DEFAULT_CONNECTION_TYPE;
|
||||
};
|
||||
|
||||
@@ -155,6 +186,16 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
password: toTrimmedString(sshRaw.password),
|
||||
keyPath: toTrimmedString(sshRaw.keyPath),
|
||||
};
|
||||
const proxyRaw = (raw.proxy && typeof raw.proxy === 'object') ? raw.proxy as Record<string, unknown> : {};
|
||||
const proxyTypeRaw = toTrimmedString(proxyRaw.type, 'socks5').toLowerCase();
|
||||
const proxyType: 'socks5' | 'http' = proxyTypeRaw === 'http' ? 'http' : 'socks5';
|
||||
const proxy = {
|
||||
type: proxyType,
|
||||
host: toTrimmedString(proxyRaw.host),
|
||||
port: normalizePort(proxyRaw.port, proxyTypeRaw === 'http' ? 8080 : 1080),
|
||||
user: toTrimmedString(proxyRaw.user),
|
||||
password: toTrimmedString(proxyRaw.password),
|
||||
};
|
||||
|
||||
const safeConfig: ConnectionConfig & Record<string, unknown> = {
|
||||
...raw,
|
||||
@@ -167,9 +208,11 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
database: toTrimmedString(raw.database),
|
||||
useSSH: !!raw.useSSH,
|
||||
ssh,
|
||||
useProxy: !!raw.useProxy,
|
||||
proxy,
|
||||
uri: toTrimmedString(raw.uri).slice(0, MAX_URI_LENGTH),
|
||||
hosts: sanitizeAddressList(raw.hosts),
|
||||
topology: raw.topology === 'replica' ? 'replica' : 'single',
|
||||
topology: raw.topology === 'replica' ? 'replica' : (raw.topology === 'cluster' ? 'cluster' : 'single'),
|
||||
mysqlReplicaUser: toTrimmedString(raw.mysqlReplicaUser),
|
||||
mysqlReplicaPassword: savePassword ? toTrimmedString(raw.mysqlReplicaPassword) : '',
|
||||
replicaSet: toTrimmedString(raw.replicaSet),
|
||||
@@ -194,12 +237,30 @@ const sanitizeConnectionConfig = (value: unknown): ConnectionConfig => {
|
||||
return safeConfig;
|
||||
};
|
||||
|
||||
const resolveConnectionConfigPayload = (raw: Record<string, unknown>): unknown => {
|
||||
if (raw.config && typeof raw.config === 'object') {
|
||||
return raw.config;
|
||||
}
|
||||
// 兼容历史/导入场景:连接对象可能是扁平结构(无 config 包装)。
|
||||
const hasLegacyFlatConfig =
|
||||
raw.type !== undefined ||
|
||||
raw.host !== undefined ||
|
||||
raw.port !== undefined ||
|
||||
raw.user !== undefined ||
|
||||
raw.database !== undefined;
|
||||
if (hasLegacyFlatConfig) {
|
||||
return raw;
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const sanitizeSavedConnection = (value: unknown, index: number): SavedConnection | null => {
|
||||
if (!value || typeof value !== 'object') return null;
|
||||
const raw = value as Record<string, unknown>;
|
||||
const config = sanitizeConnectionConfig(raw.config);
|
||||
const config = sanitizeConnectionConfig(resolveConnectionConfigPayload(raw));
|
||||
const id = toTrimmedString(raw.id, `conn-${index + 1}`) || `conn-${index + 1}`;
|
||||
const fallbackName = config.host ? `${config.type}-${config.host}` : `连接-${index + 1}`;
|
||||
const displayType = config.type === 'diros' ? 'doris' : config.type;
|
||||
const fallbackName = config.host ? `${displayType}-${config.host}` : `连接-${index + 1}`;
|
||||
const name = toTrimmedString(raw.name, fallbackName) || fallbackName;
|
||||
const includeDatabases = sanitizeStringArray(raw.includeDatabases, 256);
|
||||
const includeRedisDatabases = sanitizeNumberArray(raw.includeRedisDatabases, 0, 15);
|
||||
@@ -258,6 +319,10 @@ export interface QueryOptions {
|
||||
showColumnType: boolean;
|
||||
}
|
||||
|
||||
export interface GlobalProxyConfig extends ProxyConfig {
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
interface AppState {
|
||||
connections: SavedConnection[];
|
||||
tabs: TabData[];
|
||||
@@ -266,6 +331,10 @@ interface AppState {
|
||||
savedQueries: SavedQuery[];
|
||||
theme: 'light' | 'dark';
|
||||
appearance: { opacity: number; blur: number };
|
||||
uiScale: number;
|
||||
fontSize: number;
|
||||
startupFullscreen: boolean;
|
||||
globalProxy: GlobalProxyConfig;
|
||||
sqlFormatOptions: { keywordCase: 'upper' | 'lower' };
|
||||
queryOptions: QueryOptions;
|
||||
sqlLogs: SqlLog[];
|
||||
@@ -283,6 +352,7 @@ interface AppState {
|
||||
closeTabsToRight: (id: string) => void;
|
||||
closeTabsByConnection: (connectionId: string) => void;
|
||||
closeTabsByDatabase: (connectionId: string, dbName: string) => void;
|
||||
moveTab: (sourceId: string, targetId: string) => void;
|
||||
closeAllTabs: () => void;
|
||||
setActiveTab: (id: string) => void;
|
||||
setActiveContext: (context: { connectionId: string; dbName: string } | null) => void;
|
||||
@@ -292,6 +362,10 @@ interface AppState {
|
||||
|
||||
setTheme: (theme: 'light' | 'dark') => void;
|
||||
setAppearance: (appearance: Partial<{ opacity: number; blur: number }>) => void;
|
||||
setUiScale: (scale: number) => void;
|
||||
setFontSize: (size: number) => void;
|
||||
setStartupFullscreen: (enabled: boolean) => void;
|
||||
setGlobalProxy: (proxy: Partial<GlobalProxyConfig>) => void;
|
||||
setSqlFormatOptions: (options: { keywordCase: 'upper' | 'lower' }) => void;
|
||||
setQueryOptions: (options: Partial<QueryOptions>) => void;
|
||||
|
||||
@@ -380,6 +454,44 @@ const sanitizeAppearance = (
|
||||
return nextAppearance;
|
||||
};
|
||||
|
||||
const sanitizeStartupFullscreen = (value: unknown): boolean => {
|
||||
return value === true;
|
||||
};
|
||||
|
||||
const sanitizeUiScale = (value: unknown): number => {
|
||||
return normalizeFloatInRange(value, DEFAULT_UI_SCALE, MIN_UI_SCALE, MAX_UI_SCALE);
|
||||
};
|
||||
|
||||
const sanitizeFontSize = (value: unknown): number => {
|
||||
return normalizeIntegerInRange(value, DEFAULT_FONT_SIZE, MIN_FONT_SIZE, MAX_FONT_SIZE);
|
||||
};
|
||||
|
||||
const sanitizeGlobalProxy = (value: unknown): GlobalProxyConfig => {
|
||||
const raw = (value && typeof value === 'object') ? value as Record<string, unknown> : {};
|
||||
const typeRaw = toTrimmedString(raw.type, DEFAULT_GLOBAL_PROXY.type).toLowerCase();
|
||||
const type: 'socks5' | 'http' = typeRaw === 'http' ? 'http' : 'socks5';
|
||||
const fallbackPort = type === 'http' ? 8080 : 1080;
|
||||
return {
|
||||
enabled: raw.enabled === true,
|
||||
type,
|
||||
host: toTrimmedString(raw.host),
|
||||
port: normalizePort(raw.port, fallbackPort),
|
||||
user: toTrimmedString(raw.user),
|
||||
password: toTrimmedString(raw.password),
|
||||
};
|
||||
};
|
||||
|
||||
const unwrapPersistedAppState = (persistedState: unknown): Record<string, unknown> => {
|
||||
if (!persistedState || typeof persistedState !== 'object') {
|
||||
return {};
|
||||
}
|
||||
const raw = persistedState as Record<string, unknown>;
|
||||
if (raw.state && typeof raw.state === 'object') {
|
||||
return raw.state as Record<string, unknown>;
|
||||
}
|
||||
return raw;
|
||||
};
|
||||
|
||||
export const useStore = create<AppState>()(
|
||||
persist(
|
||||
(set) => ({
|
||||
@@ -390,6 +502,10 @@ export const useStore = create<AppState>()(
|
||||
savedQueries: [],
|
||||
theme: 'light',
|
||||
appearance: { ...DEFAULT_APPEARANCE },
|
||||
uiScale: DEFAULT_UI_SCALE,
|
||||
fontSize: DEFAULT_FONT_SIZE,
|
||||
startupFullscreen: DEFAULT_STARTUP_FULLSCREEN,
|
||||
globalProxy: { ...DEFAULT_GLOBAL_PROXY },
|
||||
sqlFormatOptions: { keywordCase: 'upper' },
|
||||
queryOptions: { maxRows: 5000, showColumnComment: true, showColumnType: true },
|
||||
sqlLogs: [],
|
||||
@@ -483,6 +599,23 @@ export const useStore = create<AppState>()(
|
||||
};
|
||||
}),
|
||||
|
||||
moveTab: (sourceId, targetId) => set((state) => {
|
||||
const fromId = String(sourceId || '').trim();
|
||||
const toId = String(targetId || '').trim();
|
||||
if (!fromId || !toId || fromId === toId) {
|
||||
return state;
|
||||
}
|
||||
const fromIndex = state.tabs.findIndex((tab) => tab.id === fromId);
|
||||
const toIndex = state.tabs.findIndex((tab) => tab.id === toId);
|
||||
if (fromIndex < 0 || toIndex < 0 || fromIndex === toIndex) {
|
||||
return state;
|
||||
}
|
||||
const nextTabs = [...state.tabs];
|
||||
const [movingTab] = nextTabs.splice(fromIndex, 1);
|
||||
nextTabs.splice(toIndex, 0, movingTab);
|
||||
return { tabs: nextTabs };
|
||||
}),
|
||||
|
||||
closeAllTabs: () => set(() => ({ tabs: [], activeTabId: null })),
|
||||
|
||||
setActiveTab: (id) => set({ activeTabId: id }),
|
||||
@@ -501,6 +634,10 @@ export const useStore = create<AppState>()(
|
||||
|
||||
setTheme: (theme) => set({ theme }),
|
||||
setAppearance: (appearance) => set((state) => ({ appearance: { ...state.appearance, ...appearance } })),
|
||||
setUiScale: (scale) => set({ uiScale: sanitizeUiScale(scale) }),
|
||||
setFontSize: (size) => set({ fontSize: sanitizeFontSize(size) }),
|
||||
setStartupFullscreen: (enabled) => set({ startupFullscreen: !!enabled }),
|
||||
setGlobalProxy: (proxy) => set((state) => ({ globalProxy: sanitizeGlobalProxy({ ...state.globalProxy, ...proxy }) })),
|
||||
setSqlFormatOptions: (options) => set({ sqlFormatOptions: options }),
|
||||
setQueryOptions: (options) => set((state) => ({ queryOptions: { ...state.queryOptions, ...options } })),
|
||||
|
||||
@@ -530,17 +667,18 @@ export const useStore = create<AppState>()(
|
||||
}),
|
||||
{
|
||||
name: 'lite-db-storage', // name of the item in the storage (must be unique)
|
||||
version: 3,
|
||||
version: PERSIST_VERSION,
|
||||
migrate: (persistedState: unknown, version: number) => {
|
||||
if (!persistedState || typeof persistedState !== 'object') {
|
||||
return persistedState as AppState;
|
||||
}
|
||||
const state = persistedState as Partial<AppState>;
|
||||
const state = unwrapPersistedAppState(persistedState) as Partial<AppState>;
|
||||
const nextState: Partial<AppState> = { ...state };
|
||||
nextState.connections = sanitizeConnections(state.connections);
|
||||
nextState.savedQueries = sanitizeSavedQueries(state.savedQueries);
|
||||
nextState.theme = sanitizeTheme(state.theme);
|
||||
nextState.appearance = sanitizeAppearance(state.appearance, version);
|
||||
nextState.uiScale = sanitizeUiScale(state.uiScale);
|
||||
nextState.fontSize = sanitizeFontSize(state.fontSize);
|
||||
nextState.startupFullscreen = sanitizeStartupFullscreen(state.startupFullscreen);
|
||||
nextState.globalProxy = sanitizeGlobalProxy(state.globalProxy);
|
||||
nextState.sqlFormatOptions = sanitizeSqlFormatOptions(state.sqlFormatOptions);
|
||||
nextState.queryOptions = sanitizeQueryOptions(state.queryOptions);
|
||||
nextState.tableAccessCount = sanitizeTableAccessCount(state.tableAccessCount);
|
||||
@@ -548,16 +686,18 @@ export const useStore = create<AppState>()(
|
||||
return nextState as AppState;
|
||||
},
|
||||
merge: (persistedState, currentState) => {
|
||||
const state = (persistedState && typeof persistedState === 'object')
|
||||
? persistedState as Partial<AppState>
|
||||
: {};
|
||||
const state = unwrapPersistedAppState(persistedState) as Partial<AppState>;
|
||||
return {
|
||||
...currentState,
|
||||
...state,
|
||||
connections: sanitizeConnections(state.connections),
|
||||
savedQueries: sanitizeSavedQueries(state.savedQueries),
|
||||
theme: sanitizeTheme(state.theme),
|
||||
appearance: sanitizeAppearance(state.appearance, 3),
|
||||
appearance: sanitizeAppearance(state.appearance, PERSIST_VERSION),
|
||||
uiScale: sanitizeUiScale(state.uiScale),
|
||||
fontSize: sanitizeFontSize(state.fontSize),
|
||||
startupFullscreen: sanitizeStartupFullscreen(state.startupFullscreen),
|
||||
globalProxy: sanitizeGlobalProxy(state.globalProxy),
|
||||
sqlFormatOptions: sanitizeSqlFormatOptions(state.sqlFormatOptions),
|
||||
queryOptions: sanitizeQueryOptions(state.queryOptions),
|
||||
tableAccessCount: sanitizeTableAccessCount(state.tableAccessCount),
|
||||
@@ -569,6 +709,10 @@ export const useStore = create<AppState>()(
|
||||
savedQueries: state.savedQueries,
|
||||
theme: state.theme,
|
||||
appearance: state.appearance,
|
||||
uiScale: state.uiScale,
|
||||
fontSize: state.fontSize,
|
||||
startupFullscreen: state.startupFullscreen,
|
||||
globalProxy: state.globalProxy,
|
||||
sqlFormatOptions: state.sqlFormatOptions,
|
||||
queryOptions: state.queryOptions,
|
||||
tableAccessCount: state.tableAccessCount,
|
||||
|
||||
@@ -6,6 +6,14 @@ export interface SSHConfig {
|
||||
keyPath?: string;
|
||||
}
|
||||
|
||||
export interface ProxyConfig {
|
||||
type: 'socks5' | 'http';
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface ConnectionConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
@@ -16,13 +24,15 @@ export interface ConnectionConfig {
|
||||
database?: string;
|
||||
useSSH?: boolean;
|
||||
ssh?: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
redisDB?: number; // Redis database index (0-15)
|
||||
uri?: string; // Connection URI for copy/paste
|
||||
hosts?: string[]; // Multi-host addresses: host:port
|
||||
topology?: 'single' | 'replica';
|
||||
topology?: 'single' | 'replica' | 'cluster';
|
||||
mysqlReplicaUser?: string;
|
||||
mysqlReplicaPassword?: string;
|
||||
replicaSet?: string;
|
||||
@@ -127,7 +137,7 @@ export interface RedisKeyInfo {
|
||||
|
||||
export interface RedisScanResult {
|
||||
keys: RedisKeyInfo[];
|
||||
cursor: number;
|
||||
cursor: string;
|
||||
}
|
||||
|
||||
export interface RedisValue {
|
||||
|
||||
86
frontend/src/utils/dataSourceCapabilities.ts
Normal file
86
frontend/src/utils/dataSourceCapabilities.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import type { ConnectionConfig } from '../types';
|
||||
|
||||
type ConnectionLike = Pick<ConnectionConfig, 'type' | 'driver'> | null | undefined;
|
||||
|
||||
const normalizeDataSourceToken = (raw: string): string => {
|
||||
const normalized = String(raw || '').trim().toLowerCase();
|
||||
switch (normalized) {
|
||||
case 'doris':
|
||||
return 'diros';
|
||||
case 'postgresql':
|
||||
return 'postgres';
|
||||
case 'dm':
|
||||
return 'dameng';
|
||||
default:
|
||||
return normalized;
|
||||
}
|
||||
};
|
||||
|
||||
export const resolveDataSourceType = (config: ConnectionLike): string => {
|
||||
if (!config) return '';
|
||||
const type = normalizeDataSourceToken(String(config.type || ''));
|
||||
if (type === 'custom') {
|
||||
const driver = normalizeDataSourceToken(String(config.driver || ''));
|
||||
return driver || 'custom';
|
||||
}
|
||||
return type;
|
||||
};
|
||||
|
||||
const SQL_QUERY_EXPORT_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlserver',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'tdengine',
|
||||
'clickhouse',
|
||||
]);
|
||||
|
||||
const COPY_INSERT_TYPES = new Set([
|
||||
'mysql',
|
||||
'mariadb',
|
||||
'diros',
|
||||
'sphinx',
|
||||
'postgres',
|
||||
'kingbase',
|
||||
'highgo',
|
||||
'vastbase',
|
||||
'sqlserver',
|
||||
'sqlite',
|
||||
'duckdb',
|
||||
'oracle',
|
||||
'dameng',
|
||||
'tdengine',
|
||||
'clickhouse',
|
||||
]);
|
||||
|
||||
const QUERY_EDITOR_DISABLED_TYPES = new Set(['redis']);
|
||||
const FORCE_READ_ONLY_QUERY_TYPES = new Set(['tdengine', 'clickhouse']);
|
||||
|
||||
export type DataSourceCapabilities = {
|
||||
type: string;
|
||||
supportsQueryEditor: boolean;
|
||||
supportsSqlQueryExport: boolean;
|
||||
supportsCopyInsert: boolean;
|
||||
forceReadOnlyQueryResult: boolean;
|
||||
};
|
||||
|
||||
export const getDataSourceCapabilities = (config: ConnectionLike): DataSourceCapabilities => {
|
||||
const type = resolveDataSourceType(config);
|
||||
return {
|
||||
type,
|
||||
supportsQueryEditor: !QUERY_EDITOR_DISABLED_TYPES.has(type),
|
||||
supportsSqlQueryExport: SQL_QUERY_EXPORT_TYPES.has(type),
|
||||
supportsCopyInsert: COPY_INSERT_TYPES.has(type),
|
||||
forceReadOnlyQueryResult: FORCE_READ_ONLY_QUERY_TYPES.has(type),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -36,7 +36,7 @@ export const quoteIdentPart = (dbType: string, ident: string) => {
|
||||
if (!raw) return raw;
|
||||
const dbTypeLower = (dbType || '').toLowerCase();
|
||||
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros' || dbTypeLower === 'sphinx' || dbTypeLower === 'tdengine') {
|
||||
if (dbTypeLower === 'mysql' || dbTypeLower === 'mariadb' || dbTypeLower === 'diros' || dbTypeLower === 'sphinx' || dbTypeLower === 'tdengine' || dbTypeLower === 'clickhouse') {
|
||||
return `\`${raw.replace(/`/g, '``')}\``;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,54 +1,6 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
|
||||
const normalizeModuleId = (id: string): string => id.replace(/\\/g, '/')
|
||||
|
||||
const sanitizeChunkToken = (raw: string): string =>
|
||||
String(raw || '')
|
||||
.trim()
|
||||
.replace(/[^a-zA-Z0-9_-]/g, '-')
|
||||
.replace(/-+/g, '-')
|
||||
.replace(/^-|-$/g, '') || 'misc'
|
||||
|
||||
const firstSegmentAfter = (id: string, marker: string): string => {
|
||||
const idx = id.indexOf(marker)
|
||||
if (idx < 0) return ''
|
||||
const rest = id.substring(idx + marker.length)
|
||||
const [segment] = rest.split('/')
|
||||
return sanitizeChunkToken(segment)
|
||||
}
|
||||
|
||||
const resolveMonacoChunk = (id: string, prefix: string): string | undefined => {
|
||||
if (!id.includes('/node_modules/monaco-editor/')) return undefined
|
||||
|
||||
if (id.includes('/esm/vs/language/typescript/')) {
|
||||
if (id.includes('typescriptServices')) return `${prefix}-ts-services`
|
||||
return `${prefix}-typescript`
|
||||
}
|
||||
if (id.includes('/esm/vs/language/json/')) return `${prefix}-json`
|
||||
if (id.includes('/esm/vs/language/css/')) return `${prefix}-css`
|
||||
if (id.includes('/esm/vs/language/html/')) return `${prefix}-html`
|
||||
|
||||
if (id.includes('/esm/vs/editor/contrib/')) {
|
||||
return `${prefix}-editor-contrib-${firstSegmentAfter(id, '/esm/vs/editor/contrib/')}`
|
||||
}
|
||||
if (id.includes('/esm/vs/editor/browser/')) {
|
||||
return `${prefix}-editor-browser-${firstSegmentAfter(id, '/esm/vs/editor/browser/')}`
|
||||
}
|
||||
if (id.includes('/esm/vs/editor/common/')) {
|
||||
return `${prefix}-editor-common-${firstSegmentAfter(id, '/esm/vs/editor/common/')}`
|
||||
}
|
||||
if (id.includes('/esm/vs/editor/')) return `${prefix}-editor`
|
||||
|
||||
if (id.includes('/esm/vs/base/browser/')) return `${prefix}-base-browser`
|
||||
if (id.includes('/esm/vs/base/common/')) return `${prefix}-base-common`
|
||||
if (id.includes('/esm/vs/base/')) return `${prefix}-base`
|
||||
|
||||
if (id.includes('/esm/vs/platform/')) return `${prefix}-platform`
|
||||
|
||||
return `${prefix}-misc`
|
||||
}
|
||||
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
@@ -59,61 +11,5 @@ export default defineConfig({
|
||||
build: {
|
||||
outDir: 'dist', // Standard Wails output directory
|
||||
emptyOutDir: true,
|
||||
rollupOptions: {
|
||||
output: {
|
||||
manualChunks(id) {
|
||||
const moduleId = normalizeModuleId(id)
|
||||
if (!moduleId.includes('node_modules')) return undefined
|
||||
|
||||
const monacoChunk = resolveMonacoChunk(moduleId, 'vendor-monaco')
|
||||
if (monacoChunk) {
|
||||
return monacoChunk
|
||||
}
|
||||
if (moduleId.includes('/node_modules/@monaco-editor/react/')) return 'vendor-monaco-react'
|
||||
|
||||
if (moduleId.includes('/node_modules/antd/es/')) {
|
||||
return `vendor-antd-${firstSegmentAfter(moduleId, '/node_modules/antd/es/')}`
|
||||
}
|
||||
if (moduleId.includes('/node_modules/antd/')) return 'vendor-antd'
|
||||
if (moduleId.includes('/node_modules/@ant-design/icons/')) return 'vendor-antd-icons'
|
||||
if (moduleId.includes('/node_modules/@ant-design/cssinjs/')) return 'vendor-antd-css'
|
||||
if (moduleId.includes('/node_modules/rc-')) return 'vendor-antd-rc'
|
||||
|
||||
if (moduleId.includes('/node_modules/@dnd-kit/')) return 'vendor-dnd-kit'
|
||||
if (moduleId.includes('/node_modules/sql-formatter/')) return 'vendor-sql-formatter'
|
||||
|
||||
if (
|
||||
moduleId.includes('/node_modules/react/')
|
||||
|| moduleId.includes('/node_modules/react-dom/')
|
||||
|| moduleId.includes('/node_modules/scheduler/')
|
||||
) {
|
||||
return 'vendor-react'
|
||||
}
|
||||
|
||||
if (
|
||||
moduleId.includes('/node_modules/zustand/')
|
||||
|| moduleId.includes('/node_modules/uuid/')
|
||||
|| moduleId.includes('/node_modules/clsx/')
|
||||
|| moduleId.includes('/node_modules/react-resizable/')
|
||||
) {
|
||||
return 'vendor-utils'
|
||||
}
|
||||
|
||||
return 'vendor-misc'
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
worker: {
|
||||
format: 'es',
|
||||
rollupOptions: {
|
||||
output: {
|
||||
manualChunks(id) {
|
||||
const moduleId = normalizeModuleId(id)
|
||||
if (!moduleId.includes('node_modules')) return undefined
|
||||
return resolveMonacoChunk(moduleId, 'worker-monaco')
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
22
frontend/wailsjs/go/app/App.d.ts
vendored
22
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -6,10 +6,14 @@ import {redis} from '../models';
|
||||
|
||||
export function ApplyChanges(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:connection.ChangeSet):Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckDriverNetworkStatus():Promise<connection.QueryResult>;
|
||||
|
||||
export function CheckForUpdates():Promise<connection.QueryResult>;
|
||||
|
||||
export function ConfigureDriverRuntimeDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ConfigureGlobalProxy(arg1:boolean,arg2:connection.ProxyConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function CreateDatabase(arg1:connection.ConnectionConfig,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBConnect(arg1:connection.ConnectionConfig):Promise<connection.QueryResult>;
|
||||
@@ -30,6 +34,8 @@ export function DBGetTriggers(arg1:connection.ConnectionConfig,arg2:string,arg3:
|
||||
|
||||
export function DBQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBQueryIsolated(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
@@ -38,7 +44,7 @@ export function DataSyncAnalyze(arg1:sync.SyncConfig):Promise<connection.QueryRe
|
||||
|
||||
export function DataSyncPreview(arg1:sync.SyncConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function DownloadDriverPackage(arg1:string,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
export function DownloadDriverPackage(arg1:string,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DownloadUpdate():Promise<connection.QueryResult>;
|
||||
|
||||
@@ -66,6 +72,12 @@ export function GetAppInfo():Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverStatusList(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverVersionList(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetDriverVersionPackageSize(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function GetGlobalProxyConfig():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportConfigFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function ImportData(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
@@ -88,6 +100,8 @@ export function MySQLQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:str
|
||||
|
||||
export function MySQLShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function OpenDownloadedUpdateDirectory():Promise<connection.QueryResult>;
|
||||
|
||||
export function OpenSQLFile():Promise<connection.QueryResult>;
|
||||
|
||||
export function PreviewImportFile(arg1:string):Promise<connection.QueryResult>;
|
||||
@@ -114,7 +128,7 @@ export function RedisListSet(arg1:connection.ConnectionConfig,arg2:string,arg3:n
|
||||
|
||||
export function RedisRenameKey(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:number,arg4:number):Promise<connection.QueryResult>;
|
||||
export function RedisScanKeys(arg1:connection.ConnectionConfig,arg2:string,arg3:any,arg4:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function RedisSelectDB(arg1:connection.ConnectionConfig,arg2:number):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -152,8 +166,12 @@ export function ResolveDriverPackageDownloadURL(arg1:string,arg2:string):Promise
|
||||
|
||||
export function ResolveDriverRepositoryURL(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDatabaseFile(arg1:string,arg2:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverDownloadDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverPackageDirectory(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectDriverPackageFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function SelectSSHKeyFile(arg1:string):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -6,6 +6,10 @@ export function ApplyChanges(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ApplyChanges'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function CheckDriverNetworkStatus() {
|
||||
return window['go']['app']['App']['CheckDriverNetworkStatus']();
|
||||
}
|
||||
|
||||
export function CheckForUpdates() {
|
||||
return window['go']['app']['App']['CheckForUpdates']();
|
||||
}
|
||||
@@ -14,6 +18,10 @@ export function ConfigureDriverRuntimeDirectory(arg1) {
|
||||
return window['go']['app']['App']['ConfigureDriverRuntimeDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function ConfigureGlobalProxy(arg1, arg2) {
|
||||
return window['go']['app']['App']['ConfigureGlobalProxy'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function CreateDatabase(arg1, arg2) {
|
||||
return window['go']['app']['App']['CreateDatabase'](arg1, arg2);
|
||||
}
|
||||
@@ -54,6 +62,10 @@ export function DBQuery(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQuery'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBQueryIsolated(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBQueryIsolated'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DBShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
@@ -70,8 +82,8 @@ export function DataSyncPreview(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DataSyncPreview'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DownloadDriverPackage(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DownloadDriverPackage'](arg1, arg2, arg3);
|
||||
export function DownloadDriverPackage(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['DownloadDriverPackage'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
export function DownloadUpdate() {
|
||||
@@ -126,6 +138,18 @@ export function GetDriverStatusList(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverStatusList'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetDriverVersionList(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverVersionList'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetDriverVersionPackageSize(arg1, arg2) {
|
||||
return window['go']['app']['App']['GetDriverVersionPackageSize'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function GetGlobalProxyConfig() {
|
||||
return window['go']['app']['App']['GetGlobalProxyConfig']();
|
||||
}
|
||||
|
||||
export function ImportConfigFile() {
|
||||
return window['go']['app']['App']['ImportConfigFile']();
|
||||
}
|
||||
@@ -170,6 +194,10 @@ export function MySQLShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['MySQLShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function OpenDownloadedUpdateDirectory() {
|
||||
return window['go']['app']['App']['OpenDownloadedUpdateDirectory']();
|
||||
}
|
||||
|
||||
export function OpenSQLFile() {
|
||||
return window['go']['app']['App']['OpenSQLFile']();
|
||||
}
|
||||
@@ -298,10 +326,18 @@ export function ResolveDriverRepositoryURL(arg1) {
|
||||
return window['go']['app']['App']['ResolveDriverRepositoryURL'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDatabaseFile(arg1, arg2) {
|
||||
return window['go']['app']['App']['SelectDatabaseFile'](arg1, arg2);
|
||||
}
|
||||
|
||||
export function SelectDriverDownloadDirectory(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverDownloadDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDriverPackageDirectory(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverPackageDirectory'](arg1);
|
||||
}
|
||||
|
||||
export function SelectDriverPackageFile(arg1) {
|
||||
return window['go']['app']['App']['SelectDriverPackageFile'](arg1);
|
||||
}
|
||||
|
||||
@@ -48,6 +48,26 @@ export namespace connection {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
export class ProxyConfig {
|
||||
type: string;
|
||||
host: string;
|
||||
port: number;
|
||||
user?: string;
|
||||
password?: string;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new ProxyConfig(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.type = source["type"];
|
||||
this.host = source["host"];
|
||||
this.port = source["port"];
|
||||
this.user = source["user"];
|
||||
this.password = source["password"];
|
||||
}
|
||||
}
|
||||
export class SSHConfig {
|
||||
host: string;
|
||||
port: number;
|
||||
@@ -78,6 +98,8 @@ export namespace connection {
|
||||
database: string;
|
||||
useSSH: boolean;
|
||||
ssh: SSHConfig;
|
||||
useProxy?: boolean;
|
||||
proxy?: ProxyConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
@@ -110,6 +132,8 @@ export namespace connection {
|
||||
this.database = source["database"];
|
||||
this.useSSH = source["useSSH"];
|
||||
this.ssh = this.convertValues(source["ssh"], SSHConfig);
|
||||
this.useProxy = source["useProxy"];
|
||||
this.proxy = this.convertValues(source["proxy"], ProxyConfig);
|
||||
this.driver = source["driver"];
|
||||
this.dsn = source["dsn"];
|
||||
this.timeout = source["timeout"];
|
||||
@@ -146,6 +170,7 @@ export namespace connection {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
|
||||
export class QueryResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
|
||||
16
go.mod
16
go.mod
@@ -5,6 +5,7 @@ go 1.24.3
|
||||
require (
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3
|
||||
gitee.com/chunanyong/dm v1.8.22
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5
|
||||
github.com/go-sql-driver/mysql v1.9.3
|
||||
github.com/highgo/pq-sm3 v0.0.0
|
||||
@@ -17,12 +18,16 @@ require (
|
||||
github.com/xuri/excelize/v2 v2.10.0
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
golang.org/x/mod v0.32.0
|
||||
golang.org/x/net v0.49.0
|
||||
golang.org/x/text v0.33.0
|
||||
modernc.org/sqlite v1.44.3
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/ClickHouse/ch-go v0.71.0 // indirect
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.5.1 // indirect
|
||||
github.com/bep/debounce v1.2.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
@@ -34,6 +39,8 @@ require (
|
||||
github.com/duckdb/duckdb-go-bindings/lib/linux-arm64 v0.3.3 // indirect
|
||||
github.com/duckdb/duckdb-go-bindings/lib/windows-amd64 v0.3.3 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
@@ -44,7 +51,7 @@ require (
|
||||
github.com/google/flatbuffers v25.12.19+incompatible // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.18.3 // indirect
|
||||
@@ -60,6 +67,7 @@ require (
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/paulmach/orb v0.12.0 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.25 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
@@ -68,6 +76,7 @@ require (
|
||||
github.com/richardlehane/msoleps v1.0.4 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/samber/lo v1.49.1 // indirect
|
||||
github.com/segmentio/asm v1.2.1 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/tiendc/go-deepcopy v1.7.1 // indirect
|
||||
github.com/tkrajina/go-reflector v0.5.8 // indirect
|
||||
@@ -82,9 +91,10 @@ require (
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 // indirect
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
|
||||
github.com/zeebo/xxh3 v1.1.0 // indirect
|
||||
go.opentelemetry.io/otel v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.39.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 // indirect
|
||||
golang.org/x/mod v0.32.0 // indirect
|
||||
golang.org/x/net v0.49.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 // indirect
|
||||
|
||||
75
go.sum
75
go.sum
@@ -16,6 +16,10 @@ github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1 h1:bFWuo
|
||||
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.1.1/go.mod h1:Vih/3yc6yac2JzU4hzpaDupBJP0Flaia9rXXrU8xyww=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
|
||||
github.com/ClickHouse/ch-go v0.71.0 h1:bUdZ/EZj/LcVHsMqaRUP2holqygrPWQKeMjc6nZoyRM=
|
||||
github.com/ClickHouse/ch-go v0.71.0/go.mod h1:NwbNc+7jaqfY58dmdDUbG4Jl22vThgx1cYjBw0vtgXw=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0 h1:fUR05TrF1GyvLDa/mAQjkx7KbgwdLRffs2n9O3WobtE=
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.43.0/go.mod h1:o6jf7JM/zveWC/PP277BLxjHy5KjnGX/jfljhM4s34g=
|
||||
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
|
||||
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
|
||||
github.com/apache/arrow-go/v18 v18.5.1 h1:yaQ6zxMGgf9YCYw4/oaeOU3AULySDlAYDOcnr4LdHdI=
|
||||
@@ -52,6 +56,10 @@ github.com/duckdb/duckdb-go/v2 v2.5.5 h1:TlK8ipnzoKW2aNrjGqRkFWLCDpJDxR/VwH8ezEc
|
||||
github.com/duckdb/duckdb-go/v2 v2.5.5/go.mod h1:6uIbC3gz36NCEygECzboygOo/Z9TeVwox/puG+ohWV0=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
|
||||
github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw=
|
||||
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
|
||||
github.com/go-faster/errors v0.7.1/go.mod h1:5ySTjWFiphBs07IKuiL69nxdfd5+fzh1u7FPGZP2quo=
|
||||
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
|
||||
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
@@ -62,19 +70,23 @@ github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
|
||||
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
|
||||
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
|
||||
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
|
||||
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/flatbuffers v25.12.19+incompatible h1:haMV2JRRJCe1998HeW/p0X9UaMTK6SDo0ffLn2+DbLs=
|
||||
github.com/google/flatbuffers v25.12.19+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
@@ -83,20 +95,29 @@ github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
|
||||
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4=
|
||||
github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e h1:Q3+PugElBCf4PFpxhErSzU3/PY5sFL5Z6rfv4AbGAck=
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw=
|
||||
github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/labstack/echo/v4 v4.13.3 h1:pwhpCPrTl5qry5HRdM5FwdXnhXSLSY+WE+YQSeCaafY=
|
||||
@@ -134,8 +155,14 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/paulmach/orb v0.12.0 h1:z+zOwjmG3MyEEqzv92UN49Lg1JFYx0L9GpGKNVDKk1s=
|
||||
github.com/paulmach/orb v0.12.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
|
||||
github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY=
|
||||
github.com/pierrec/lz4/v4 v4.1.25 h1:kocOqRffaIbU5djlIBr7Wh+cx82C0vtFb0fOurZHqD0=
|
||||
github.com/pierrec/lz4/v4 v4.1.25/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||
@@ -159,6 +186,8 @@ github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
|
||||
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
|
||||
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
|
||||
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
|
||||
@@ -169,6 +198,7 @@ github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpE
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
@@ -176,6 +206,7 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8 h1:N2H6HLLZH2ve2ipcoFgG9BJS+yW0XksqNYwEdSmHaJk=
|
||||
github.com/taosdata/driver-go/v3 v3.7.8/go.mod h1:gSxBEPOueMg0rTmMO1Ug6aeD7AwGdDGvUtLrsDTTpYc=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tiendc/go-deepcopy v1.7.1 h1:LnubftI6nYaaMOcaz0LphzwraqN8jiWTwm416sitff4=
|
||||
github.com/tiendc/go-deepcopy v1.7.1/go.mod h1:4bKjNC2r7boYOkD2IOuZpYjmlDdzjbpTRyCx+goBCJQ=
|
||||
github.com/tkrajina/go-reflector v0.5.8 h1:yPADHrwmUbMq4RGEyaOUpz2H90sRsETNVpjzo3DLVQQ=
|
||||
@@ -192,8 +223,10 @@ github.com/wailsapp/wails/v2 v2.11.0 h1:seLacV8pqupq32IjS4Y7V8ucab0WZwtK6VvUVxSB
|
||||
github.com/wailsapp/wails/v2 v2.11.0/go.mod h1:jrf0ZaM6+GBc1wRmXsM8cIvzlg0karYin3erahI4+0k=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
|
||||
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
|
||||
github.com/xdg-go/scram v1.2.0 h1:bYKF2AEwG5rqd1BumT4gAnvwU/M9nBp2pTSxeZw7Wvs=
|
||||
github.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=
|
||||
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
|
||||
github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=
|
||||
github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=
|
||||
github.com/xuri/efp v0.0.1 h1:fws5Rv3myXyYni8uwj2qKjVaRP30PdjeYe2Y6FDsCL8=
|
||||
@@ -202,38 +235,64 @@ github.com/xuri/excelize/v2 v2.10.0 h1:8aKsP7JD39iKLc6dH5Tw3dgV3sPRh8uRVXu/fMstf
|
||||
github.com/xuri/excelize/v2 v2.10.0/go.mod h1:SC5TzhQkaOsTWpANfm+7bJCldzcnU/jrhqkTi/iBHBU=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9 h1:+C0TIdyyYmzadGaL/HBLbf3WdLgC29pgyhTjAT/0nuE=
|
||||
github.com/xuri/nfp v0.0.2-0.20250530014748-2ddeb826f9a9/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
|
||||
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/xxh3 v1.1.0 h1:s7DLGDK45Dyfg7++yxI0khrfwq9661w9EN78eP/UZVs=
|
||||
github.com/zeebo/xxh3 v1.1.0/go.mod h1:IisAie1LELR4xhVinxWS5+zf1lA4p0MW4T+w+W07F5s=
|
||||
go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU=
|
||||
golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ=
|
||||
golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@@ -260,15 +319,25 @@ golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
)
|
||||
|
||||
const dbCachePingInterval = 30 * time.Second
|
||||
@@ -66,23 +67,74 @@ func (a *App) Shutdown(ctx context.Context) {
|
||||
logger.Error(err, "关闭数据库连接失败")
|
||||
}
|
||||
}
|
||||
proxytunnel.CloseAllForwarders()
|
||||
// Close all Redis connections
|
||||
CloseAllRedisClients()
|
||||
logger.Infof("资源释放完成,应用已关闭")
|
||||
logger.Close()
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
func normalizeCacheKeyConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := config
|
||||
normalized.Type = strings.ToLower(strings.TrimSpace(normalized.Type))
|
||||
// timeout 仅用于 Query/Ping 控制,不应作为物理连接复用键的一部分。
|
||||
normalized.Timeout = 0
|
||||
normalized.SavePassword = false
|
||||
|
||||
if !normalized.UseSSH {
|
||||
normalized.SSH = connection.SSHConfig{}
|
||||
}
|
||||
// 保持与驱动默认一致,避免同一连接被重复缓存
|
||||
if config.Type == "postgres" && config.Database == "" {
|
||||
config.Database = "postgres"
|
||||
if !normalized.UseProxy {
|
||||
normalized.Proxy = connection.ProxyConfig{}
|
||||
}
|
||||
|
||||
b, _ := json.Marshal(config)
|
||||
if isFileDatabaseType(normalized.Type) {
|
||||
dsn := strings.TrimSpace(normalized.Host)
|
||||
if dsn == "" {
|
||||
dsn = strings.TrimSpace(normalized.Database)
|
||||
}
|
||||
if dsn == "" {
|
||||
dsn = ":memory:"
|
||||
}
|
||||
|
||||
// DuckDB/SQLite 仅基于文件来源识别连接,其他网络字段不参与键计算。
|
||||
normalized.Host = dsn
|
||||
normalized.Database = ""
|
||||
normalized.Port = 0
|
||||
normalized.User = ""
|
||||
normalized.Password = ""
|
||||
normalized.URI = ""
|
||||
normalized.Hosts = nil
|
||||
normalized.Topology = ""
|
||||
normalized.MySQLReplicaUser = ""
|
||||
normalized.MySQLReplicaPassword = ""
|
||||
normalized.ReplicaSet = ""
|
||||
normalized.AuthSource = ""
|
||||
normalized.ReadPreference = ""
|
||||
normalized.MongoSRV = false
|
||||
normalized.MongoAuthMechanism = ""
|
||||
normalized.MongoReplicaUser = ""
|
||||
normalized.MongoReplicaPassword = ""
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
func resolveFileDatabaseDSN(config connection.ConnectionConfig) string {
|
||||
dsn := strings.TrimSpace(config.Host)
|
||||
if dsn == "" {
|
||||
dsn = strings.TrimSpace(config.Database)
|
||||
}
|
||||
if dsn == "" {
|
||||
dsn = ":memory:"
|
||||
}
|
||||
return dsn
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
normalized := normalizeCacheKeyConfig(config)
|
||||
b, _ := json.Marshal(normalized)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
@@ -175,6 +227,12 @@ func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
if config.UseSSH {
|
||||
b.WriteString(fmt.Sprintf(" SSH=%s:%d 用户=%s", config.SSH.Host, config.SSH.Port, config.SSH.User))
|
||||
}
|
||||
if config.UseProxy {
|
||||
b.WriteString(fmt.Sprintf(" 代理=%s://%s:%d", strings.ToLower(strings.TrimSpace(config.Proxy.Type)), config.Proxy.Host, config.Proxy.Port))
|
||||
if strings.TrimSpace(config.Proxy.User) != "" {
|
||||
b.WriteString(" 代理认证=已配置")
|
||||
}
|
||||
}
|
||||
|
||||
if config.Type == "custom" {
|
||||
driver := strings.TrimSpace(config.Driver)
|
||||
@@ -200,16 +258,51 @@ func (a *App) getDatabase(config connection.ConnectionConfig) (db.Database, erro
|
||||
return a.getDatabaseWithPing(config, false)
|
||||
}
|
||||
|
||||
// openDatabaseIsolated opens a brand-new database connection that bypasses
// the shared connection cache. The caller owns the returned instance and is
// responsible for closing it.
func (a *App) openDatabaseIsolated(config connection.ConnectionConfig) (db.Database, error) {
	// Overlay the global proxy settings before any driver work.
	effectiveConfig := applyGlobalProxyToConnection(config)
	if supported, reason := db.DriverRuntimeSupportStatus(effectiveConfig.Type); !supported {
		if strings.TrimSpace(reason) == "" {
			reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(effectiveConfig.Type))
		}
		// Attach the log path so the UI can point the user at diagnostics.
		return nil, withLogHint{err: fmt.Errorf("%s", reason), logPath: logger.Path()}
	}

	dbInst, err := db.NewDatabase(effectiveConfig.Type)
	if err != nil {
		return nil, err
	}

	// Rewrite endpoints through the proxy forwarder if one is configured;
	// on any failure the half-built instance is closed before returning.
	connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
	if proxyErr != nil {
		_ = dbInst.Close()
		return nil, wrapConnectError(effectiveConfig, proxyErr)
	}
	if err := dbInst.Connect(connectConfig); err != nil {
		_ = dbInst.Close()
		return nil, wrapConnectError(effectiveConfig, err)
	}
	return dbInst, nil
}
|
||||
|
||||
func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing bool) (db.Database, error) {
|
||||
key := getCacheKey(config)
|
||||
effectiveConfig := applyGlobalProxyToConnection(config)
|
||||
isFileDB := isFileDatabaseType(effectiveConfig.Type)
|
||||
|
||||
key := getCacheKey(effectiveConfig)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
if isFileDB {
|
||||
rawDSN := resolveFileDatabaseDSN(effectiveConfig)
|
||||
normalizedDSN := resolveFileDatabaseDSN(normalizeCacheKeyConfig(effectiveConfig))
|
||||
logger.Infof("文件库连接缓存探测:类型=%s 原始DSN=%s 归一化DSN=%s timeout=%ds forcePing=%t 缓存Key=%s",
|
||||
strings.TrimSpace(effectiveConfig.Type), rawDSN, normalizedDSN, effectiveConfig.Timeout, forcePing, shortKey)
|
||||
}
|
||||
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(config.Type); !supported {
|
||||
if supported, reason := db.DriverRuntimeSupportStatus(effectiveConfig.Type); !supported {
|
||||
if strings.TrimSpace(reason) == "" {
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(config.Type))
|
||||
reason = fmt.Sprintf("%s 驱动未启用,请先在驱动管理中安装启用", strings.TrimSpace(effectiveConfig.Type))
|
||||
}
|
||||
// Best-effort cleanup: if cached instance exists for this exact config, close it.
|
||||
a.mu.Lock()
|
||||
@@ -225,6 +318,9 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
entry, ok := a.dbCache[key]
|
||||
a.mu.RUnlock()
|
||||
if ok {
|
||||
if isFileDB {
|
||||
logger.Infof("命中文件库连接缓存:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
needPing := forcePing
|
||||
if !needPing {
|
||||
lastPing := entry.lastPing
|
||||
@@ -234,6 +330,9 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
}
|
||||
|
||||
if !needPing {
|
||||
if isFileDB {
|
||||
logger.Infof("复用文件库连接缓存(免 Ping):类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return entry.inst, nil
|
||||
}
|
||||
|
||||
@@ -245,9 +344,12 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
a.dbCache[key] = cur
|
||||
}
|
||||
a.mu.Unlock()
|
||||
if isFileDB {
|
||||
logger.Infof("复用文件库连接缓存(Ping 成功):类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return entry.inst, nil
|
||||
} else {
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Error(err, "缓存连接不可用,准备重建:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
}
|
||||
|
||||
// Ping failed: remove cached instance (best effort)
|
||||
@@ -259,19 +361,32 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
delete(a.dbCache, key)
|
||||
}
|
||||
a.mu.Unlock()
|
||||
if isFileDB {
|
||||
logger.Infof("文件库缓存连接已剔除,准备新建连接:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
}
|
||||
if isFileDB {
|
||||
logger.Infof("未命中文件库连接缓存,开始创建连接:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(config.Type)
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", effectiveConfig.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(effectiveConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", effectiveConfig.Type, shortKey)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dbInst.Connect(config); err != nil {
|
||||
wrapped := wrapConnectError(config, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
connectConfig, proxyErr := resolveDialConfigWithProxy(effectiveConfig)
|
||||
if proxyErr != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, proxyErr)
|
||||
logger.Error(wrapped, "连接代理准备失败:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
if err := dbInst.Connect(connectConfig); err != nil {
|
||||
wrapped := wrapConnectError(effectiveConfig, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
@@ -282,11 +397,14 @@ func (a *App) getDatabaseWithPing(config connection.ConnectionConfig, forcePing
|
||||
a.mu.Unlock()
|
||||
// Prefer existing cached connection to avoid cache racing duplicates.
|
||||
_ = dbInst.Close()
|
||||
if isFileDB {
|
||||
logger.Infof("并发创建命中已存在文件库连接,关闭新建连接并复用缓存:类型=%s 缓存Key=%s", strings.TrimSpace(effectiveConfig.Type), shortKey)
|
||||
}
|
||||
return existing.inst, nil
|
||||
}
|
||||
a.dbCache[key] = cachedDatabase{inst: dbInst, lastPing: now}
|
||||
a.mu.Unlock()
|
||||
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(effectiveConfig), shortKey)
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
63
internal/app/app_cache_key_test.go
Normal file
63
internal/app/app_cache_key_test.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestGetCacheKey_IgnoreTimeout(t *testing.T) {
|
||||
base := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: `C:\data\songs.duckdb`,
|
||||
Timeout: 30,
|
||||
UseProxy: false,
|
||||
UseSSH: false,
|
||||
}
|
||||
modified := base
|
||||
modified.Timeout = 120
|
||||
|
||||
left := getCacheKey(base)
|
||||
right := getCacheKey(modified)
|
||||
if left != right {
|
||||
t.Fatalf("expected same cache key when only timeout differs, got %s vs %s", left, right)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCacheKey_DuckDBHostAndDatabaseEquivalent(t *testing.T) {
|
||||
withHost := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Host: `D:\music\songs.duckdb`,
|
||||
}
|
||||
withDatabase := connection.ConnectionConfig{
|
||||
Type: "duckdb",
|
||||
Database: `D:\music\songs.duckdb`,
|
||||
}
|
||||
|
||||
left := getCacheKey(withHost)
|
||||
right := getCacheKey(withDatabase)
|
||||
if left != right {
|
||||
t.Fatalf("expected same cache key for duckdb host/database path, got %s vs %s", left, right)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCacheKey_KeepDatabaseIsolation(t *testing.T) {
|
||||
a := connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
Host: "127.0.0.1",
|
||||
Port: 3306,
|
||||
User: "root",
|
||||
Password: "root",
|
||||
Database: "db_a",
|
||||
Timeout: 30,
|
||||
}
|
||||
b := a
|
||||
b.Database = "db_b"
|
||||
b.Timeout = 5
|
||||
|
||||
left := getCacheKey(a)
|
||||
right := getCacheKey(b)
|
||||
if left == right {
|
||||
t.Fatalf("expected different cache key for different database targets")
|
||||
}
|
||||
}
|
||||
@@ -14,7 +14,7 @@ func normalizeRunConfig(config connection.ConnectionConfig, dbName string) conne
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "highgo", "vastbase", "sqlserver", "mongodb", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "highgo", "vastbase", "sqlserver", "mongodb", "tdengine", "clickhouse":
|
||||
// 这些类型的 dbName 表示"数据库",需要写入连接配置以选择目标库。
|
||||
runConfig.Database = name
|
||||
case "dameng":
|
||||
|
||||
204
internal/app/db_proxy.go
Normal file
204
internal/app/db_proxy.go
Normal file
@@ -0,0 +1,204 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
)
|
||||
|
||||
// resolveDialConfigWithProxy rewrites a connection config so the driver can
// dial directly: when a proxy is enabled, each remote endpoint (SSH gateway,
// primary host, extra hosts) is replaced by a local forwarder address and
// the proxy fields are cleared so lower layers do not apply the proxy twice.
func resolveDialConfigWithProxy(raw connection.ConnectionConfig) (connection.ConnectionConfig, error) {
	config := raw
	if !config.UseProxy {
		config.Proxy = connection.ProxyConfig{}
		return config, nil
	}

	normalizedProxy, err := proxytunnel.NormalizeConfig(config.Proxy)
	if err != nil {
		return connection.ConnectionConfig{}, err
	}
	config.Proxy = normalizedProxy

	if config.UseSSH {
		// Only the SSH gateway is proxied; the database itself is then
		// reached over the SSH tunnel.
		sshPort := config.SSH.Port
		if sshPort <= 0 {
			sshPort = 22
		}
		forwardedSSH, err := buildProxyForwardAddress(normalizedProxy, strings.TrimSpace(config.SSH.Host), sshPort)
		if err != nil {
			return connection.ConnectionConfig{}, fmt.Errorf("代理连接 SSH 网关失败:%w", err)
		}
		config.SSH.Host = forwardedSSH.host
		config.SSH.Port = forwardedSSH.port
		config.UseProxy = false
		config.Proxy = connection.ProxyConfig{}
		return config, nil
	}

	normalizedType := strings.ToLower(strings.TrimSpace(config.Type))
	if normalizedType == "sqlite" || normalizedType == "duckdb" || normalizedType == "custom" {
		// File-based / custom-DSN types do not use a standard host:port, so
		// they are not rewritten at this layer.
		return config, nil
	}
	if normalizedType == "mongodb" && config.MongoSRV {
		// Mongo SRV proxying is handled by the driver-side dialer so DNS SRV
		// topology discovery keeps working.
		return config, nil
	}

	targetPort := config.Port
	if targetPort <= 0 {
		targetPort = defaultPortByType(normalizedType)
	}
	forwardedPrimary, err := buildProxyForwardAddress(normalizedProxy, strings.TrimSpace(config.Host), targetPort)
	if err != nil {
		return connection.ConnectionConfig{}, err
	}
	config.Host = forwardedPrimary.host
	config.Port = forwardedPrimary.port

	if len(config.Hosts) > 0 {
		// Rewrite every additional host, skipping unparseable entries and
		// de-duplicating entries that collapse to the same local forwarder.
		rewritten := make([]string, 0, len(config.Hosts))
		seen := make(map[string]struct{}, len(config.Hosts))
		for _, rawEntry := range config.Hosts {
			targetHost, targetPort, ok := parseAddressWithDefaultPort(rawEntry, defaultPortByType(normalizedType))
			if !ok {
				continue
			}
			forwarded, forwardErr := buildProxyForwardAddress(normalizedProxy, targetHost, targetPort)
			if forwardErr != nil {
				return connection.ConnectionConfig{}, forwardErr
			}
			rewrittenAddress := formatHostPort(forwarded.host, forwarded.port)
			if _, exists := seen[rewrittenAddress]; exists {
				continue
			}
			seen[rewrittenAddress] = struct{}{}
			rewritten = append(rewritten, rewrittenAddress)
		}
		config.Hosts = rewritten
	}

	config.UseProxy = false
	config.Proxy = connection.ProxyConfig{}
	return config, nil
}
|
||||
|
||||
// hostPort is a resolved dial target: a host name or IP plus a TCP port.
type hostPort struct {
	host string
	port int
}
|
||||
|
||||
// buildProxyForwardAddress starts (or reuses) a local port-forwarder that
// tunnels through the given proxy toward targetHost:targetPort, returning
// the local listener address the driver should dial instead.
func buildProxyForwardAddress(proxyConfig connection.ProxyConfig, targetHost string, targetPort int) (hostPort, error) {
	host := strings.TrimSpace(targetHost)
	if host == "" {
		// Empty hosts are treated as local connections.
		host = "localhost"
	}
	port := targetPort
	if port <= 0 {
		return hostPort{}, fmt.Errorf("目标端口无效:%d", targetPort)
	}

	forwarder, err := proxytunnel.GetOrCreateLocalForwarder(proxyConfig, host, port)
	if err != nil {
		return hostPort{}, err
	}
	// The forwarder's local address must itself be a dialable host:port pair.
	localHost, localPort, splitOK := parseAddressWithDefaultPort(forwarder.LocalAddr, 0)
	if !splitOK || localPort <= 0 {
		return hostPort{}, fmt.Errorf("解析代理本地转发地址失败:%s", forwarder.LocalAddr)
	}
	return hostPort{host: localHost, port: localPort}, nil
}
|
||||
|
||||
// parseAddressWithDefaultPort splits a "host[:port]" string into host and
// port. When the port is absent, defaultPort is substituted; a non-positive
// defaultPort makes the portless form invalid. The boolean reports whether
// the input produced a usable address.
func parseAddressWithDefaultPort(raw string, defaultPort int) (string, int, bool) {
	addr := strings.TrimSpace(raw)
	if addr == "" {
		return "", 0, false
	}

	// validPort converts a textual port and rejects out-of-range values.
	validPort := func(portText string) (int, bool) {
		p, convErr := strconv.Atoi(portText)
		if convErr != nil || p <= 0 || p > 65535 {
			return 0, false
		}
		return p, true
	}

	// Bracketed form: "[ipv6]:port" or a bare "[ipv6]".
	if strings.HasPrefix(addr, "[") {
		if host, portText, err := net.SplitHostPort(addr); err == nil {
			if p, ok := validPort(portText); ok {
				return strings.TrimSpace(host), p, true
			}
			return "", 0, false
		}
		bare := strings.Trim(strings.TrimPrefix(addr, "["), "]")
		if bare == "" || defaultPort <= 0 {
			return "", 0, false
		}
		return bare, defaultPort, true
	}

	switch strings.Count(addr, ":") {
	case 0:
		// Plain host without a port.
		if defaultPort > 0 {
			return addr, defaultPort, true
		}
		return "", 0, false
	case 1:
		// "host:port", or a malformed pair that falls back to defaultPort.
		if host, portText, err := net.SplitHostPort(addr); err == nil {
			if p, ok := validPort(portText); ok {
				return strings.TrimSpace(host), p, true
			}
			return "", 0, false
		}
		if defaultPort > 0 {
			return strings.TrimSpace(addr), defaultPort, true
		}
		return "", 0, false
	default:
		// Unbracketed IPv6 literal without a port: apply the default port.
		if defaultPort > 0 {
			return addr, defaultPort, true
		}
		return "", 0, false
	}
}
|
||||
|
||||
// formatHostPort renders host and port as "host:port", bracketing bare IPv6
// literals so the result stays parseable by net.SplitHostPort.
func formatHostPort(host string, port int) string {
	trimmed := strings.TrimSpace(host)
	needsBrackets := strings.Contains(trimmed, ":") && !strings.HasPrefix(trimmed, "[")
	if needsBrackets {
		return "[" + trimmed + "]:" + strconv.Itoa(port)
	}
	return trimmed + ":" + strconv.Itoa(port)
}
|
||||
|
||||
// defaultPortByType maps a database driver type to its conventional default
// port; unknown types yield 0 so callers can detect the missing default.
func defaultPortByType(driverType string) int {
	normalized := strings.ToLower(strings.TrimSpace(driverType))
	switch normalized {
	case "clickhouse":
		return 9000
	case "dameng":
		return 5236
	case "diros":
		return 9030
	case "highgo":
		return 5866
	case "kingbase":
		return 54321
	case "mongodb":
		return 27017
	case "mysql", "mariadb":
		return 3306
	case "oracle":
		return 1521
	case "postgres", "vastbase":
		return 5432
	case "redis":
		return 6379
	case "sphinx":
		return 9306
	case "sqlserver":
		return 1433
	case "tdengine":
		return 6041
	}
	return 0
}
|
||||
291
internal/app/global_proxy.go
Normal file
291
internal/app/global_proxy.go
Normal file
@@ -0,0 +1,291 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
)
|
||||
|
||||
// globalProxySnapshot is an immutable copy of the global proxy state, safe
// to hand to callers without further locking.
type globalProxySnapshot struct {
	Enabled bool                   `json:"enabled"`
	Proxy   connection.ProxyConfig `json:"proxy"`
}
|
||||
|
||||
// globalProxyRuntime holds the process-wide proxy settings, guarded by an
// RWMutex: reads happen when building each outgoing connection, writes only
// on reconfiguration via setGlobalProxyConfig.
var globalProxyRuntime = struct {
	mu      sync.RWMutex
	enabled bool
	proxy   connection.ProxyConfig
}{}
|
||||
|
||||
// localProxyTLSFallbackTransport retries idempotent requests that fail with
// an unknown-authority TLS error through a verification-skipping fallback
// transport. It is installed only when the proxy host is a loopback address.
type localProxyTLSFallbackTransport struct {
	primary       *http.Transport // normal transport with full certificate verification
	fallback      *http.Transport // same transport but with InsecureSkipVerify set
	proxyEndpoint string          // redacted proxy URL, used only for log messages
}
|
||||
|
||||
func currentGlobalProxyConfig() globalProxySnapshot {
|
||||
globalProxyRuntime.mu.RLock()
|
||||
defer globalProxyRuntime.mu.RUnlock()
|
||||
if !globalProxyRuntime.enabled {
|
||||
return globalProxySnapshot{
|
||||
Enabled: false,
|
||||
Proxy: connection.ProxyConfig{},
|
||||
}
|
||||
}
|
||||
return globalProxySnapshot{
|
||||
Enabled: true,
|
||||
Proxy: globalProxyRuntime.proxy,
|
||||
}
|
||||
}
|
||||
|
||||
// setGlobalProxyConfig replaces the process-wide proxy settings and returns
// the resulting snapshot. Disabling clears the stored proxy; enabling
// validates the proxy first so an invalid config never overwrites a working
// one.
func setGlobalProxyConfig(enabled bool, proxyConfig connection.ProxyConfig) (globalProxySnapshot, error) {
	if !enabled {
		globalProxyRuntime.mu.Lock()
		globalProxyRuntime.enabled = false
		globalProxyRuntime.proxy = connection.ProxyConfig{}
		globalProxyRuntime.mu.Unlock()
		return currentGlobalProxyConfig(), nil
	}

	// Normalize/validate outside the lock; errors leave the current state
	// untouched.
	normalizedProxy, err := proxytunnel.NormalizeConfig(proxyConfig)
	if err != nil {
		return globalProxySnapshot{}, err
	}

	globalProxyRuntime.mu.Lock()
	globalProxyRuntime.enabled = true
	globalProxyRuntime.proxy = normalizedProxy
	globalProxyRuntime.mu.Unlock()
	return currentGlobalProxyConfig(), nil
}
|
||||
|
||||
// ConfigureGlobalProxy is the frontend-facing entry point for enabling or
// disabling the global proxy. It persists the setting in memory, logs the
// outcome, and returns the effective snapshot in the result payload.
func (a *App) ConfigureGlobalProxy(enabled bool, proxyConfig connection.ProxyConfig) connection.QueryResult {
	snapshot, err := setGlobalProxyConfig(enabled, proxyConfig)
	if err != nil {
		return connection.QueryResult{Success: false, Message: err.Error()}
	}

	if snapshot.Enabled {
		// Never log credentials; only record whether auth is configured.
		authState := ""
		if strings.TrimSpace(snapshot.Proxy.User) != "" {
			authState = "(认证:已配置)"
		}
		logger.Infof(
			"全局代理已启用:%s://%s:%d%s",
			strings.ToLower(strings.TrimSpace(snapshot.Proxy.Type)),
			strings.TrimSpace(snapshot.Proxy.Host),
			snapshot.Proxy.Port,
			authState,
		)
	} else {
		logger.Infof("全局代理已关闭")
	}

	return connection.QueryResult{
		Success: true,
		Message: "全局代理配置已生效",
		Data:    snapshot,
	}
}
|
||||
|
||||
func (a *App) GetGlobalProxyConfig() connection.QueryResult {
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: "OK",
|
||||
Data: currentGlobalProxyConfig(),
|
||||
}
|
||||
}
|
||||
|
||||
func applyGlobalProxyToConnection(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
effective := config
|
||||
if effective.UseProxy {
|
||||
return effective
|
||||
}
|
||||
if isFileDatabaseType(effective.Type) {
|
||||
effective.Proxy = connection.ProxyConfig{}
|
||||
return effective
|
||||
}
|
||||
|
||||
snapshot := currentGlobalProxyConfig()
|
||||
if !snapshot.Enabled {
|
||||
effective.Proxy = connection.ProxyConfig{}
|
||||
return effective
|
||||
}
|
||||
|
||||
effective.UseProxy = true
|
||||
effective.Proxy = snapshot.Proxy
|
||||
return effective
|
||||
}
|
||||
|
||||
// isFileDatabaseType reports whether the driver type refers to a file-backed
// (embedded) database rather than a networked server.
func isFileDatabaseType(driverType string) bool {
	normalized := strings.ToLower(strings.TrimSpace(driverType))
	return normalized == "sqlite" || normalized == "duckdb"
}
|
||||
|
||||
func newHTTPClientWithGlobalProxy(timeout time.Duration) *http.Client {
|
||||
client := &http.Client{
|
||||
Timeout: timeout,
|
||||
}
|
||||
if transport := buildHTTPTransportWithGlobalProxy(); transport != nil {
|
||||
client.Transport = transport
|
||||
}
|
||||
return client
|
||||
}
|
||||
|
||||
// buildHTTPTransportWithGlobalProxy derives an HTTP transport from the
// default one. When the global proxy is enabled it routes through the proxy
// URL; for loopback proxies it additionally wraps the transport with a TLS
// unknown-authority fallback (local MITM proxies often present self-signed
// certificates). Returns nil when the default transport cannot be cloned.
func buildHTTPTransportWithGlobalProxy() http.RoundTripper {
	baseTransport, ok := http.DefaultTransport.(*http.Transport)
	if !ok || baseTransport == nil {
		return nil
	}

	// Clone so the shared default transport is never mutated.
	transport := baseTransport.Clone()
	snapshot := currentGlobalProxyConfig()
	if !snapshot.Enabled {
		transport.Proxy = http.ProxyFromEnvironment
		return transport
	}

	proxyURL, err := buildProxyURLFromConfig(snapshot.Proxy)
	if err != nil {
		// Invalid config degrades to environment-based proxying.
		logger.Warnf("全局代理配置无效,回退系统代理:%v", err)
		transport.Proxy = http.ProxyFromEnvironment
		return transport
	}

	transport.Proxy = http.ProxyURL(proxyURL)
	if !isLoopbackProxyHost(snapshot.Proxy.Host) {
		return transport
	}

	// Loopback proxy: prepare an insecure-TLS fallback for requests whose
	// certificate chain fails with an unknown-authority error.
	fallbackTransport := transport.Clone()
	fallbackTransport.TLSClientConfig = cloneTLSConfigWithInsecureSkipVerify(fallbackTransport.TLSClientConfig)
	return &localProxyTLSFallbackTransport{
		primary:       transport,
		fallback:      fallbackTransport,
		proxyEndpoint: proxyURL.Redacted(),
	}
}
|
||||
|
||||
// RoundTrip sends the request through the primary transport and, when it
// fails with an unknown-authority TLS error on an idempotent method, retries
// once through the verification-skipping fallback transport.
func (t *localProxyTLSFallbackTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	resp, err := t.primary.RoundTrip(req)
	if err == nil {
		return resp, nil
	}
	if !isTLSFallbackCandidate(req.Method, err) {
		return nil, err
	}

	// If the body cannot be replayed, surface the original TLS error rather
	// than the clone failure.
	retryReq, cloneErr := cloneRequestForRetry(req)
	if cloneErr != nil {
		return nil, err
	}
	logger.Warnf("检测到本地代理 TLS 证书不受信任,启用兼容回退:代理=%s 目标=%s 错误=%v", t.proxyEndpoint, req.URL.String(), err)
	return t.fallback.RoundTrip(retryReq)
}
|
||||
|
||||
func isTLSFallbackCandidate(method string, err error) bool {
|
||||
if !isIdempotentRequestMethod(method) {
|
||||
return false
|
||||
}
|
||||
return isUnknownAuthorityError(err)
|
||||
}
|
||||
|
||||
// isIdempotentRequestMethod reports whether the HTTP method is safe to retry
// automatically (only GET and HEAD qualify here).
func isIdempotentRequestMethod(method string) bool {
	normalized := strings.ToUpper(strings.TrimSpace(method))
	return normalized == http.MethodGet || normalized == http.MethodHead
}
|
||||
|
||||
func cloneRequestForRetry(req *http.Request) (*http.Request, error) {
|
||||
cloned := req.Clone(req.Context())
|
||||
if req.Body == nil || req.Body == http.NoBody {
|
||||
return cloned, nil
|
||||
}
|
||||
if req.GetBody == nil {
|
||||
return nil, fmt.Errorf("request body not replayable")
|
||||
}
|
||||
body, err := req.GetBody()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cloned.Body = body
|
||||
return cloned, nil
|
||||
}
|
||||
|
||||
func isUnknownAuthorityError(err error) bool {
|
||||
var unknownErr x509.UnknownAuthorityError
|
||||
if errors.As(err, &unknownErr) {
|
||||
return true
|
||||
}
|
||||
return strings.Contains(strings.ToLower(err.Error()), "x509: certificate signed by unknown authority")
|
||||
}
|
||||
|
||||
func cloneTLSConfigWithInsecureSkipVerify(base *tls.Config) *tls.Config {
|
||||
if base == nil {
|
||||
return &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
cloned := base.Clone()
|
||||
cloned.InsecureSkipVerify = true
|
||||
return cloned
|
||||
}
|
||||
|
||||
// isLoopbackProxyHost reports whether the proxy host points at the local
// machine: the literal "localhost" or any loopback IP address.
func isLoopbackProxyHost(host string) bool {
	candidate := strings.TrimSpace(host)
	switch {
	case candidate == "":
		return false
	case strings.EqualFold(candidate, "localhost"):
		return true
	}
	parsed := net.ParseIP(candidate)
	return parsed != nil && parsed.IsLoopback()
}
|
||||
|
||||
// buildProxyURLFromConfig turns a proxy config into a *url.URL suitable for
// http.ProxyURL. Only "http" and "socks5" proxies are accepted; host and
// port are validated and optional credentials are embedded in the URL.
func buildProxyURLFromConfig(proxyConfig connection.ProxyConfig) (*url.URL, error) {
	normalizedProxy, err := proxytunnel.NormalizeConfig(proxyConfig)
	if err != nil {
		return nil, err
	}

	proxyType := strings.ToLower(strings.TrimSpace(normalizedProxy.Type))
	if proxyType != "http" && proxyType != "socks5" {
		return nil, fmt.Errorf("不支持的代理类型:%s", normalizedProxy.Type)
	}
	if strings.TrimSpace(normalizedProxy.Host) == "" {
		return nil, fmt.Errorf("代理地址不能为空")
	}
	if normalizedProxy.Port <= 0 || normalizedProxy.Port > 65535 {
		return nil, fmt.Errorf("代理端口无效:%d", normalizedProxy.Port)
	}

	proxyURL := &url.URL{
		Scheme: proxyType,
		Host:   net.JoinHostPort(strings.TrimSpace(normalizedProxy.Host), strconv.Itoa(normalizedProxy.Port)),
	}
	// Only attach credentials when a user is configured; the password may be
	// empty.
	if strings.TrimSpace(normalizedProxy.User) != "" {
		proxyURL.User = url.UserPassword(strings.TrimSpace(normalizedProxy.User), normalizedProxy.Password)
	}
	return proxyURL, nil
}
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
)
|
||||
@@ -88,6 +89,8 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
query = fmt.Sprintf("CREATE DATABASE \"%s\"", escapedDbName)
|
||||
} else if dbType == "tdengine" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "clickhouse" {
|
||||
query = fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", quoteIdentByType(dbType, dbName))
|
||||
} else if dbType == "mariadb" || dbType == "diros" {
|
||||
// MariaDB uses same syntax as MySQL
|
||||
} else if dbType == "sphinx" {
|
||||
@@ -110,16 +113,39 @@ func resolveDDLDBType(config connection.ConnectionConfig) string {
|
||||
|
||||
driver := strings.ToLower(strings.TrimSpace(config.Driver))
|
||||
switch driver {
|
||||
case "postgresql":
|
||||
case "postgresql", "postgres", "pg", "pq", "pgx":
|
||||
return "postgres"
|
||||
case "dm":
|
||||
case "dm", "dameng", "dm8":
|
||||
return "dameng"
|
||||
case "sqlite3":
|
||||
case "sqlite3", "sqlite":
|
||||
return "sqlite"
|
||||
case "sphinxql":
|
||||
return "sphinx"
|
||||
case "diros", "doris":
|
||||
return "diros"
|
||||
case "kingbase", "kingbase8", "kingbasees", "kingbasev8":
|
||||
return "kingbase"
|
||||
case "highgo":
|
||||
return "highgo"
|
||||
case "vastbase":
|
||||
return "vastbase"
|
||||
}
|
||||
|
||||
switch {
|
||||
case strings.Contains(driver, "postgres"):
|
||||
return "postgres"
|
||||
case strings.Contains(driver, "kingbase"):
|
||||
return "kingbase"
|
||||
case strings.Contains(driver, "highgo"):
|
||||
return "highgo"
|
||||
case strings.Contains(driver, "vastbase"):
|
||||
return "vastbase"
|
||||
case strings.Contains(driver, "sqlite"):
|
||||
return "sqlite"
|
||||
case strings.Contains(driver, "sphinx"):
|
||||
return "sphinx"
|
||||
case strings.Contains(driver, "diros"), strings.Contains(driver, "doris"):
|
||||
return "diros"
|
||||
default:
|
||||
return driver
|
||||
}
|
||||
@@ -162,7 +188,7 @@ func buildRunConfigForDDL(config connection.ConnectionConfig, dbType string, dbN
|
||||
if strings.EqualFold(strings.TrimSpace(config.Type), "custom") {
|
||||
// custom 连接的 dbName 语义依赖 driver,尽量在常见驱动上对齐内置类型行为。
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "vastbase", "dameng":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "vastbase", "dameng", "clickhouse":
|
||||
if strings.TrimSpace(dbName) != "" {
|
||||
runConfig.Database = strings.TrimSpace(dbName)
|
||||
}
|
||||
@@ -184,15 +210,12 @@ func (a *App) RenameDatabase(config connection.ConnectionConfig, oldName string,
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
return connection.QueryResult{Success: false, Message: "MySQL/MariaDB/Diros/Sphinx 不支持直接重命名数据库,请新建库后迁移数据"}
|
||||
return connection.QueryResult{Success: false, Message: "MySQL/MariaDB/Doris/Sphinx 不支持直接重命名数据库,请新建库后迁移数据"}
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
if strings.EqualFold(strings.TrimSpace(config.Database), oldName) {
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再重命名"}
|
||||
}
|
||||
runConfig := config
|
||||
if strings.TrimSpace(runConfig.Database) == "" {
|
||||
runConfig.Database = "postgres"
|
||||
}
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -219,7 +242,7 @@ func (a *App) DropDatabase(config connection.ConnectionConfig, dbName string) co
|
||||
sql string
|
||||
)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "tdengine", "clickhouse":
|
||||
runConfig = config
|
||||
runConfig.Database = ""
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
@@ -228,9 +251,6 @@ func (a *App) DropDatabase(config connection.ConnectionConfig, dbName string) co
|
||||
return connection.QueryResult{Success: false, Message: "当前连接正在使用目标数据库,请先连接到其他数据库后再删除"}
|
||||
}
|
||||
runConfig = config
|
||||
if strings.TrimSpace(runConfig.Database) == "" {
|
||||
runConfig.Database = "postgres"
|
||||
}
|
||||
sql = fmt.Sprintf("DROP DATABASE %s", quoteIdentByType(dbType, dbName))
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除数据库", dbType)}
|
||||
@@ -261,7 +281,7 @@ func (a *App) RenameTable(config connection.ConnectionConfig, dbName string, old
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持重命名表", dbType)}
|
||||
}
|
||||
@@ -275,7 +295,7 @@ func (a *App) RenameTable(config connection.ConnectionConfig, dbName string, old
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "clickhouse":
|
||||
newQualifiedTable := quoteTableIdentByType(dbType, schemaName, newTableName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualifiedTable, newQualifiedTable)
|
||||
case "sqlserver":
|
||||
@@ -307,7 +327,7 @@ func (a *App) DropTable(config connection.ConnectionConfig, dbName string, table
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "tdengine", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除表", dbType)}
|
||||
}
|
||||
@@ -410,6 +430,66 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
}
|
||||
}
|
||||
|
||||
// DBQueryIsolated runs a single statement on a dedicated, non-cached
// connection that is closed when the call returns. Read statements return
// rows and column names; write statements return the affected-row count.
func (a *App) DBQueryIsolated(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
	runConfig := normalizeRunConfig(config, dbName)

	dbInst, err := a.openDatabaseIsolated(runConfig)
	if err != nil {
		logger.Error(err, "DBQueryIsolated 获取连接失败:%s", formatConnSummary(runConfig))
		return connection.QueryResult{Success: false, Message: err.Error()}
	}
	// The isolated connection is owned by this call; always close it.
	defer func() {
		if closeErr := dbInst.Close(); closeErr != nil {
			logger.Error(closeErr, "DBQueryIsolated 关闭临时连接失败:%s", formatConnSummary(runConfig))
		}
	}()

	query = sanitizeSQLForPgLike(runConfig.Type, query)
	// Fall back to a 30-second timeout when none is configured.
	timeoutSeconds := runConfig.Timeout
	if timeoutSeconds <= 0 {
		timeoutSeconds = 30
	}
	ctx, cancel := utils.ContextWithTimeout(time.Duration(timeoutSeconds) * time.Second)
	defer cancel()

	// Classify the statement: SELECT/SHOW/DESCRIBE/EXPLAIN go through the
	// row-returning path; a leading "{" on MongoDB is treated as a read
	// (JSON-style query document).
	lowerQuery := strings.TrimSpace(strings.ToLower(query))
	isReadQuery := strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain")
	if !isReadQuery && strings.ToLower(strings.TrimSpace(runConfig.Type)) == "mongodb" && strings.HasPrefix(strings.TrimSpace(query), "{") {
		isReadQuery = true
	}

	if isReadQuery {
		var data []map[string]interface{}
		var columns []string
		// Prefer the context-aware query method when the driver provides one.
		if q, ok := dbInst.(interface {
			QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
		}); ok {
			data, columns, err = q.QueryContext(ctx, query)
		} else {
			data, columns, err = dbInst.Query(query)
		}
		if err != nil {
			logger.Error(err, "DBQueryIsolated 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
			return connection.QueryResult{Success: false, Message: err.Error()}
		}
		return connection.QueryResult{Success: true, Data: data, Fields: columns}
	}

	var affected int64
	// Prefer the context-aware exec method when the driver provides one.
	if e, ok := dbInst.(interface {
		ExecContext(context.Context, string) (int64, error)
	}); ok {
		affected, err = e.ExecContext(ctx, query)
	} else {
		affected, err = dbInst.Exec(query)
	}
	if err != nil {
		logger.Error(err, "DBQueryIsolated 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
		return connection.QueryResult{Success: false, Message: err.Error()}
	}
	return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
}
|
||||
|
||||
func sqlSnippet(query string) string {
|
||||
q := strings.TrimSpace(query)
|
||||
const max = 200
|
||||
@@ -464,8 +544,8 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
dbType := resolveDDLDBType(config)
|
||||
runConfig := buildRunConfigForDDL(config, dbType, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -473,35 +553,65 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
sqlStr, err := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
sqlStr, err := resolveCreateStatementWithFallback(dbInst, config, dbName, tableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBShowCreateTable 获取建表语句失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if shouldFallbackCreateStatement(dbType, sqlStr) {
|
||||
columns, colErr := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if colErr != nil {
|
||||
logger.Error(colErr, "DBShowCreateTable 兜底加载字段失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: colErr.Error()}
|
||||
}
|
||||
fallbackDDL, buildErr := buildFallbackCreateStatement(dbType, schemaName, pureTableName, columns)
|
||||
if buildErr != nil {
|
||||
logger.Error(buildErr, "DBShowCreateTable 兜底生成 DDL 失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: buildErr.Error()}
|
||||
}
|
||||
sqlStr = fallbackDDL
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: sqlStr}
|
||||
}
|
||||
|
||||
func shouldFallbackCreateStatement(dbType string, ddl string) bool {
|
||||
func resolveCreateStatementWithFallback(dbInst db.Database, config connection.ConnectionConfig, dbName string, tableName string) (string, error) {
|
||||
dbType := resolveDDLDBType(config)
|
||||
schemaName, pureTableName := normalizeSchemaAndTableByType(dbType, dbName, tableName)
|
||||
if pureTableName == "" {
|
||||
return "", fmt.Errorf("表名不能为空")
|
||||
}
|
||||
|
||||
sqlStr, sourceErr := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
if sourceErr == nil && !shouldFallbackCreateStatement(dbType, sqlStr) {
|
||||
return sqlStr, nil
|
||||
}
|
||||
|
||||
if !supportsCreateStatementFallback(dbType) {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return sqlStr, nil
|
||||
}
|
||||
|
||||
columns, colErr := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if colErr != nil {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return "", colErr
|
||||
}
|
||||
|
||||
fallbackDDL, buildErr := buildFallbackCreateStatement(dbType, schemaName, pureTableName, columns)
|
||||
if buildErr != nil {
|
||||
if sourceErr != nil {
|
||||
return "", sourceErr
|
||||
}
|
||||
return "", buildErr
|
||||
}
|
||||
return fallbackDDL, nil
|
||||
}
|
||||
|
||||
func supportsCreateStatementFallback(dbType string) bool {
|
||||
switch dbType {
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func shouldFallbackCreateStatement(dbType string, ddl string) bool {
|
||||
if !supportsCreateStatementFallback(dbType) {
|
||||
return false
|
||||
}
|
||||
|
||||
trimmed := strings.TrimSpace(ddl)
|
||||
if trimmed == "" {
|
||||
@@ -669,7 +779,7 @@ func (a *App) DropView(config connection.ConnectionConfig, dbName string, viewNa
|
||||
|
||||
dbType := resolveDDLDBType(config)
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "postgres", "kingbase", "sqlite", "duckdb", "oracle", "dameng", "highgo", "vastbase", "sqlserver", "clickhouse":
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前数据源(%s)暂不支持删除视图", dbType)}
|
||||
}
|
||||
@@ -758,7 +868,7 @@ func (a *App) RenameView(config connection.ConnectionConfig, dbName string, oldN
|
||||
|
||||
var sql string
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "clickhouse":
|
||||
newQualified := quoteTableIdentByType(dbType, schemaName, newName)
|
||||
sql = fmt.Sprintf("RENAME TABLE %s TO %s", oldQualified, newQualified)
|
||||
case "postgres", "kingbase", "highgo", "vastbase":
|
||||
|
||||
174
internal/app/methods_db_create_statement_test.go
Normal file
174
internal/app/methods_db_create_statement_test.go
Normal file
@@ -0,0 +1,174 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type fakeCreateStatementDB struct {
|
||||
createSQL string
|
||||
createErr error
|
||||
columns []connection.ColumnDefinition
|
||||
columnsErr error
|
||||
|
||||
createSchema string
|
||||
createTable string
|
||||
colsSchema string
|
||||
colsTable string
|
||||
}
|
||||
|
||||
func (f *fakeCreateStatementDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeCreateStatementDB) Close() error { return nil }
|
||||
func (f *fakeCreateStatementDB) Ping() error { return nil }
|
||||
func (f *fakeCreateStatementDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
return nil, nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) Exec(query string) (int64, error) { return 0, nil }
|
||||
func (f *fakeCreateStatementDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeCreateStatementDB) GetTables(dbName string) ([]string, error) { return nil, nil }
|
||||
func (f *fakeCreateStatementDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
f.createSchema = dbName
|
||||
f.createTable = tableName
|
||||
return f.createSQL, f.createErr
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
f.colsSchema = dbName
|
||||
f.colsTable = tableName
|
||||
return f.columns, f.columnsErr
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeCreateStatementDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestResolveDDLDBType_CustomDriverAlias(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
driver string
|
||||
want string
|
||||
}{
|
||||
{name: "postgresql alias", driver: "postgresql", want: "postgres"},
|
||||
{name: "pgx alias", driver: "pgx", want: "postgres"},
|
||||
{name: "kingbase8 alias", driver: "kingbase8", want: "kingbase"},
|
||||
{name: "kingbase contains alias", driver: "kingbasees", want: "kingbase"},
|
||||
{name: "dm alias", driver: "dm8", want: "dameng"},
|
||||
{name: "sqlite alias", driver: "sqlite3", want: "sqlite"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := connection.ConnectionConfig{Type: "custom", Driver: tc.driver}
|
||||
if got := resolveDDLDBType(cfg); got != tc.want {
|
||||
t.Fatalf("resolveDDLDBType() mismatch, want=%q got=%q", tc.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_CustomKingbaseUsesPublicSchema(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL",
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "kingbase8",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if dbInst.createSchema != "public" || dbInst.colsSchema != "public" {
|
||||
t.Fatalf("expected fallback schema public, got create=%q columns=%q", dbInst.createSchema, dbInst.colsSchema)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "public"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL with public schema, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_KeepQualifiedSchema(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "-- SHOW CREATE TABLE not fully supported for PostgreSQL in this MVP.",
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "integer", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "postgresql",
|
||||
}, "demo_db", "sales.orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if dbInst.createSchema != "sales" || dbInst.colsSchema != "sales" {
|
||||
t.Fatalf("expected schema sales, got create=%q columns=%q", dbInst.createSchema, dbInst.colsSchema)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "sales"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL with sales schema, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_NoFallbackForMySQL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createSQL: "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL",
|
||||
columnsErr: errors.New("should not be called"),
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "mysql",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if ddl != dbInst.createSQL {
|
||||
t.Fatalf("expected original ddl for mysql, got: %s", ddl)
|
||||
}
|
||||
if dbInst.colsTable != "" {
|
||||
t.Fatalf("mysql path should not call GetColumns, got table=%q", dbInst.colsTable)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveCreateStatementWithFallback_FallbackWhenCreateStatementError(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbInst := &fakeCreateStatementDB{
|
||||
createErr: errors.New("statement unsupported"),
|
||||
columns: []connection.ColumnDefinition{
|
||||
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
|
||||
},
|
||||
}
|
||||
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
}, "demo_db", "orders")
|
||||
if err != nil {
|
||||
t.Fatalf("resolveCreateStatementWithFallback() unexpected error: %v", err)
|
||||
}
|
||||
if !strings.Contains(ddl, `CREATE TABLE "public"."orders"`) {
|
||||
t.Fatalf("expected fallback DDL for postgres error path, got: %s", ddl)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,12 +2,14 @@ package app
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -15,11 +17,16 @@ import (
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
"github.com/xuri/excelize/v2"
|
||||
)
|
||||
|
||||
const minExportQueryTimeout = 5 * time.Minute
|
||||
const minClickHouseExportQueryTimeout = 2 * time.Hour
|
||||
|
||||
func (a *App) OpenSQLFile() connection.QueryResult {
|
||||
selection, err := runtime.OpenFileDialog(a.ctx, runtime.OpenDialogOptions{
|
||||
Title: "Select SQL File",
|
||||
@@ -120,6 +127,78 @@ func (a *App) SelectSSHKeyFile(currentPath string) connection.QueryResult {
|
||||
return connection.QueryResult{Success: true, Data: map[string]interface{}{"path": selection}}
|
||||
}
|
||||
|
||||
func (a *App) SelectDatabaseFile(currentPath string, driverType string) connection.QueryResult {
|
||||
defaultDir := strings.TrimSpace(currentPath)
|
||||
if defaultDir == "" {
|
||||
if home, err := os.UserHomeDir(); err == nil {
|
||||
defaultDir = home
|
||||
}
|
||||
}
|
||||
if filepath.Ext(defaultDir) != "" {
|
||||
defaultDir = filepath.Dir(defaultDir)
|
||||
}
|
||||
if defaultDir != "" && !filepath.IsAbs(defaultDir) {
|
||||
if abs, err := filepath.Abs(defaultDir); err == nil {
|
||||
defaultDir = abs
|
||||
}
|
||||
}
|
||||
|
||||
normalizedType := strings.ToLower(strings.TrimSpace(driverType))
|
||||
filters := []runtime.FileFilter{
|
||||
{
|
||||
DisplayName: "数据库文件",
|
||||
Pattern: "*.db;*.sqlite;*.sqlite3;*.db3;*.duckdb;*.ddb",
|
||||
},
|
||||
{
|
||||
DisplayName: "所有文件",
|
||||
Pattern: "*",
|
||||
},
|
||||
}
|
||||
title := "选择数据库文件"
|
||||
switch normalizedType {
|
||||
case "sqlite":
|
||||
title = "选择 SQLite 数据文件"
|
||||
filters = []runtime.FileFilter{
|
||||
{
|
||||
DisplayName: "SQLite 文件",
|
||||
Pattern: "*.db;*.sqlite;*.sqlite3;*.db3",
|
||||
},
|
||||
{
|
||||
DisplayName: "所有文件",
|
||||
Pattern: "*",
|
||||
},
|
||||
}
|
||||
case "duckdb":
|
||||
title = "选择 DuckDB 数据文件"
|
||||
filters = []runtime.FileFilter{
|
||||
{
|
||||
DisplayName: "DuckDB 文件",
|
||||
Pattern: "*.duckdb;*.ddb;*.db",
|
||||
},
|
||||
{
|
||||
DisplayName: "所有文件",
|
||||
Pattern: "*",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
selection, err := runtime.OpenFileDialog(a.ctx, runtime.OpenDialogOptions{
|
||||
Title: title,
|
||||
DefaultDirectory: defaultDir,
|
||||
Filters: filters,
|
||||
})
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
if strings.TrimSpace(selection) == "" {
|
||||
return connection.QueryResult{Success: false, Message: "Cancelled"}
|
||||
}
|
||||
if abs, err := filepath.Abs(selection); err == nil {
|
||||
selection = abs
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]interface{}{"path": selection}}
|
||||
}
|
||||
|
||||
// PreviewImportFile 解析导入文件,返回字段列表、总行数、前 5 行预览数据
|
||||
func (a *App) PreviewImportFile(filePath string) connection.QueryResult {
|
||||
if filePath == "" {
|
||||
@@ -541,7 +620,7 @@ func (a *App) ExportTable(config connection.ConnectionConfig, dbName string, tab
|
||||
|
||||
query := fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(runConfig.Type, tableName))
|
||||
|
||||
data, columns, err := dbInst.Query(query)
|
||||
data, columns, err := queryDataForExport(dbInst, runConfig, query)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -701,7 +780,7 @@ func quoteIdentByType(dbType string, ident string) string {
|
||||
}
|
||||
|
||||
switch dbType {
|
||||
case "mysql", "mariadb", "diros", "sphinx", "tdengine":
|
||||
case "mysql", "mariadb", "diros", "sphinx", "tdengine", "clickhouse":
|
||||
return "`" + strings.ReplaceAll(ident, "`", "``") + "`"
|
||||
case "sqlserver":
|
||||
escaped := strings.ReplaceAll(ident, "]", "]]")
|
||||
@@ -872,7 +951,7 @@ func listViewNameLookup(dbInst db.Database, config connection.ConnectionConfig,
|
||||
if strings.TrimSpace(query) == "" {
|
||||
continue
|
||||
}
|
||||
rows, _, err := dbInst.Query(query)
|
||||
rows, _, err := queryDataForExport(dbInst, config, query)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
@@ -950,6 +1029,15 @@ func buildListViewQueries(config connection.ConnectionConfig, dbName string) []s
|
||||
return []string{
|
||||
`SELECT table_schema AS schema_name, table_name AS object_name FROM information_schema.views WHERE table_schema NOT IN ('information_schema', 'pg_catalog') ORDER BY table_schema, table_name`,
|
||||
}
|
||||
case "clickhouse":
|
||||
if strings.TrimSpace(dbName) == "" {
|
||||
return []string{
|
||||
`SELECT database AS schema_name, name AS object_name FROM system.tables WHERE engine LIKE '%View%' ORDER BY database, name`,
|
||||
}
|
||||
}
|
||||
return []string{
|
||||
fmt.Sprintf(`SELECT database AS schema_name, name AS object_name FROM system.tables WHERE engine LIKE '%%View%%' AND database='%s' ORDER BY name`, escapedDbName),
|
||||
}
|
||||
default:
|
||||
if strings.TrimSpace(dbName) == "" {
|
||||
return []string{
|
||||
@@ -974,7 +1062,7 @@ func tryGetViewCreateStatement(
|
||||
if strings.TrimSpace(query) == "" {
|
||||
continue
|
||||
}
|
||||
rows, _, err := dbInst.Query(query)
|
||||
rows, _, err := queryDataForExport(dbInst, config, query)
|
||||
if err != nil || len(rows) == 0 {
|
||||
continue
|
||||
}
|
||||
@@ -1070,6 +1158,18 @@ WHERE s.name = '%s' AND v.name = '%s'`,
|
||||
fmt.Sprintf("SELECT sql AS ddl FROM duckdb_views() WHERE view_name = '%s' AND schema_name = '%s' LIMIT 1", escapedView, escapedSchema),
|
||||
fmt.Sprintf("SELECT view_definition AS ddl FROM information_schema.views WHERE table_name = '%s' AND table_schema = '%s' LIMIT 1", escapedView, escapedSchema),
|
||||
}
|
||||
case "clickhouse":
|
||||
if safeSchema == "" {
|
||||
safeSchema = strings.TrimSpace(dbName)
|
||||
}
|
||||
if safeSchema != "" {
|
||||
return []string{
|
||||
fmt.Sprintf("SHOW CREATE TABLE %s.%s", quoteIdentByType("clickhouse", safeSchema), quoteIdentByType("clickhouse", safeView)),
|
||||
}
|
||||
}
|
||||
return []string{
|
||||
fmt.Sprintf("SHOW CREATE TABLE %s", quoteIdentByType("clickhouse", safeView)),
|
||||
}
|
||||
default:
|
||||
if safeSchema != "" {
|
||||
return []string{
|
||||
@@ -1270,7 +1370,7 @@ func dumpTableSQL(
|
||||
createSQL = ddl
|
||||
}
|
||||
} else {
|
||||
ddl, err := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
ddl, err := resolveCreateStatementWithFallback(dbInst, config, dbName, tableName)
|
||||
if err != nil {
|
||||
if viewDDL, ok := tryGetViewCreateStatement(dbInst, config, dbName, schemaName, pureTableName); ok {
|
||||
createSQL = viewDDL
|
||||
@@ -1327,7 +1427,7 @@ func dumpTableSQL(
|
||||
|
||||
qualified := qualifyTable(schemaName, pureTableName)
|
||||
selectSQL := fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.Type, qualified))
|
||||
data, columns, err := dbInst.Query(selectSQL)
|
||||
data, columns, err := queryDataForExport(dbInst, config, selectSQL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -1362,14 +1462,17 @@ func (a *App) ExportData(data []map[string]interface{}, columns []string, defaul
|
||||
if defaultName == "" {
|
||||
defaultName = "export"
|
||||
}
|
||||
logger.Infof("ExportData 开始:rows=%d cols=%d format=%s defaultName=%s", len(data), len(columns), strings.ToLower(strings.TrimSpace(format)), strings.TrimSpace(defaultName))
|
||||
filename, err := runtime.SaveFileDialog(a.ctx, runtime.SaveDialogOptions{
|
||||
Title: "Export Data",
|
||||
DefaultFilename: fmt.Sprintf("%s.%s", defaultName, strings.ToLower(format)),
|
||||
})
|
||||
|
||||
if err != nil || filename == "" {
|
||||
logger.Infof("ExportData 已取消或未选择文件:err=%v", err)
|
||||
return connection.QueryResult{Success: false, Message: "Cancelled"}
|
||||
}
|
||||
logger.Infof("ExportData 选定文件:%s", filename)
|
||||
|
||||
f, err := os.Create(filename)
|
||||
if err != nil {
|
||||
@@ -1377,9 +1480,11 @@ func (a *App) ExportData(data []map[string]interface{}, columns []string, defaul
|
||||
}
|
||||
defer f.Close()
|
||||
if err := writeRowsToFile(f, data, columns, format); err != nil {
|
||||
logger.Warnf("ExportData 写入失败:file=%s err=%v", filename, err)
|
||||
return connection.QueryResult{Success: false, Message: "Write error: " + err.Error()}
|
||||
}
|
||||
|
||||
logger.Infof("ExportData 完成:file=%s rows=%d", filename, len(data))
|
||||
return connection.QueryResult{Success: true, Message: "Export successful"}
|
||||
}
|
||||
|
||||
@@ -1400,8 +1505,10 @@ func (a *App) ExportQuery(config connection.ConnectionConfig, dbName string, que
|
||||
DefaultFilename: fmt.Sprintf("%s.%s", defaultName, strings.ToLower(format)),
|
||||
})
|
||||
if err != nil || filename == "" {
|
||||
logger.Infof("ExportQuery 已取消或未选择文件:err=%v", err)
|
||||
return connection.QueryResult{Success: false, Message: "Cancelled"}
|
||||
}
|
||||
logger.Infof("ExportQuery 开始:type=%s db=%s format=%s file=%s sql=%q", strings.TrimSpace(config.Type), strings.TrimSpace(dbName), strings.ToLower(strings.TrimSpace(format)), filename, sqlSnippet(query))
|
||||
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
@@ -1415,8 +1522,9 @@ func (a *App) ExportQuery(config connection.ConnectionConfig, dbName string, que
|
||||
return connection.QueryResult{Success: false, Message: "Only SELECT/WITH queries are supported"}
|
||||
}
|
||||
|
||||
data, columns, err := dbInst.Query(query)
|
||||
data, columns, err := queryDataForExport(dbInst, runConfig, query)
|
||||
if err != nil {
|
||||
logger.Warnf("ExportQuery 查询失败:type=%s db=%s err=%v sql=%q", strings.TrimSpace(config.Type), strings.TrimSpace(dbName), err, sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
@@ -1427,12 +1535,55 @@ func (a *App) ExportQuery(config connection.ConnectionConfig, dbName string, que
|
||||
defer f.Close()
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, format); err != nil {
|
||||
logger.Warnf("ExportQuery 写入失败:file=%s err=%v", filename, err)
|
||||
return connection.QueryResult{Success: false, Message: "Write error: " + err.Error()}
|
||||
}
|
||||
|
||||
logger.Infof("ExportQuery 完成:file=%s rows=%d cols=%d", filename, len(data), len(columns))
|
||||
return connection.QueryResult{Success: true, Message: "Export successful"}
|
||||
}
|
||||
|
||||
func queryDataForExport(dbInst db.Database, config connection.ConnectionConfig, query string) ([]map[string]interface{}, []string, error) {
|
||||
timeout := getExportQueryTimeout(config)
|
||||
dbType := resolveDDLDBType(config)
|
||||
if dbType == "clickhouse" {
|
||||
logger.Infof("ClickHouse 导出查询开始:timeout=%s SQL片段=%q", timeout, sqlSnippet(query))
|
||||
}
|
||||
if q, ok := dbInst.(interface {
|
||||
QueryContext(context.Context, string) ([]map[string]interface{}, []string, error)
|
||||
}); ok {
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
data, columns, err := q.QueryContext(ctx, query)
|
||||
if err != nil && dbType == "clickhouse" {
|
||||
logger.Warnf("ClickHouse 导出查询失败:timeout=%s SQL片段=%q err=%v", timeout, sqlSnippet(query), err)
|
||||
}
|
||||
return data, columns, err
|
||||
}
|
||||
data, columns, err := dbInst.Query(query)
|
||||
if err != nil && dbType == "clickhouse" {
|
||||
logger.Warnf("ClickHouse 导出查询失败(无 QueryContext):timeout=%s SQL片段=%q err=%v", timeout, sqlSnippet(query), err)
|
||||
}
|
||||
return data, columns, err
|
||||
}
|
||||
|
||||
func getExportQueryTimeout(config connection.ConnectionConfig) time.Duration {
|
||||
timeout := time.Duration(config.Timeout) * time.Second
|
||||
if timeout <= 0 {
|
||||
timeout = minExportQueryTimeout
|
||||
}
|
||||
if resolveDDLDBType(config) == "clickhouse" {
|
||||
if timeout < minClickHouseExportQueryTimeout {
|
||||
timeout = minClickHouseExportQueryTimeout
|
||||
}
|
||||
return timeout
|
||||
}
|
||||
if timeout < minExportQueryTimeout {
|
||||
timeout = minExportQueryTimeout
|
||||
}
|
||||
return timeout
|
||||
}
|
||||
|
||||
func writeRowsToFile(f *os.File, data []map[string]interface{}, columns []string, format string) error {
|
||||
format = strings.ToLower(strings.TrimSpace(format))
|
||||
if f == nil {
|
||||
@@ -1506,7 +1657,11 @@ func writeRowsToFile(f *os.File, data []map[string]interface{}, columns []string
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := jsonEncoder.Encode(rowMap); err != nil {
|
||||
exportedRow := make(map[string]interface{}, len(columns))
|
||||
for _, col := range columns {
|
||||
exportedRow[col] = normalizeExportJSONValue(rowMap[col])
|
||||
}
|
||||
if err := jsonEncoder.Encode(exportedRow); err != nil {
|
||||
return err
|
||||
}
|
||||
isJsonFirstRow = false
|
||||
@@ -1546,11 +1701,102 @@ func formatExportCellText(val interface{}) string {
|
||||
return "NULL"
|
||||
}
|
||||
return v.Format("2006-01-02 15:04:05")
|
||||
case float32:
|
||||
f := float64(v)
|
||||
if math.IsNaN(f) || math.IsInf(f, 0) {
|
||||
return "NULL"
|
||||
}
|
||||
return strconv.FormatFloat(f, 'f', -1, 32)
|
||||
case float64:
|
||||
if math.IsNaN(v) || math.IsInf(v, 0) {
|
||||
return "NULL"
|
||||
}
|
||||
return strconv.FormatFloat(v, 'f', -1, 64)
|
||||
case json.Number:
|
||||
text := strings.TrimSpace(v.String())
|
||||
if text == "" {
|
||||
return "NULL"
|
||||
}
|
||||
return text
|
||||
default:
|
||||
return fmt.Sprintf("%v", val)
|
||||
}
|
||||
}
|
||||
|
||||
func normalizeExportJSONValue(val interface{}) interface{} {
|
||||
if val == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch v := val.(type) {
|
||||
case float32:
|
||||
f := float64(v)
|
||||
if math.IsNaN(f) || math.IsInf(f, 0) {
|
||||
return nil
|
||||
}
|
||||
return json.Number(strconv.FormatFloat(f, 'f', -1, 32))
|
||||
case float64:
|
||||
if math.IsNaN(v) || math.IsInf(v, 0) {
|
||||
return nil
|
||||
}
|
||||
return json.Number(strconv.FormatFloat(v, 'f', -1, 64))
|
||||
case json.Number:
|
||||
text := strings.TrimSpace(v.String())
|
||||
if text == "" {
|
||||
return nil
|
||||
}
|
||||
return json.Number(text)
|
||||
case map[string]interface{}:
|
||||
out := make(map[string]interface{}, len(v))
|
||||
for key, item := range v {
|
||||
out[key] = normalizeExportJSONValue(item)
|
||||
}
|
||||
return out
|
||||
case []interface{}:
|
||||
items := make([]interface{}, len(v))
|
||||
for i, item := range v {
|
||||
items[i] = normalizeExportJSONValue(item)
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
rv := reflect.ValueOf(val)
|
||||
switch rv.Kind() {
|
||||
case reflect.Pointer, reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
return normalizeExportJSONValue(rv.Elem().Interface())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
out := make(map[string]interface{}, rv.Len())
|
||||
iter := rv.MapRange()
|
||||
for iter.Next() {
|
||||
out[fmt.Sprint(iter.Key().Interface())] = normalizeExportJSONValue(iter.Value().Interface())
|
||||
}
|
||||
return out
|
||||
case reflect.Slice:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
if rv.Type().Elem().Kind() == reflect.Uint8 {
|
||||
return val
|
||||
}
|
||||
fallthrough
|
||||
case reflect.Array:
|
||||
size := rv.Len()
|
||||
items := make([]interface{}, size)
|
||||
for i := 0; i < size; i++ {
|
||||
items[i] = normalizeExportJSONValue(rv.Index(i).Interface())
|
||||
}
|
||||
return items
|
||||
default:
|
||||
return val
|
||||
}
|
||||
}
|
||||
|
||||
// writeRowsToXlsx 使用 excelize 写入真正的 xlsx 格式文件
|
||||
func writeRowsToXlsx(filename string, data []map[string]interface{}, columns []string) error {
|
||||
xlsx := excelize.NewFile()
|
||||
|
||||
205
internal/app/methods_file_export_test.go
Normal file
205
internal/app/methods_file_export_test.go
Normal file
@@ -0,0 +1,205 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
type fakeExportQueryDB struct {
|
||||
data []map[string]interface{}
|
||||
cols []string
|
||||
err error
|
||||
|
||||
lastQuery string
|
||||
lastContextTimeout time.Duration
|
||||
hasContextDeadline bool
|
||||
}
|
||||
|
||||
func (f *fakeExportQueryDB) Connect(config connection.ConnectionConfig) error { return nil }
|
||||
func (f *fakeExportQueryDB) Close() error { return nil }
|
||||
func (f *fakeExportQueryDB) Ping() error { return nil }
|
||||
func (f *fakeExportQueryDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
f.lastQuery = query
|
||||
return f.data, f.cols, f.err
|
||||
}
|
||||
func (f *fakeExportQueryDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
f.lastQuery = query
|
||||
if deadline, ok := ctx.Deadline(); ok {
|
||||
f.hasContextDeadline = true
|
||||
f.lastContextTimeout = time.Until(deadline)
|
||||
}
|
||||
return f.data, f.cols, f.err
|
||||
}
|
||||
func (f *fakeExportQueryDB) Exec(query string) (int64, error) { return 0, nil }
|
||||
func (f *fakeExportQueryDB) GetDatabases() ([]string, error) { return nil, nil }
|
||||
func (f *fakeExportQueryDB) GetTables(dbName string) ([]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (f *fakeExportQueryDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestFormatExportCellText_FloatNoScientificNotation(t *testing.T) {
|
||||
got := formatExportCellText(1.445663e+06)
|
||||
if strings.Contains(strings.ToLower(got), "e+") || strings.Contains(strings.ToLower(got), "e-") {
|
||||
t.Fatalf("不应输出科学计数法,got=%q", got)
|
||||
}
|
||||
if got != "1445663" {
|
||||
t.Fatalf("浮点整值导出异常,want=%q got=%q", "1445663", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_Markdown_NumberKeepPlainText(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.md")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{"id": 1.445663e+06},
|
||||
}
|
||||
columns := []string{"id"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "md"); err != nil {
|
||||
t.Fatalf("写入 md 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 md 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
if strings.Contains(strings.ToLower(content), "e+") || strings.Contains(strings.ToLower(content), "e-") {
|
||||
t.Fatalf("md 导出包含科学计数法: %s", content)
|
||||
}
|
||||
if !strings.Contains(content, "| 1445663 |") {
|
||||
t.Fatalf("md 导出未保留整数字面量,content=%s", content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteRowsToFile_JSON_NumberKeepPlainText(t *testing.T) {
|
||||
f, err := os.CreateTemp("", "gonavi-export-*.json")
|
||||
if err != nil {
|
||||
t.Fatalf("创建临时文件失败: %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
defer f.Close()
|
||||
|
||||
data := []map[string]interface{}{
|
||||
{"id": 1.445663e+06},
|
||||
}
|
||||
columns := []string{"id"}
|
||||
|
||||
if err := writeRowsToFile(f, data, columns, "json"); err != nil {
|
||||
t.Fatalf("写入 json 失败: %v", err)
|
||||
}
|
||||
|
||||
contentBytes, err := os.ReadFile(f.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("读取 json 失败: %v", err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
if strings.Contains(strings.ToLower(content), "e+") || strings.Contains(strings.ToLower(content), "e-") {
|
||||
t.Fatalf("json 导出包含科学计数法: %s", content)
|
||||
}
|
||||
|
||||
var decoded []map[string]json.Number
|
||||
decoder := json.NewDecoder(bytes.NewReader(contentBytes))
|
||||
decoder.UseNumber()
|
||||
if err := decoder.Decode(&decoded); err != nil {
|
||||
t.Fatalf("解析导出 json 失败: %v", err)
|
||||
}
|
||||
if len(decoded) != 1 {
|
||||
t.Fatalf("导出行数异常,got=%d", len(decoded))
|
||||
}
|
||||
if decoded[0]["id"].String() != "1445663" {
|
||||
t.Fatalf("json 数值格式异常,want=1445663 got=%s", decoded[0]["id"].String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryDataForExport_UsesMinimumTimeout(t *testing.T) {
|
||||
fake := &fakeExportQueryDB{
|
||||
data: []map[string]interface{}{{"v": 1}},
|
||||
cols: []string{"v"},
|
||||
}
|
||||
_, _, err := queryDataForExport(fake, connection.ConnectionConfig{Timeout: 10}, "SELECT 1")
|
||||
if err != nil {
|
||||
t.Fatalf("queryDataForExport 返回错误: %v", err)
|
||||
}
|
||||
if !fake.hasContextDeadline {
|
||||
t.Fatal("queryDataForExport 应设置 context deadline")
|
||||
}
|
||||
if fake.lastQuery != "SELECT 1" {
|
||||
t.Fatalf("queryDataForExport 查询语句异常,want=%q got=%q", "SELECT 1", fake.lastQuery)
|
||||
}
|
||||
lowerBound := minExportQueryTimeout - 5*time.Second
|
||||
upperBound := minExportQueryTimeout + 5*time.Second
|
||||
if fake.lastContextTimeout < lowerBound || fake.lastContextTimeout > upperBound {
|
||||
t.Fatalf("导出最小超时异常,want≈%s got=%s", minExportQueryTimeout, fake.lastContextTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryDataForExport_UsesLargerConfiguredTimeout(t *testing.T) {
|
||||
fake := &fakeExportQueryDB{
|
||||
data: []map[string]interface{}{{"v": 1}},
|
||||
cols: []string{"v"},
|
||||
}
|
||||
_, _, err := queryDataForExport(fake, connection.ConnectionConfig{Timeout: 900}, "SELECT 1")
|
||||
if err != nil {
|
||||
t.Fatalf("queryDataForExport 返回错误: %v", err)
|
||||
}
|
||||
if !fake.hasContextDeadline {
|
||||
t.Fatal("queryDataForExport 应设置 context deadline")
|
||||
}
|
||||
expected := 900 * time.Second
|
||||
lowerBound := expected - 5*time.Second
|
||||
upperBound := expected + 5*time.Second
|
||||
if fake.lastContextTimeout < lowerBound || fake.lastContextTimeout > upperBound {
|
||||
t.Fatalf("导出配置超时异常,want≈%s got=%s", expected, fake.lastContextTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeout_ClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
timeout := getExportQueryTimeout(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Timeout: 30,
|
||||
})
|
||||
if timeout != minClickHouseExportQueryTimeout {
|
||||
t.Fatalf("clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetExportQueryTimeout_CustomClickHouseUsesLongerMinimum(t *testing.T) {
|
||||
timeout := getExportQueryTimeout(connection.ConnectionConfig{
|
||||
Type: "custom",
|
||||
Driver: "clickhouse",
|
||||
Timeout: 30,
|
||||
})
|
||||
if timeout != minClickHouseExportQueryTimeout {
|
||||
t.Fatalf("custom clickhouse 导出超时下限异常,want=%s got=%s", minClickHouseExportQueryTimeout, timeout)
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,9 @@ import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
@@ -64,24 +67,27 @@ func getRedisClientCacheKey(config connection.ConnectionConfig) string {
|
||||
}
|
||||
|
||||
func formatRedisConnSummary(config connection.ConnectionConfig) string {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString("类型=redis 地址=")
|
||||
b.WriteString(config.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.Port + '0')))
|
||||
b.WriteString(strconv.Itoa(config.Port))
|
||||
if topology := strings.TrimSpace(config.Topology); topology != "" {
|
||||
b.WriteString(" 模式=")
|
||||
b.WriteString(topology)
|
||||
}
|
||||
if len(config.Hosts) > 0 {
|
||||
b.WriteString(" 节点数=")
|
||||
b.WriteString(strconv.Itoa(len(config.Hosts)))
|
||||
}
|
||||
b.WriteString(" DB=")
|
||||
b.WriteString(string(rune(config.RedisDB + '0')))
|
||||
b.WriteString(strconv.Itoa(config.RedisDB))
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(" SSH=")
|
||||
b.WriteString(config.SSH.Host)
|
||||
b.WriteString(":")
|
||||
b.WriteString(string(rune(config.SSH.Port + '0')))
|
||||
b.WriteString(strconv.Itoa(config.SSH.Port))
|
||||
b.WriteString(" 用户=")
|
||||
b.WriteString(config.SSH.User)
|
||||
}
|
||||
@@ -107,14 +113,20 @@ func (a *App) RedisTestConnection(config connection.ConnectionConfig) connection
|
||||
}
|
||||
|
||||
// RedisScanKeys scans keys matching a pattern
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor uint64, count int64) connection.QueryResult {
|
||||
func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string, cursor any, count int64) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
client, err := a.getRedisClient(config)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, cursor, count)
|
||||
parsedCursor, err := parseRedisScanCursor(cursor)
|
||||
if err != nil {
|
||||
logger.Warnf("RedisScanKeys 游标解析失败,已回退到起始游标:cursor=%v err=%v", cursor, err)
|
||||
parsedCursor = 0
|
||||
}
|
||||
|
||||
result, err := client.ScanKeys(pattern, parsedCursor, count)
|
||||
if err != nil {
|
||||
logger.Error(err, "RedisScanKeys 扫描失败:pattern=%s", pattern)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -123,6 +135,82 @@ func (a *App) RedisScanKeys(config connection.ConnectionConfig, pattern string,
|
||||
return connection.QueryResult{Success: true, Data: result}
|
||||
}
|
||||
|
||||
func parseRedisScanCursor(cursor any) (uint64, error) {
|
||||
switch v := cursor.(type) {
|
||||
case nil:
|
||||
return 0, nil
|
||||
case uint64:
|
||||
return v, nil
|
||||
case uint32:
|
||||
return uint64(v), nil
|
||||
case uint16:
|
||||
return uint64(v), nil
|
||||
case uint8:
|
||||
return uint64(v), nil
|
||||
case uint:
|
||||
return uint64(v), nil
|
||||
case int64:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int32:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int16:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int8:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case int:
|
||||
if v < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %d", v)
|
||||
}
|
||||
return uint64(v), nil
|
||||
case float64:
|
||||
return parseRedisScanCursorFromFloat(v)
|
||||
case float32:
|
||||
return parseRedisScanCursorFromFloat(float64(v))
|
||||
case json.Number:
|
||||
return parseRedisScanCursor(strings.TrimSpace(v.String()))
|
||||
case string:
|
||||
trimmed := strings.TrimSpace(v)
|
||||
if trimmed == "" {
|
||||
return 0, nil
|
||||
}
|
||||
parsed, err := strconv.ParseUint(trimmed, 10, 64)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("无效游标: %q", v)
|
||||
}
|
||||
return parsed, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("不支持的游标类型: %T", cursor)
|
||||
}
|
||||
}
|
||||
|
||||
func parseRedisScanCursorFromFloat(value float64) (uint64, error) {
|
||||
if math.IsNaN(value) || math.IsInf(value, 0) {
|
||||
return 0, fmt.Errorf("无效浮点游标: %v", value)
|
||||
}
|
||||
if value < 0 {
|
||||
return 0, fmt.Errorf("游标不能为负数: %v", value)
|
||||
}
|
||||
if math.Trunc(value) != value {
|
||||
return 0, fmt.Errorf("游标必须为整数: %v", value)
|
||||
}
|
||||
if value > float64(math.MaxUint64) {
|
||||
return 0, fmt.Errorf("游标超出范围: %v", value)
|
||||
}
|
||||
return uint64(value), nil
|
||||
}
|
||||
|
||||
// RedisGetValue gets the value of a key
|
||||
func (a *App) RedisGetValue(config connection.ConnectionConfig, key string) connection.QueryResult {
|
||||
config.Type = "redis"
|
||||
|
||||
50
internal/app/methods_redis_cursor_test.go
Normal file
50
internal/app/methods_redis_cursor_test.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestParseRedisScanCursor(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
input any
|
||||
want uint64
|
||||
wantErr bool
|
||||
}{
|
||||
{name: "nil defaults to zero", input: nil, want: 0},
|
||||
{name: "empty string defaults to zero", input: " ", want: 0},
|
||||
{name: "string cursor", input: "123", want: 123},
|
||||
{name: "uint64 cursor", input: uint64(456), want: 456},
|
||||
{name: "int cursor", input: int(789), want: 789},
|
||||
{name: "float cursor", input: float64(42), want: 42},
|
||||
{name: "json number cursor", input: json.Number("88"), want: 88},
|
||||
{name: "negative int rejected", input: -1, wantErr: true},
|
||||
{name: "fraction float rejected", input: float64(1.5), wantErr: true},
|
||||
{name: "invalid string rejected", input: "abc", wantErr: true},
|
||||
{name: "unsupported type rejected", input: true, wantErr: true},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
got, err := parseRedisScanCursor(tc.input)
|
||||
if tc.wantErr {
|
||||
if err == nil {
|
||||
t.Fatalf("expected error, got nil (value=%d)", got)
|
||||
}
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if got != tc.want {
|
||||
t.Fatalf("parseRedisScanCursor() mismatch, want=%d got=%d", tc.want, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -233,6 +233,49 @@ func (a *App) InstallUpdateAndRestart() connection.QueryResult {
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) OpenDownloadedUpdateDirectory() connection.QueryResult {
|
||||
a.updateMu.Lock()
|
||||
staged := a.updateState.staged
|
||||
a.updateMu.Unlock()
|
||||
if staged == nil {
|
||||
return connection.QueryResult{Success: false, Message: "未找到已下载的更新包"}
|
||||
}
|
||||
assetPath := strings.TrimSpace(staged.FilePath)
|
||||
if assetPath == "" {
|
||||
return connection.QueryResult{Success: false, Message: "更新包路径为空"}
|
||||
}
|
||||
dirPath := strings.TrimSpace(filepath.Dir(assetPath))
|
||||
if dirPath == "" || dirPath == "." {
|
||||
return connection.QueryResult{Success: false, Message: "无法解析更新目录"}
|
||||
}
|
||||
if stat, err := os.Stat(dirPath); err != nil || !stat.IsDir() {
|
||||
return connection.QueryResult{Success: false, Message: "更新目录不存在或不可访问"}
|
||||
}
|
||||
|
||||
var cmd *exec.Cmd
|
||||
switch stdRuntime.GOOS {
|
||||
case "darwin":
|
||||
cmd = exec.Command("open", dirPath)
|
||||
case "windows":
|
||||
cmd = exec.Command("explorer", dirPath)
|
||||
case "linux":
|
||||
cmd = exec.Command("xdg-open", dirPath)
|
||||
default:
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("当前平台暂不支持打开目录:%s", stdRuntime.GOOS)}
|
||||
}
|
||||
if err := cmd.Start(); err != nil {
|
||||
logger.Error(err, "打开更新目录失败")
|
||||
return connection.QueryResult{Success: false, Message: fmt.Sprintf("打开更新目录失败:%v", err)}
|
||||
}
|
||||
return connection.QueryResult{
|
||||
Success: true,
|
||||
Message: fmt.Sprintf("已打开安装目录:%s", dirPath),
|
||||
Data: map[string]any{
|
||||
"path": dirPath,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) downloadAndStageUpdate(info UpdateInfo) connection.QueryResult {
|
||||
workspaceDir := strings.TrimSpace(resolveUpdateWorkspaceDir(info.LatestVersion))
|
||||
if workspaceDir == "" {
|
||||
@@ -374,7 +417,7 @@ func getCurrentAuthor() string {
|
||||
}
|
||||
|
||||
func fetchLatestRelease() (*githubRelease, error) {
|
||||
client := &http.Client{Timeout: 15 * time.Second}
|
||||
client := newHTTPClientWithGlobalProxy(15 * time.Second)
|
||||
req, err := http.NewRequest(http.MethodGet, updateAPIURL, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -451,7 +494,7 @@ func fetchReleaseSHA256(assets []githubAsset) (map[string]string, error) {
|
||||
return nil, errors.New("Release 未提供 SHA256SUMS")
|
||||
}
|
||||
|
||||
client := &http.Client{Timeout: 15 * time.Second}
|
||||
client := newHTTPClientWithGlobalProxy(15 * time.Second)
|
||||
req, err := http.NewRequest(http.MethodGet, checksumURL, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -522,7 +565,7 @@ func (w *downloadProgressWriter) Write(p []byte) (int, error) {
|
||||
}
|
||||
|
||||
func downloadFileWithHash(url, filePath string, onProgress func(downloaded, total int64)) (string, error) {
|
||||
client := &http.Client{Timeout: 10 * time.Minute}
|
||||
client := newHTTPClientWithGlobalProxy(10 * time.Minute)
|
||||
req, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
|
||||
@@ -9,33 +9,44 @@ type SSHConfig struct {
|
||||
KeyPath string `json:"keyPath"`
|
||||
}
|
||||
|
||||
// ProxyConfig holds proxy connection details
|
||||
type ProxyConfig struct {
|
||||
Type string `json:"type"` // socks5 | http
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user,omitempty"`
|
||||
Password string `json:"password,omitempty"`
|
||||
}
|
||||
|
||||
// ConnectionConfig holds database connection details including SSH
|
||||
type ConnectionConfig struct {
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
Type string `json:"type"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
SavePassword bool `json:"savePassword,omitempty"` // Persist password in saved connection
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
UseProxy bool `json:"useProxy,omitempty"`
|
||||
Proxy ProxyConfig `json:"proxy,omitempty"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
RedisDB int `json:"redisDB,omitempty"` // Redis database index (0-15)
|
||||
URI string `json:"uri,omitempty"` // Connection URI for copy/paste
|
||||
Hosts []string `json:"hosts,omitempty"` // Multi-host addresses: host:port
|
||||
Topology string `json:"topology,omitempty"` // single | replica | cluster
|
||||
MySQLReplicaUser string `json:"mysqlReplicaUser,omitempty"` // MySQL replica auth user
|
||||
MySQLReplicaPassword string `json:"mysqlReplicaPassword,omitempty"` // MySQL replica auth password
|
||||
ReplicaSet string `json:"replicaSet,omitempty"` // MongoDB replica set name
|
||||
AuthSource string `json:"authSource,omitempty"` // MongoDB authSource
|
||||
ReadPreference string `json:"readPreference,omitempty"` // MongoDB readPreference
|
||||
MongoSRV bool `json:"mongoSrv,omitempty"` // MongoDB use mongodb+srv URI scheme
|
||||
MongoAuthMechanism string `json:"mongoAuthMechanism,omitempty"` // MongoDB authMechanism
|
||||
MongoReplicaUser string `json:"mongoReplicaUser,omitempty"` // MongoDB replica auth user
|
||||
MongoReplicaPassword string `json:"mongoReplicaPassword,omitempty"` // MongoDB replica auth password
|
||||
}
|
||||
|
||||
// QueryResult is the standard response format for Wails methods
|
||||
|
||||
599
internal/db/clickhouse_impl.go
Normal file
599
internal/db/clickhouse_impl.go
Normal file
@@ -0,0 +1,599 @@
|
||||
//go:build gonavi_full_drivers || gonavi_clickhouse_driver
|
||||
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
clickhouse "github.com/ClickHouse/clickhouse-go/v2"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultClickHousePort = 9000
|
||||
defaultClickHouseUser = "default"
|
||||
defaultClickHouseDatabase = "default"
|
||||
minClickHouseReadTimeout = 5 * time.Minute
|
||||
)
|
||||
|
||||
type ClickHouseDB struct {
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
forwarder *ssh.LocalForwarder
|
||||
database string
|
||||
}
|
||||
|
||||
func normalizeClickHouseConfig(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
normalized := applyClickHouseURI(config)
|
||||
if strings.TrimSpace(normalized.Host) == "" {
|
||||
normalized.Host = "localhost"
|
||||
}
|
||||
if normalized.Port <= 0 {
|
||||
normalized.Port = defaultClickHousePort
|
||||
}
|
||||
if strings.TrimSpace(normalized.User) == "" {
|
||||
normalized.User = defaultClickHouseUser
|
||||
}
|
||||
if strings.TrimSpace(normalized.Database) == "" {
|
||||
normalized.Database = defaultClickHouseDatabase
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
func applyClickHouseURI(config connection.ConnectionConfig) connection.ConnectionConfig {
|
||||
uriText := strings.TrimSpace(config.URI)
|
||||
if uriText == "" {
|
||||
return config
|
||||
}
|
||||
lowerURI := strings.ToLower(uriText)
|
||||
if !strings.HasPrefix(lowerURI, "clickhouse://") {
|
||||
return config
|
||||
}
|
||||
|
||||
parsed, err := url.Parse(uriText)
|
||||
if err != nil {
|
||||
return config
|
||||
}
|
||||
|
||||
if parsed.User != nil {
|
||||
if strings.TrimSpace(config.User) == "" {
|
||||
config.User = parsed.User.Username()
|
||||
}
|
||||
if pass, ok := parsed.User.Password(); ok && config.Password == "" {
|
||||
config.Password = pass
|
||||
}
|
||||
}
|
||||
|
||||
if dbName := strings.TrimPrefix(strings.TrimSpace(parsed.Path), "/"); dbName != "" && strings.TrimSpace(config.Database) == "" {
|
||||
config.Database = dbName
|
||||
}
|
||||
if strings.TrimSpace(config.Database) == "" {
|
||||
if dbName := strings.TrimSpace(parsed.Query().Get("database")); dbName != "" {
|
||||
config.Database = dbName
|
||||
}
|
||||
}
|
||||
|
||||
defaultPort := config.Port
|
||||
if defaultPort <= 0 {
|
||||
defaultPort = defaultClickHousePort
|
||||
}
|
||||
if strings.TrimSpace(config.Host) == "" {
|
||||
host, port, ok := parseHostPortWithDefault(parsed.Host, defaultPort)
|
||||
if ok {
|
||||
config.Host = host
|
||||
config.Port = port
|
||||
}
|
||||
}
|
||||
if config.Port <= 0 {
|
||||
config.Port = defaultPort
|
||||
}
|
||||
return config
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) buildClickHouseOptions(config connection.ConnectionConfig) *clickhouse.Options {
|
||||
connectTimeout := getConnectTimeout(config)
|
||||
readTimeout := connectTimeout
|
||||
if readTimeout < minClickHouseReadTimeout {
|
||||
readTimeout = minClickHouseReadTimeout
|
||||
}
|
||||
return &clickhouse.Options{
|
||||
Addr: []string{
|
||||
net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
},
|
||||
Auth: clickhouse.Auth{
|
||||
Database: strings.TrimSpace(config.Database),
|
||||
Username: strings.TrimSpace(config.User),
|
||||
Password: config.Password,
|
||||
},
|
||||
DialTimeout: connectTimeout,
|
||||
ReadTimeout: readTimeout,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Connect(config connection.ConnectionConfig) error {
|
||||
if supported, reason := DriverRuntimeSupportStatus("clickhouse"); !supported {
|
||||
if strings.TrimSpace(reason) == "" {
|
||||
reason = "ClickHouse 纯 Go 驱动未启用,请先在驱动管理中安装启用"
|
||||
}
|
||||
return fmt.Errorf("%s", reason)
|
||||
}
|
||||
|
||||
if c.forwarder != nil {
|
||||
_ = c.forwarder.Close()
|
||||
c.forwarder = nil
|
||||
}
|
||||
if c.conn != nil {
|
||||
_ = c.conn.Close()
|
||||
c.conn = nil
|
||||
}
|
||||
|
||||
runConfig := normalizeClickHouseConfig(config)
|
||||
c.pingTimeout = getConnectTimeout(runConfig)
|
||||
c.database = runConfig.Database
|
||||
|
||||
if runConfig.UseSSH {
|
||||
logger.Infof("ClickHouse 使用 SSH 连接:地址=%s:%d 用户=%s", runConfig.Host, runConfig.Port, runConfig.User)
|
||||
forwarder, err := ssh.GetOrCreateLocalForwarder(runConfig.SSH, runConfig.Host, runConfig.Port)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建 SSH 隧道失败:%w", err)
|
||||
}
|
||||
c.forwarder = forwarder
|
||||
|
||||
host, portText, err := net.SplitHostPort(forwarder.LocalAddr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地转发地址失败:%w", err)
|
||||
}
|
||||
port, err := strconv.Atoi(portText)
|
||||
if err != nil {
|
||||
return fmt.Errorf("解析本地端口失败:%w", err)
|
||||
}
|
||||
|
||||
runConfig.Host = host
|
||||
runConfig.Port = port
|
||||
runConfig.UseSSH = false
|
||||
logger.Infof("ClickHouse 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
}
|
||||
|
||||
c.conn = clickhouse.OpenDB(c.buildClickHouseOptions(runConfig))
|
||||
|
||||
if err := c.Ping(); err != nil {
|
||||
_ = c.Close()
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Close() error {
|
||||
if c.forwarder != nil {
|
||||
if err := c.forwarder.Close(); err != nil {
|
||||
logger.Warnf("关闭 ClickHouse SSH 端口转发失败:%v", err)
|
||||
}
|
||||
c.forwarder = nil
|
||||
}
|
||||
if c.conn != nil {
|
||||
return c.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Ping() error {
|
||||
if c.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := c.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return c.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) QueryContext(ctx context.Context, query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
rows, err := c.conn.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
rows, err := c.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return scanRows(rows)
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) ExecContext(ctx context.Context, query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := c.conn.ExecContext(ctx, query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) Exec(query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := c.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetDatabases() ([]string, error) {
|
||||
data, _, err := c.Query("SELECT name FROM system.databases ORDER BY name")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]string, 0, len(data))
|
||||
for _, row := range data {
|
||||
if val, ok := getClickHouseValueFromRow(row, "name", "database"); ok {
|
||||
result = append(result, fmt.Sprintf("%v", val))
|
||||
continue
|
||||
}
|
||||
for _, value := range row {
|
||||
result = append(result, fmt.Sprintf("%v", value))
|
||||
break
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetTables(dbName string) ([]string, error) {
|
||||
targetDB := strings.TrimSpace(dbName)
|
||||
if targetDB == "" {
|
||||
targetDB = strings.TrimSpace(c.database)
|
||||
}
|
||||
|
||||
var query string
|
||||
if targetDB != "" {
|
||||
query = fmt.Sprintf(
|
||||
"SELECT name FROM system.tables WHERE database = '%s' ORDER BY name",
|
||||
escapeClickHouseSQLLiteral(targetDB),
|
||||
)
|
||||
} else {
|
||||
query = "SELECT database, name FROM system.tables ORDER BY database, name"
|
||||
}
|
||||
|
||||
data, _, err := c.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]string, 0, len(data))
|
||||
for _, row := range data {
|
||||
if targetDB != "" {
|
||||
if val, ok := getClickHouseValueFromRow(row, "name", "table", "table_name"); ok {
|
||||
result = append(result, fmt.Sprintf("%v", val))
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
databaseValue, hasDB := getClickHouseValueFromRow(row, "database", "schema_name")
|
||||
tableValue, hasTable := getClickHouseValueFromRow(row, "name", "table", "table_name")
|
||||
if hasDB && hasTable {
|
||||
result = append(result, fmt.Sprintf("%v.%v", databaseValue, tableValue))
|
||||
continue
|
||||
}
|
||||
}
|
||||
for _, value := range row {
|
||||
result = append(result, fmt.Sprintf("%v", value))
|
||||
break
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
database, table, err := c.resolveDatabaseAndTable(dbName, tableName)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", quoteClickHouseIdentifier(database), quoteClickHouseIdentifier(table))
|
||||
data, _, err := c.Query(query)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(data) == 0 {
|
||||
return "", fmt.Errorf("create statement not found")
|
||||
}
|
||||
row := data[0]
|
||||
if val, ok := getClickHouseValueFromRow(row, "statement", "create_statement", "sql", "query"); ok {
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", val))
|
||||
if text != "" {
|
||||
return text, nil
|
||||
}
|
||||
}
|
||||
|
||||
longest := ""
|
||||
for _, value := range row {
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", value))
|
||||
if text == "" {
|
||||
continue
|
||||
}
|
||||
if strings.Contains(strings.ToUpper(text), "CREATE ") && len(text) > len(longest) {
|
||||
longest = text
|
||||
}
|
||||
}
|
||||
if longest != "" {
|
||||
return longest, nil
|
||||
}
|
||||
return "", fmt.Errorf("create statement not found")
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
database, table, err := c.resolveDatabaseAndTable(dbName, tableName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
name,
|
||||
type,
|
||||
default_kind,
|
||||
default_expression,
|
||||
is_in_primary_key,
|
||||
is_in_sorting_key,
|
||||
comment
|
||||
FROM system.columns
|
||||
WHERE database = '%s' AND table = '%s'
|
||||
ORDER BY position`,
|
||||
escapeClickHouseSQLLiteral(database),
|
||||
escapeClickHouseSQLLiteral(table),
|
||||
)
|
||||
data, _, err := c.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
columns := make([]connection.ColumnDefinition, 0, len(data))
|
||||
for _, row := range data {
|
||||
nameValue, _ := getClickHouseValueFromRow(row, "name", "column_name")
|
||||
typeValue, _ := getClickHouseValueFromRow(row, "type", "data_type")
|
||||
defaultKind, _ := getClickHouseValueFromRow(row, "default_kind")
|
||||
defaultExpr, hasDefault := getClickHouseValueFromRow(row, "default_expression", "column_default")
|
||||
commentValue, _ := getClickHouseValueFromRow(row, "comment")
|
||||
inPrimary, _ := getClickHouseValueFromRow(row, "is_in_primary_key")
|
||||
inSorting, _ := getClickHouseValueFromRow(row, "is_in_sorting_key")
|
||||
|
||||
colType := strings.TrimSpace(fmt.Sprintf("%v", typeValue))
|
||||
nullable := "NO"
|
||||
if strings.HasPrefix(strings.ToLower(colType), "nullable(") {
|
||||
nullable = "YES"
|
||||
}
|
||||
|
||||
key := ""
|
||||
if isClickHouseTruthy(inPrimary) {
|
||||
key = "PRI"
|
||||
} else if isClickHouseTruthy(inSorting) {
|
||||
key = "MUL"
|
||||
}
|
||||
|
||||
extra := ""
|
||||
kindText := strings.ToUpper(strings.TrimSpace(fmt.Sprintf("%v", defaultKind)))
|
||||
if kindText != "" && kindText != "DEFAULT" {
|
||||
extra = kindText
|
||||
}
|
||||
|
||||
col := connection.ColumnDefinition{
|
||||
Name: strings.TrimSpace(fmt.Sprintf("%v", nameValue)),
|
||||
Type: colType,
|
||||
Nullable: nullable,
|
||||
Key: key,
|
||||
Extra: extra,
|
||||
Comment: strings.TrimSpace(fmt.Sprintf("%v", commentValue)),
|
||||
}
|
||||
if hasDefault && defaultExpr != nil {
|
||||
text := strings.TrimSpace(fmt.Sprintf("%v", defaultExpr))
|
||||
if text != "" {
|
||||
col.Default = &text
|
||||
}
|
||||
}
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
targetDB := strings.TrimSpace(dbName)
|
||||
if targetDB == "" {
|
||||
targetDB = strings.TrimSpace(c.database)
|
||||
}
|
||||
|
||||
var query string
|
||||
if targetDB != "" {
|
||||
query = fmt.Sprintf(`
|
||||
SELECT
|
||||
database,
|
||||
table,
|
||||
name,
|
||||
type
|
||||
FROM system.columns
|
||||
WHERE database = '%s'
|
||||
ORDER BY table, position`,
|
||||
escapeClickHouseSQLLiteral(targetDB),
|
||||
)
|
||||
} else {
|
||||
query = `
|
||||
SELECT
|
||||
database,
|
||||
table,
|
||||
name,
|
||||
type
|
||||
FROM system.columns
|
||||
WHERE database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA')
|
||||
ORDER BY database, table, position`
|
||||
}
|
||||
|
||||
data, _, err := c.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]connection.ColumnDefinitionWithTable, 0, len(data))
|
||||
for _, row := range data {
|
||||
databaseValue, _ := getClickHouseValueFromRow(row, "database")
|
||||
tableValue, hasTable := getClickHouseValueFromRow(row, "table", "table_name")
|
||||
nameValue, hasName := getClickHouseValueFromRow(row, "name", "column_name")
|
||||
typeValue, _ := getClickHouseValueFromRow(row, "type", "data_type")
|
||||
if !hasTable || !hasName {
|
||||
continue
|
||||
}
|
||||
|
||||
tableName := strings.TrimSpace(fmt.Sprintf("%v", tableValue))
|
||||
if targetDB == "" {
|
||||
dbText := strings.TrimSpace(fmt.Sprintf("%v", databaseValue))
|
||||
if dbText != "" {
|
||||
tableName = dbText + "." + tableName
|
||||
}
|
||||
}
|
||||
|
||||
result = append(result, connection.ColumnDefinitionWithTable{
|
||||
TableName: tableName,
|
||||
Name: strings.TrimSpace(fmt.Sprintf("%v", nameValue)),
|
||||
Type: strings.TrimSpace(fmt.Sprintf("%v", typeValue)),
|
||||
})
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
return []connection.IndexDefinition{}, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
return []connection.ForeignKeyDefinition{}, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
return []connection.TriggerDefinition{}, nil
|
||||
}
|
||||
|
||||
func (c *ClickHouseDB) resolveDatabaseAndTable(dbName, tableName string) (string, string, error) {
|
||||
rawTable := strings.TrimSpace(tableName)
|
||||
if rawTable == "" {
|
||||
return "", "", fmt.Errorf("table name required")
|
||||
}
|
||||
|
||||
resolvedDB := strings.TrimSpace(dbName)
|
||||
resolvedTable := rawTable
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
if dbPart := normalizeClickHouseIdentifierPart(parts[0]); dbPart != "" {
|
||||
resolvedDB = dbPart
|
||||
}
|
||||
resolvedTable = normalizeClickHouseIdentifierPart(parts[1])
|
||||
} else {
|
||||
resolvedTable = normalizeClickHouseIdentifierPart(rawTable)
|
||||
}
|
||||
|
||||
if resolvedDB == "" {
|
||||
resolvedDB = strings.TrimSpace(c.database)
|
||||
}
|
||||
if resolvedDB == "" {
|
||||
resolvedDB = defaultClickHouseDatabase
|
||||
}
|
||||
if resolvedTable == "" {
|
||||
return "", "", fmt.Errorf("table name required")
|
||||
}
|
||||
return resolvedDB, resolvedTable, nil
|
||||
}
|
||||
|
||||
func normalizeClickHouseIdentifierPart(raw string) string {
|
||||
text := strings.TrimSpace(raw)
|
||||
if len(text) >= 2 {
|
||||
first := text[0]
|
||||
last := text[len(text)-1]
|
||||
if (first == '`' && last == '`') || (first == '"' && last == '"') {
|
||||
text = text[1 : len(text)-1]
|
||||
}
|
||||
}
|
||||
return strings.TrimSpace(text)
|
||||
}
|
||||
|
||||
func quoteClickHouseIdentifier(raw string) string {
|
||||
return "`" + strings.ReplaceAll(strings.TrimSpace(raw), "`", "``") + "`"
|
||||
}
|
||||
|
||||
func escapeClickHouseSQLLiteral(raw string) string {
|
||||
return strings.ReplaceAll(strings.TrimSpace(raw), "'", "''")
|
||||
}
|
||||
|
||||
func getClickHouseValueFromRow(row map[string]interface{}, keys ...string) (interface{}, bool) {
|
||||
if len(row) == 0 {
|
||||
return nil, false
|
||||
}
|
||||
for _, key := range keys {
|
||||
if value, ok := row[key]; ok {
|
||||
return value, true
|
||||
}
|
||||
}
|
||||
for existingKey, value := range row {
|
||||
for _, key := range keys {
|
||||
if strings.EqualFold(existingKey, key) {
|
||||
return value, true
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
func isClickHouseTruthy(value interface{}) bool {
|
||||
switch val := value.(type) {
|
||||
case bool:
|
||||
return val
|
||||
case int:
|
||||
return val != 0
|
||||
case int8:
|
||||
return val != 0
|
||||
case int16:
|
||||
return val != 0
|
||||
case int32:
|
||||
return val != 0
|
||||
case int64:
|
||||
return val != 0
|
||||
case uint:
|
||||
return val != 0
|
||||
case uint8:
|
||||
return val != 0
|
||||
case uint16:
|
||||
return val != 0
|
||||
case uint32:
|
||||
return val != 0
|
||||
case uint64:
|
||||
return val != 0
|
||||
case string:
|
||||
normalized := strings.ToLower(strings.TrimSpace(val))
|
||||
return normalized == "1" || normalized == "true" || normalized == "yes" || normalized == "y"
|
||||
default:
|
||||
normalized := strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", value)))
|
||||
return normalized == "1" || normalized == "true" || normalized == "yes" || normalized == "y"
|
||||
}
|
||||
}
|
||||
@@ -15,4 +15,5 @@ func registerOptionalDatabaseFactories() {
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("vastbase"), "vastbase")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("mongodb"), "mongodb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("tdengine"), "tdengine")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("clickhouse"), "clickhouse")
|
||||
}
|
||||
|
||||
@@ -15,4 +15,5 @@ func registerOptionalDatabaseFactories() {
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("vastbase"), "vastbase")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("mongodb"), "mongodb")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("tdengine"), "tdengine")
|
||||
registerDatabaseFactory(newOptionalDriverAgentDatabase("clickhouse"), "clickhouse")
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ const (
|
||||
defaultDirosPort = 9030
|
||||
)
|
||||
|
||||
// DirosDB 使用独立 driver 名称(diros)接入,底层协议兼容 MySQL。
|
||||
// DirosDB 使用独立 driver 名称(diros)接入,底层协议兼容 MySQL(对外显示为 Doris)。
|
||||
type DirosDB struct {
|
||||
MySQLDB
|
||||
}
|
||||
@@ -146,7 +146,7 @@ func (d *DirosDB) getDSN(config connection.ConnectionConfig) string {
|
||||
protocol = netName
|
||||
address = normalizeMySQLAddress(config.Host, config.Port)
|
||||
} else {
|
||||
logger.Warnf("注册 Diros SSH 网络失败,将尝试直连:地址=%s:%d 用户=%s,原因:%v", config.Host, config.Port, config.User, err)
|
||||
logger.Warnf("注册 Doris SSH 网络失败,将尝试直连:地址=%s:%d 用户=%s,原因:%v", config.Host, config.Port, config.User, err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,7 +177,7 @@ func (d *DirosDB) Connect(config connection.ConnectionConfig) error {
|
||||
runConfig := applyDirosURI(config)
|
||||
addresses := collectDirosAddresses(runConfig)
|
||||
if len(addresses) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Diros 地址")
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Doris 地址")
|
||||
}
|
||||
|
||||
var errorDetails []string
|
||||
@@ -214,7 +214,7 @@ func (d *DirosDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
|
||||
if len(errorDetails) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Diros 地址")
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的 Doris 地址")
|
||||
}
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(errorDetails, ";"))
|
||||
}
|
||||
|
||||
@@ -18,18 +18,19 @@ var coreBuiltinDrivers = map[string]struct{}{
|
||||
// optionalGoDrivers 表示需要用户“安装启用”后才能使用的纯 Go 驱动。
|
||||
// 注意:这是一种运行时门控(installed.json 标记),并不减少主二进制体积。
|
||||
var optionalGoDrivers = map[string]struct{}{
|
||||
"mariadb": {},
|
||||
"diros": {},
|
||||
"sphinx": {},
|
||||
"sqlserver": {},
|
||||
"sqlite": {},
|
||||
"duckdb": {},
|
||||
"dameng": {},
|
||||
"kingbase": {},
|
||||
"highgo": {},
|
||||
"vastbase": {},
|
||||
"mongodb": {},
|
||||
"tdengine": {},
|
||||
"mariadb": {},
|
||||
"diros": {},
|
||||
"sphinx": {},
|
||||
"sqlserver": {},
|
||||
"sqlite": {},
|
||||
"duckdb": {},
|
||||
"dameng": {},
|
||||
"kingbase": {},
|
||||
"highgo": {},
|
||||
"vastbase": {},
|
||||
"mongodb": {},
|
||||
"tdengine": {},
|
||||
"clickhouse": {},
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -60,7 +61,7 @@ func driverDisplayName(driverType string) string {
|
||||
case "mariadb":
|
||||
return "MariaDB"
|
||||
case "diros":
|
||||
return "Diros"
|
||||
return "Doris"
|
||||
case "sphinx":
|
||||
return "Sphinx"
|
||||
case "postgres":
|
||||
@@ -83,6 +84,8 @@ func driverDisplayName(driverType string) string {
|
||||
return "MongoDB"
|
||||
case "tdengine":
|
||||
return "TDengine"
|
||||
case "clickhouse":
|
||||
return "ClickHouse"
|
||||
default:
|
||||
return strings.ToUpper(strings.TrimSpace(driverType))
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ package db
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
@@ -114,3 +115,72 @@ func TestTDengineDSN_UsesWebSocketFormat(t *testing.T) {
|
||||
t.Fatalf("tdengine dsn 格式不正确:%s", dsn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClickHouseOptions_UsesStructuredTimeoutAndAuth(t *testing.T) {
|
||||
c := &ClickHouseDB{}
|
||||
cfg := normalizeClickHouseConfig(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Host: "127.0.0.1",
|
||||
Port: 9000,
|
||||
User: "default",
|
||||
Password: "p@ss:wo/rd",
|
||||
Database: "analytics",
|
||||
Timeout: 15,
|
||||
})
|
||||
|
||||
opts := c.buildClickHouseOptions(cfg)
|
||||
if opts == nil {
|
||||
t.Fatal("options 为空")
|
||||
}
|
||||
if len(opts.Addr) != 1 || opts.Addr[0] != "127.0.0.1:9000" {
|
||||
t.Fatalf("addr 不符合预期:%v", opts.Addr)
|
||||
}
|
||||
if opts.Auth.Username != "default" {
|
||||
t.Fatalf("username 不符合预期:%s", opts.Auth.Username)
|
||||
}
|
||||
if opts.Auth.Password != cfg.Password {
|
||||
t.Fatalf("password 不符合预期:%s", opts.Auth.Password)
|
||||
}
|
||||
if opts.Auth.Database != "analytics" {
|
||||
t.Fatalf("database 不符合预期:%s", opts.Auth.Database)
|
||||
}
|
||||
if opts.DialTimeout != 15*time.Second {
|
||||
t.Fatalf("dial timeout 不符合预期:%s", opts.DialTimeout)
|
||||
}
|
||||
if opts.ReadTimeout != minClickHouseReadTimeout {
|
||||
t.Fatalf("read timeout 不符合预期:%s", opts.ReadTimeout)
|
||||
}
|
||||
if _, ok := opts.Settings["write_timeout"]; ok {
|
||||
t.Fatalf("options 不应包含 write_timeout 设置:%v", opts.Settings)
|
||||
}
|
||||
if _, ok := opts.Settings["read_timeout"]; ok {
|
||||
t.Fatalf("options 不应通过 settings 传递 read_timeout:%v", opts.Settings)
|
||||
}
|
||||
if _, ok := opts.Settings["dial_timeout"]; ok {
|
||||
t.Fatalf("options 不应通过 settings 传递 dial_timeout:%v", opts.Settings)
|
||||
}
|
||||
}
|
||||
|
||||
func TestClickHouseOptions_ReadTimeoutUsesLargerConfiguredTimeout(t *testing.T) {
|
||||
c := &ClickHouseDB{}
|
||||
cfg := normalizeClickHouseConfig(connection.ConnectionConfig{
|
||||
Type: "clickhouse",
|
||||
Host: "127.0.0.1",
|
||||
Port: 9000,
|
||||
User: "default",
|
||||
Password: "secret",
|
||||
Database: "analytics",
|
||||
Timeout: 900,
|
||||
})
|
||||
|
||||
opts := c.buildClickHouseOptions(cfg)
|
||||
if opts == nil {
|
||||
t.Fatal("options 为空")
|
||||
}
|
||||
if opts.DialTimeout != 900*time.Second {
|
||||
t.Fatalf("dial timeout 不符合预期:%s", opts.DialTimeout)
|
||||
}
|
||||
if opts.ReadTimeout != 900*time.Second {
|
||||
t.Fatalf("read timeout 不符合预期:%s", opts.ReadTimeout)
|
||||
}
|
||||
}
|
||||
|
||||
53
internal/db/json_decode.go
Normal file
53
internal/db/json_decode.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
func decodeJSONWithUseNumber(data []byte, out interface{}) error {
|
||||
if out == nil {
|
||||
return nil
|
||||
}
|
||||
decoder := json.NewDecoder(bytes.NewReader(data))
|
||||
decoder.UseNumber()
|
||||
if err := decoder.Decode(out); err != nil {
|
||||
return err
|
||||
}
|
||||
normalizeDecodedJSONNumbers(out)
|
||||
return nil
|
||||
}
|
||||
|
||||
func normalizeDecodedJSONNumbers(out interface{}) {
|
||||
switch typed := out.(type) {
|
||||
case *[]map[string]interface{}:
|
||||
if typed == nil {
|
||||
return
|
||||
}
|
||||
for i := range *typed {
|
||||
row := (*typed)[i]
|
||||
for key, value := range row {
|
||||
row[key] = normalizeQueryValue(value)
|
||||
}
|
||||
}
|
||||
case *map[string]interface{}:
|
||||
if typed == nil || *typed == nil {
|
||||
return
|
||||
}
|
||||
for key, value := range *typed {
|
||||
(*typed)[key] = normalizeQueryValue(value)
|
||||
}
|
||||
case *[]interface{}:
|
||||
if typed == nil {
|
||||
return
|
||||
}
|
||||
for i, item := range *typed {
|
||||
(*typed)[i] = normalizeQueryValue(item)
|
||||
}
|
||||
case *interface{}:
|
||||
if typed == nil {
|
||||
return
|
||||
}
|
||||
*typed = normalizeQueryValue(*typed)
|
||||
}
|
||||
}
|
||||
58
internal/db/json_decode_test.go
Normal file
58
internal/db/json_decode_test.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestDecodeJSONWithUseNumber_QueryRowsPreserveUnsafeInteger(t *testing.T) {
|
||||
raw := []byte(`[{"id":9007199254740993,"safe":123,"nested":{"n":9007199254740992},"arr":[9007199254740992,1],"decimal":1.25}]`)
|
||||
var out []map[string]interface{}
|
||||
|
||||
if err := decodeJSONWithUseNumber(raw, &out); err != nil {
|
||||
t.Fatalf("解码失败: %v", err)
|
||||
}
|
||||
if len(out) != 1 {
|
||||
t.Fatalf("期望 1 行,实际 %d", len(out))
|
||||
}
|
||||
|
||||
row := out[0]
|
||||
if got, ok := row["id"].(string); !ok || got != "9007199254740993" {
|
||||
t.Fatalf("id 应为 string 且保持精度,实际=%v(%T)", row["id"], row["id"])
|
||||
}
|
||||
if got, ok := row["safe"].(int64); !ok || got != 123 {
|
||||
t.Fatalf("safe 应为 int64(123),实际=%v(%T)", row["safe"], row["safe"])
|
||||
}
|
||||
nested, ok := row["nested"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("nested 类型异常:%T", row["nested"])
|
||||
}
|
||||
if got, ok := nested["n"].(string); !ok || got != "9007199254740992" {
|
||||
t.Fatalf("nested.n 应为 string 且保持精度,实际=%v(%T)", nested["n"], nested["n"])
|
||||
}
|
||||
arr, ok := row["arr"].([]interface{})
|
||||
if !ok || len(arr) != 2 {
|
||||
t.Fatalf("arr 类型异常:%v(%T)", row["arr"], row["arr"])
|
||||
}
|
||||
if got, ok := arr[0].(string); !ok || got != "9007199254740992" {
|
||||
t.Fatalf("arr[0] 应为 string 且保持精度,实际=%v(%T)", arr[0], arr[0])
|
||||
}
|
||||
if got, ok := arr[1].(int64); !ok || got != 1 {
|
||||
t.Fatalf("arr[1] 应为 int64(1),实际=%v(%T)", arr[1], arr[1])
|
||||
}
|
||||
if got, ok := row["decimal"].(float64); !ok || got != 1.25 {
|
||||
t.Fatalf("decimal 应为 float64(1.25),实际=%v(%T)", row["decimal"], row["decimal"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeJSONWithUseNumber_TypedStruct(t *testing.T) {
|
||||
type item struct {
|
||||
ID int64 `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
var out []item
|
||||
if err := decodeJSONWithUseNumber([]byte(`[{"id":7,"name":"ok"}]`), &out); err != nil {
|
||||
t.Fatalf("解码失败: %v", err)
|
||||
}
|
||||
if len(out) != 1 || out[0].ID != 7 || out[0].Name != "ok" {
|
||||
t.Fatalf("结构体解码结果异常:%+v", out)
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
proxytunnel "GoNavi-Wails/internal/proxy"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
|
||||
"go.mongodb.org/mongo-driver/v2/bson"
|
||||
@@ -29,6 +30,14 @@ type MongoDB struct {
|
||||
forwarder *ssh.LocalForwarder
|
||||
}
|
||||
|
||||
type mongoProxyDialer struct {
|
||||
proxyConfig connection.ProxyConfig
|
||||
}
|
||||
|
||||
func (d *mongoProxyDialer) DialContext(ctx context.Context, network, address string) (net.Conn, error) {
|
||||
return proxytunnel.DialContext(ctx, d.proxyConfig, network, address)
|
||||
}
|
||||
|
||||
const defaultMongoPort = 27017
|
||||
|
||||
func normalizeMongoAddress(host string, port int) string {
|
||||
@@ -328,6 +337,9 @@ func (m *MongoDB) Connect(config connection.ConnectionConfig) error {
|
||||
|
||||
uri := m.getURI(attemptConfig)
|
||||
clientOpts := options.Client().ApplyURI(uri)
|
||||
if attemptConfig.UseProxy {
|
||||
clientOpts.SetDialer(&mongoProxyDialer{proxyConfig: attemptConfig.Proxy})
|
||||
}
|
||||
client, err := mongo.Connect(clientOpts)
|
||||
if err != nil {
|
||||
errorDetails = append(errorDetails, fmt.Sprintf("%s连接失败: %v", authLabel, err))
|
||||
|
||||
@@ -174,7 +174,7 @@ func (c *mysqlAgentClient) call(req mysqlAgentRequest, out interface{}, fields *
|
||||
*rowsAffected = resp.RowsAffected
|
||||
}
|
||||
if out != nil && len(resp.Data) > 0 {
|
||||
if err := json.Unmarshal(resp.Data, out); err != nil {
|
||||
if err := decodeJSONWithUseNumber(resp.Data, out); err != nil {
|
||||
return fmt.Errorf("解析 MySQL 驱动代理数据失败:%w", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,8 +11,10 @@ import (
|
||||
"os/exec"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -38,6 +40,7 @@ type optionalAgentRequest struct {
|
||||
Method string `json:"method"`
|
||||
Config *connection.ConnectionConfig `json:"config,omitempty"`
|
||||
Query string `json:"query,omitempty"`
|
||||
TimeoutMs int64 `json:"timeoutMs,omitempty"`
|
||||
DBName string `json:"dbName,omitempty"`
|
||||
TableName string `json:"tableName,omitempty"`
|
||||
Changes *connection.ChangeSet `json:"changes,omitempty"`
|
||||
@@ -176,7 +179,7 @@ func (c *optionalDriverAgentClient) call(req optionalAgentRequest, out interface
|
||||
*rowsAffected = resp.RowsAffected
|
||||
}
|
||||
if out != nil && len(resp.Data) > 0 {
|
||||
if err := json.Unmarshal(resp.Data, out); err != nil {
|
||||
if err := decodeJSONWithUseNumber(resp.Data, out); err != nil {
|
||||
return fmt.Errorf("解析 %s 驱动代理数据失败:%w", driverDisplayName(c.driver), err)
|
||||
}
|
||||
}
|
||||
@@ -223,6 +226,7 @@ func (d *OptionalDriverAgentDB) Connect(config connection.ConnectionConfig) erro
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
logger.Infof("%s 驱动代理路径:%s", driverDisplayName(d.driverType), executablePath)
|
||||
client, err := newOptionalDriverAgentClient(d.driverType, executablePath)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -260,7 +264,20 @@ func (d *OptionalDriverAgentDB) QueryContext(ctx context.Context, query string)
|
||||
if err := ctx.Err(); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return d.Query(query)
|
||||
client, err := d.requireClient()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
var data []map[string]interface{}
|
||||
var fields []string
|
||||
if err := client.call(optionalAgentRequest{
|
||||
Method: optionalAgentMethodQuery,
|
||||
Query: query,
|
||||
TimeoutMs: timeoutMsFromContext(ctx),
|
||||
}, &data, &fields, nil); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return data, fields, nil
|
||||
}
|
||||
|
||||
func (d *OptionalDriverAgentDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
@@ -283,7 +300,19 @@ func (d *OptionalDriverAgentDB) ExecContext(ctx context.Context, query string) (
|
||||
if err := ctx.Err(); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return d.Exec(query)
|
||||
client, err := d.requireClient()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
var affected int64
|
||||
if err := client.call(optionalAgentRequest{
|
||||
Method: optionalAgentMethodExec,
|
||||
Query: query,
|
||||
TimeoutMs: timeoutMsFromContext(ctx),
|
||||
}, nil, nil, &affected); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return affected, nil
|
||||
}
|
||||
|
||||
func (d *OptionalDriverAgentDB) Exec(query string) (int64, error) {
|
||||
@@ -443,3 +472,15 @@ func (d *OptionalDriverAgentDB) requireClient() (*optionalDriverAgentClient, err
|
||||
}
|
||||
return d.client, nil
|
||||
}
|
||||
|
||||
func timeoutMsFromContext(ctx context.Context) int64 {
|
||||
deadline, ok := ctx.Deadline()
|
||||
if !ok {
|
||||
return 0
|
||||
}
|
||||
remaining := time.Until(deadline).Milliseconds()
|
||||
if remaining <= 0 {
|
||||
return 1
|
||||
}
|
||||
return remaining
|
||||
}
|
||||
|
||||
32
internal/db/optional_driver_agent_impl_test.go
Normal file
32
internal/db/optional_driver_agent_impl_test.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestTimeoutMsFromContext_NoDeadline(t *testing.T) {
|
||||
if got := timeoutMsFromContext(context.Background()); got != 0 {
|
||||
t.Fatalf("无 deadline 时应返回 0,got=%d", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTimeoutMsFromContext_WithDeadline(t *testing.T) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
|
||||
defer cancel()
|
||||
|
||||
got := timeoutMsFromContext(ctx)
|
||||
if got <= 0 {
|
||||
t.Fatalf("有 deadline 时应返回正值,got=%d", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTimeoutMsFromContext_ExpiredDeadline(t *testing.T) {
|
||||
ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(-time.Second))
|
||||
defer cancel()
|
||||
|
||||
if got := timeoutMsFromContext(ctx); got != 1 {
|
||||
t.Fatalf("过期 deadline 应返回 1,got=%d", got)
|
||||
}
|
||||
}
|
||||
@@ -26,10 +26,7 @@ type OracleDB struct {
|
||||
|
||||
func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// oracle://user:pass@host:port/service_name
|
||||
database := config.Database
|
||||
if database == "" {
|
||||
database = config.User // Default to user service/schema if empty?
|
||||
}
|
||||
database := strings.TrimSpace(config.Database)
|
||||
|
||||
u := &url.URL{
|
||||
Scheme: "oracle",
|
||||
@@ -44,6 +41,10 @@ func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
func (o *OracleDB) Connect(config connection.ConnectionConfig) error {
|
||||
var dsn string
|
||||
var err error
|
||||
serviceName := strings.TrimSpace(config.Database)
|
||||
if serviceName == "" {
|
||||
return fmt.Errorf("Oracle 连接缺少服务名(Service Name),请在连接配置中填写,例如 ORCLPDB1")
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
|
||||
48
internal/db/postgres_connect_test.go
Normal file
48
internal/db/postgres_connect_test.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestResolvePostgresConnectDatabases_ExplicitDatabase(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
Database: "analytics",
|
||||
User: "app_user",
|
||||
}
|
||||
|
||||
got := resolvePostgresConnectDatabases(cfg)
|
||||
want := []string{"analytics"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected databases, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolvePostgresConnectDatabases_FallbackOrder(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
User: "app_user",
|
||||
}
|
||||
|
||||
got := resolvePostgresConnectDatabases(cfg)
|
||||
want := []string{"postgres", "template1", "app_user"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected databases, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolvePostgresConnectDatabases_DeduplicateUserDefault(t *testing.T) {
|
||||
cfg := connection.ConnectionConfig{
|
||||
Type: "postgres",
|
||||
User: "postgres",
|
||||
}
|
||||
|
||||
got := resolvePostgresConnectDatabases(cfg)
|
||||
want := []string{"postgres", "template1"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("unexpected databases, got=%v want=%v", got, want)
|
||||
}
|
||||
}
|
||||
@@ -24,6 +24,30 @@ type PostgresDB struct {
|
||||
forwarder *ssh.LocalForwarder // Store SSH tunnel forwarder
|
||||
}
|
||||
|
||||
func resolvePostgresConnectDatabases(config connection.ConnectionConfig) []string {
|
||||
explicit := strings.TrimSpace(config.Database)
|
||||
if explicit != "" {
|
||||
return []string{explicit}
|
||||
}
|
||||
|
||||
candidates := []string{"postgres", "template1", strings.TrimSpace(config.User)}
|
||||
seen := make(map[string]struct{}, len(candidates))
|
||||
result := make([]string, 0, len(candidates))
|
||||
for _, name := range candidates {
|
||||
trimmed := strings.TrimSpace(name)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
normalized := strings.ToLower(trimmed)
|
||||
if _, exists := seen[normalized]; exists {
|
||||
continue
|
||||
}
|
||||
seen[normalized] = struct{}{}
|
||||
result = append(result, trimmed)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (p *PostgresDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// postgres://user:password@host:port/dbname?sslmode=disable
|
||||
dbname := config.Database
|
||||
@@ -53,8 +77,23 @@ func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
return fmt.Errorf("%s", reason)
|
||||
}
|
||||
|
||||
var dsn string
|
||||
var err error
|
||||
runConfig := config
|
||||
p.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
cleanupOnFailure := true
|
||||
defer func() {
|
||||
if !cleanupOnFailure {
|
||||
return
|
||||
}
|
||||
if p.conn != nil {
|
||||
_ = p.conn.Close()
|
||||
p.conn = nil
|
||||
}
|
||||
if p.forwarder != nil {
|
||||
_ = p.forwarder.Close()
|
||||
p.forwarder = nil
|
||||
}
|
||||
}()
|
||||
|
||||
if config.UseSSH {
|
||||
// Create SSH tunnel with local port forwarding
|
||||
@@ -83,24 +122,44 @@ func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
localConfig.Port = port
|
||||
localConfig.UseSSH = false // Disable SSH flag for DSN generation
|
||||
|
||||
dsn = p.getDSN(localConfig)
|
||||
runConfig = localConfig
|
||||
logger.Infof("PostgreSQL 通过本地端口转发连接:%s -> %s:%d", forwarder.LocalAddr, config.Host, config.Port)
|
||||
} else {
|
||||
dsn = p.getDSN(config)
|
||||
}
|
||||
|
||||
db, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
p.conn = db
|
||||
p.pingTimeout = getConnectTimeout(config)
|
||||
attemptDBs := resolvePostgresConnectDatabases(runConfig)
|
||||
var failures []string
|
||||
for _, dbName := range attemptDBs {
|
||||
attemptConfig := runConfig
|
||||
attemptConfig.Database = dbName
|
||||
dsn := p.getDSN(attemptConfig)
|
||||
|
||||
// Force verification
|
||||
if err := p.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
dbConn, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
failures = append(failures, fmt.Sprintf("数据库=%s 打开连接失败: %v", dbName, err))
|
||||
continue
|
||||
}
|
||||
p.conn = dbConn
|
||||
|
||||
// Force verification
|
||||
if err := p.Ping(); err != nil {
|
||||
failures = append(failures, fmt.Sprintf("数据库=%s 验证失败: %v", dbName, err))
|
||||
_ = dbConn.Close()
|
||||
p.conn = nil
|
||||
continue
|
||||
}
|
||||
|
||||
if strings.TrimSpace(config.Database) == "" && !strings.EqualFold(dbName, "postgres") {
|
||||
logger.Infof("PostgreSQL 自动选择连接数据库:%s", dbName)
|
||||
}
|
||||
|
||||
cleanupOnFailure = false
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
|
||||
if len(failures) == 0 {
|
||||
return fmt.Errorf("连接建立后验证失败:未找到可用的连接数据库")
|
||||
}
|
||||
return fmt.Errorf("连接建立后验证失败:%s", strings.Join(failures, ";"))
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Close() error {
|
||||
|
||||
@@ -2,12 +2,27 @@ package db
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
const (
|
||||
jsMaxSafeInteger int64 = 9007199254740991
|
||||
jsMinSafeInteger int64 = -9007199254740991
|
||||
jsMaxSafeUint uint64 = 9007199254740991
|
||||
)
|
||||
|
||||
var (
|
||||
jsMaxSafeBigInt = big.NewInt(jsMaxSafeInteger)
|
||||
jsMinSafeBigInt = big.NewInt(jsMinSafeInteger)
|
||||
)
|
||||
|
||||
// normalizeQueryValue normalizes driver-returned values for UI/JSON transport.
|
||||
// 当前主要处理 []byte:如果是可读文本则转为 string,否则转为十六进制字符串,避免前端出现“空白值”。
|
||||
func normalizeQueryValue(v interface{}) interface{} {
|
||||
@@ -18,7 +33,114 @@ func normalizeQueryValueWithDBType(v interface{}, databaseTypeName string) inter
|
||||
if b, ok := v.([]byte); ok {
|
||||
return bytesToDisplayValue(b, databaseTypeName)
|
||||
}
|
||||
return v
|
||||
return normalizeCompositeQueryValue(v)
|
||||
}
|
||||
|
||||
func normalizeCompositeQueryValue(v interface{}) interface{} {
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch typed := v.(type) {
|
||||
case []interface{}:
|
||||
items := make([]interface{}, len(typed))
|
||||
for i, item := range typed {
|
||||
items[i] = normalizeQueryValue(item)
|
||||
}
|
||||
return items
|
||||
case map[string]interface{}:
|
||||
out := make(map[string]interface{}, len(typed))
|
||||
for key, value := range typed {
|
||||
out[key] = normalizeQueryValue(value)
|
||||
}
|
||||
return out
|
||||
case json.Number:
|
||||
return normalizeJSONNumberForJS(typed)
|
||||
}
|
||||
|
||||
rv := reflect.ValueOf(v)
|
||||
switch rv.Kind() {
|
||||
case reflect.Pointer:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
return normalizeQueryValue(rv.Elem().Interface())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
out := make(map[string]interface{}, rv.Len())
|
||||
iter := rv.MapRange()
|
||||
for iter.Next() {
|
||||
out[mapKeyToString(iter.Key().Interface())] = normalizeQueryValue(iter.Value().Interface())
|
||||
}
|
||||
return out
|
||||
case reflect.Slice, reflect.Array:
|
||||
// []byte 在上层已单独处理,这里保留对其它切片/数组的递归规整。
|
||||
if rv.Kind() == reflect.Slice && rv.IsNil() {
|
||||
return nil
|
||||
}
|
||||
size := rv.Len()
|
||||
items := make([]interface{}, size)
|
||||
for i := 0; i < size; i++ {
|
||||
items[i] = normalizeQueryValue(rv.Index(i).Interface())
|
||||
}
|
||||
return items
|
||||
default:
|
||||
return normalizeUnsafeIntegerForJS(rv, v)
|
||||
}
|
||||
}
|
||||
|
||||
func normalizeJSONNumberForJS(n json.Number) interface{} {
|
||||
text := strings.TrimSpace(n.String())
|
||||
if text == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
if integer, ok := parseJSONInteger(text); ok {
|
||||
if integer.Cmp(jsMaxSafeBigInt) > 0 || integer.Cmp(jsMinSafeBigInt) < 0 {
|
||||
return text
|
||||
}
|
||||
return integer.Int64()
|
||||
}
|
||||
|
||||
if f, err := n.Float64(); err == nil {
|
||||
return f
|
||||
}
|
||||
return text
|
||||
}
|
||||
|
||||
func parseJSONInteger(text string) (*big.Int, bool) {
|
||||
if text == "" {
|
||||
return nil, false
|
||||
}
|
||||
start := 0
|
||||
if text[0] == '+' || text[0] == '-' {
|
||||
if len(text) == 1 {
|
||||
return nil, false
|
||||
}
|
||||
start = 1
|
||||
}
|
||||
for i := start; i < len(text); i++ {
|
||||
if text[i] < '0' || text[i] > '9' {
|
||||
return nil, false
|
||||
}
|
||||
}
|
||||
value, ok := new(big.Int).SetString(text, 10)
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
return value, true
|
||||
}
|
||||
|
||||
func mapKeyToString(key interface{}) string {
|
||||
if key == nil {
|
||||
return "null"
|
||||
}
|
||||
if s, ok := key.(string); ok {
|
||||
return s
|
||||
}
|
||||
return fmt.Sprintf("%v", key)
|
||||
}
|
||||
|
||||
func bytesToDisplayValue(b []byte, databaseTypeName string) interface{} {
|
||||
@@ -33,8 +155,7 @@ func bytesToDisplayValue(b []byte, databaseTypeName string) interface{} {
|
||||
if isBitLikeDBType(dbType) {
|
||||
if u, ok := bytesToUint64(b); ok {
|
||||
// JS number precision is limited; keep large bitmasks as string.
|
||||
const maxSafeInteger = 9007199254740991 // 2^53 - 1
|
||||
if u <= maxSafeInteger {
|
||||
if u <= jsMaxSafeUint {
|
||||
return int64(u)
|
||||
}
|
||||
return fmt.Sprintf("%d", u)
|
||||
@@ -89,6 +210,25 @@ func bytesToUint64(b []byte) (uint64, bool) {
|
||||
return u, true
|
||||
}
|
||||
|
||||
func normalizeUnsafeIntegerForJS(rv reflect.Value, original interface{}) interface{} {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
n := rv.Int()
|
||||
if n > jsMaxSafeInteger || n < jsMinSafeInteger {
|
||||
return strconv.FormatInt(n, 10)
|
||||
}
|
||||
return original
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
u := rv.Uint()
|
||||
if u > jsMaxSafeUint {
|
||||
return strconv.FormatUint(u, 10)
|
||||
}
|
||||
return original
|
||||
default:
|
||||
return original
|
||||
}
|
||||
}
|
||||
|
||||
func isMostlyPrintable(s string) bool {
|
||||
if s == "" {
|
||||
return true
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
package db
|
||||
|
||||
import "testing"
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// duckMapLike mimics driver results (e.g. DuckDB structs) that decode into
// map[any]any and must be normalized before reaching the JSON frontend.
type duckMapLike map[any]any
||||
func TestNormalizeQueryValueWithDBType_BitBytes(t *testing.T) {
|
||||
v := normalizeQueryValueWithDBType([]byte{0x00}, "BIT")
|
||||
@@ -42,3 +47,121 @@ func TestNormalizeQueryValueWithDBType_ByteFallbacks(t *testing.T) {
|
||||
t.Fatalf("未知类型 0xff 期望返回 0xff,实际=%v(%T)", v, v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_MapAnyAnyForJSON(t *testing.T) {
|
||||
input := duckMapLike{
|
||||
"id": int64(1),
|
||||
1: "one",
|
||||
true: []interface{}{duckMapLike{2: "two"}},
|
||||
"bytes": []byte("ok"),
|
||||
}
|
||||
|
||||
v := normalizeQueryValueWithDBType(input, "")
|
||||
root, ok := v.(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("期望转换为 map[string]interface{},实际=%T", v)
|
||||
}
|
||||
|
||||
if root["id"] != int64(1) {
|
||||
t.Fatalf("id 字段异常,实际=%v(%T)", root["id"], root["id"])
|
||||
}
|
||||
if root["1"] != "one" {
|
||||
t.Fatalf("数字 key 未被字符串化,实际=%v(%T)", root["1"], root["1"])
|
||||
}
|
||||
if root["bytes"] != "ok" {
|
||||
t.Fatalf("嵌套 []byte 未被转换,实际=%v(%T)", root["bytes"], root["bytes"])
|
||||
}
|
||||
|
||||
arr, ok := root["true"].([]interface{})
|
||||
if !ok || len(arr) != 1 {
|
||||
t.Fatalf("bool key 下的数组结构异常,实际=%v(%T)", root["true"], root["true"])
|
||||
}
|
||||
nested, ok := arr[0].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("嵌套 map 未被转换,实际=%v(%T)", arr[0], arr[0])
|
||||
}
|
||||
if nested["2"] != "two" {
|
||||
t.Fatalf("嵌套 map 数字 key 未转换,实际=%v(%T)", nested["2"], nested["2"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_UnsafeIntegersAsString(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
input interface{}
|
||||
want string
|
||||
}{
|
||||
{name: "int64 overflow", input: int64(9007199254740992), want: "9007199254740992"},
|
||||
{name: "int64 underflow", input: int64(-9007199254740992), want: "-9007199254740992"},
|
||||
{name: "uint64 overflow", input: uint64(9007199254740992), want: "9007199254740992"},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got := normalizeQueryValueWithDBType(tc.input, "")
|
||||
if got != tc.want {
|
||||
t.Fatalf("期望=%q,实际=%v(%T)", tc.want, got, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_SafeIntegersKeepType(t *testing.T) {
|
||||
got := normalizeQueryValueWithDBType(int64(9007199254740991), "")
|
||||
if _, ok := got.(int64); !ok {
|
||||
t.Fatalf("安全范围 int64 应保持数字类型,实际=%v(%T)", got, got)
|
||||
}
|
||||
|
||||
got = normalizeQueryValueWithDBType(uint64(9007199254740991), "")
|
||||
if _, ok := got.(uint64); !ok {
|
||||
t.Fatalf("安全范围 uint64 应保持数字类型,实际=%v(%T)", got, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNormalizeQueryValueWithDBType_JSONNumber(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
input json.Number
|
||||
wantType string
|
||||
wantValue string
|
||||
}{
|
||||
{name: "safe integer", input: json.Number("9007199254740991"), wantType: "int64", wantValue: "9007199254740991"},
|
||||
{name: "unsafe integer", input: json.Number("9007199254740992"), wantType: "string", wantValue: "9007199254740992"},
|
||||
{name: "unsafe negative integer", input: json.Number("-9007199254740992"), wantType: "string", wantValue: "-9007199254740992"},
|
||||
{name: "decimal", input: json.Number("12.5"), wantType: "float64", wantValue: "12.5"},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
got := normalizeQueryValueWithDBType(tc.input, "")
|
||||
switch tc.wantType {
|
||||
case "int64":
|
||||
v, ok := got.(int64)
|
||||
if !ok {
|
||||
t.Fatalf("期望 int64,实际=%T", got)
|
||||
}
|
||||
if v != 9007199254740991 {
|
||||
t.Fatalf("期望值=%s,实际=%d", tc.wantValue, v)
|
||||
}
|
||||
case "string":
|
||||
v, ok := got.(string)
|
||||
if !ok {
|
||||
t.Fatalf("期望 string,实际=%T", got)
|
||||
}
|
||||
if v != tc.wantValue {
|
||||
t.Fatalf("期望值=%s,实际=%s", tc.wantValue, v)
|
||||
}
|
||||
case "float64":
|
||||
v, ok := got.(float64)
|
||||
if !ok {
|
||||
t.Fatalf("期望 float64,实际=%T", got)
|
||||
}
|
||||
if v != 12.5 {
|
||||
t.Fatalf("期望值=%s,实际=%v", tc.wantValue, v)
|
||||
}
|
||||
default:
|
||||
t.Fatalf("未知断言类型:%s", tc.wantType)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
344
internal/proxy/proxy.go
Normal file
344
internal/proxy/proxy.go
Normal file
@@ -0,0 +1,344 @@
|
||||
package proxy
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
|
||||
xproxy "golang.org/x/net/proxy"
|
||||
)
|
||||
|
||||
const (
	// defaultDialTimeout bounds both the TCP dial to the proxy itself and
	// the proxied dial to the remote endpoint.
	defaultDialTimeout = 8 * time.Second
)
|
||||
|
||||
type LocalForwarder struct {
|
||||
LocalAddr string
|
||||
RemoteAddr string
|
||||
ProxyAddr string
|
||||
ProxyType string
|
||||
|
||||
cfg connection.ProxyConfig
|
||||
listener net.Listener
|
||||
closeChan chan struct{}
|
||||
closeOnce sync.Once
|
||||
|
||||
closed bool
|
||||
closedMu sync.RWMutex
|
||||
}
|
||||
|
||||
var (
|
||||
forwarderMu sync.RWMutex
|
||||
localForwarders = make(map[string]*LocalForwarder)
|
||||
)
|
||||
|
||||
func NormalizeConfig(config connection.ProxyConfig) (connection.ProxyConfig, error) {
|
||||
result := connection.ProxyConfig{
|
||||
Type: strings.ToLower(strings.TrimSpace(config.Type)),
|
||||
Host: strings.TrimSpace(config.Host),
|
||||
Port: config.Port,
|
||||
User: strings.TrimSpace(config.User),
|
||||
Password: config.Password,
|
||||
}
|
||||
|
||||
switch result.Type {
|
||||
case "socks5", "socks5h", "http":
|
||||
default:
|
||||
return result, fmt.Errorf("不支持的代理类型:%s", config.Type)
|
||||
}
|
||||
if result.Type == "socks5h" {
|
||||
result.Type = "socks5"
|
||||
}
|
||||
if result.Host == "" {
|
||||
return result, fmt.Errorf("代理主机为空")
|
||||
}
|
||||
if result.Port <= 0 || result.Port > 65535 {
|
||||
return result, fmt.Errorf("代理端口无效:%d", result.Port)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func GetOrCreateLocalForwarder(proxyConfig connection.ProxyConfig, remoteHost string, remotePort int) (*LocalForwarder, error) {
|
||||
cfg, err := NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if strings.TrimSpace(remoteHost) == "" || remotePort <= 0 {
|
||||
return nil, fmt.Errorf("无效的远端地址:%s:%d", remoteHost, remotePort)
|
||||
}
|
||||
|
||||
key := forwarderCacheKey(cfg, remoteHost, remotePort)
|
||||
forwarderMu.RLock()
|
||||
forwarder, exists := localForwarders[key]
|
||||
forwarderMu.RUnlock()
|
||||
if exists && forwarder != nil && !forwarder.IsClosed() {
|
||||
return forwarder, nil
|
||||
}
|
||||
|
||||
if exists {
|
||||
forwarderMu.Lock()
|
||||
delete(localForwarders, key)
|
||||
forwarderMu.Unlock()
|
||||
}
|
||||
|
||||
next, err := NewLocalForwarder(cfg, remoteHost, remotePort)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
forwarderMu.Lock()
|
||||
localForwarders[key] = next
|
||||
forwarderMu.Unlock()
|
||||
return next, nil
|
||||
}
|
||||
|
||||
func forwarderCacheKey(cfg connection.ProxyConfig, remoteHost string, remotePort int) string {
|
||||
trimmedHost := strings.TrimSpace(remoteHost)
|
||||
credential := cfg.User + "\x00" + cfg.Password
|
||||
credentialHash := sha256.Sum256([]byte(credential))
|
||||
// 仅保留短指纹用于区分不同认证信息,避免在 key 日志中泄露明文口令。
|
||||
fingerprint := hex.EncodeToString(credentialHash[:8])
|
||||
return fmt.Sprintf("%s://%s:%d@%s:%d#%s", cfg.Type, cfg.Host, cfg.Port, trimmedHost, remotePort, fingerprint)
|
||||
}
|
||||
|
||||
func NewLocalForwarder(proxyConfig connection.ProxyConfig, remoteHost string, remotePort int) (*LocalForwarder, error) {
|
||||
cfg, err := NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
listener, err := net.Listen("tcp", "127.0.0.1:0")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("创建本地代理监听失败:%w", err)
|
||||
}
|
||||
|
||||
localAddr := listener.Addr().String()
|
||||
remoteAddr := net.JoinHostPort(strings.TrimSpace(remoteHost), fmt.Sprintf("%d", remotePort))
|
||||
proxyAddr := net.JoinHostPort(cfg.Host, fmt.Sprintf("%d", cfg.Port))
|
||||
forwarder := &LocalForwarder{
|
||||
LocalAddr: localAddr,
|
||||
RemoteAddr: remoteAddr,
|
||||
ProxyAddr: proxyAddr,
|
||||
ProxyType: cfg.Type,
|
||||
cfg: cfg,
|
||||
listener: listener,
|
||||
closeChan: make(chan struct{}),
|
||||
}
|
||||
|
||||
go forwarder.forward()
|
||||
logger.Infof("已创建代理端口转发:本地 %s -> 远端 %s(代理 %s://%s)", localAddr, remoteAddr, cfg.Type, proxyAddr)
|
||||
return forwarder, nil
|
||||
}
|
||||
|
||||
func (f *LocalForwarder) forward() {
|
||||
for {
|
||||
localConn, err := f.listener.Accept()
|
||||
if err != nil {
|
||||
select {
|
||||
case <-f.closeChan:
|
||||
return
|
||||
default:
|
||||
logger.Warnf("接受本地代理连接失败:%v", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
go f.handleConnection(localConn)
|
||||
}
|
||||
}
|
||||
|
||||
func (f *LocalForwarder) handleConnection(localConn net.Conn) {
|
||||
defer localConn.Close()
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), defaultDialTimeout)
|
||||
remoteConn, err := dialThroughProxy(ctx, f.cfg, "tcp", f.RemoteAddr)
|
||||
cancel()
|
||||
if err != nil {
|
||||
logger.Warnf("通过代理连接远端失败:远端=%s 代理=%s://%s 错误=%v", f.RemoteAddr, f.ProxyType, f.ProxyAddr, err)
|
||||
return
|
||||
}
|
||||
defer remoteConn.Close()
|
||||
|
||||
errc := make(chan error, 2)
|
||||
var closeOnce sync.Once
|
||||
closeBoth := func() {
|
||||
_ = localConn.Close()
|
||||
_ = remoteConn.Close()
|
||||
}
|
||||
go func() {
|
||||
_, copyErr := io.Copy(remoteConn, localConn)
|
||||
closeOnce.Do(closeBoth)
|
||||
errc <- copyErr
|
||||
}()
|
||||
go func() {
|
||||
_, copyErr := io.Copy(localConn, remoteConn)
|
||||
closeOnce.Do(closeBoth)
|
||||
errc <- copyErr
|
||||
}()
|
||||
<-errc
|
||||
<-errc
|
||||
}
|
||||
|
||||
func (f *LocalForwarder) Close() error {
|
||||
var err error
|
||||
f.closeOnce.Do(func() {
|
||||
f.closedMu.Lock()
|
||||
f.closed = true
|
||||
f.closedMu.Unlock()
|
||||
close(f.closeChan)
|
||||
err = f.listener.Close()
|
||||
if err != nil {
|
||||
logger.Warnf("关闭代理端口转发失败:%v", err)
|
||||
}
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (f *LocalForwarder) IsClosed() bool {
|
||||
f.closedMu.RLock()
|
||||
defer f.closedMu.RUnlock()
|
||||
return f.closed
|
||||
}
|
||||
|
||||
func CloseAllForwarders() {
|
||||
forwarderMu.Lock()
|
||||
defer forwarderMu.Unlock()
|
||||
|
||||
for key, forwarder := range localForwarders {
|
||||
if forwarder == nil {
|
||||
continue
|
||||
}
|
||||
_ = forwarder.Close()
|
||||
logger.Infof("已关闭代理端口转发:%s", key)
|
||||
}
|
||||
localForwarders = make(map[string]*LocalForwarder)
|
||||
}
|
||||
|
||||
func DialContext(ctx context.Context, proxyConfig connection.ProxyConfig, network, address string) (net.Conn, error) {
|
||||
cfg, err := NormalizeConfig(proxyConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dialThroughProxy(ctx, cfg, network, address)
|
||||
}
|
||||
|
||||
func dialThroughProxy(ctx context.Context, cfg connection.ProxyConfig, network, address string) (net.Conn, error) {
|
||||
switch cfg.Type {
|
||||
case "socks5":
|
||||
return dialSOCKS5(ctx, cfg, network, address)
|
||||
case "http":
|
||||
return dialHTTPConnect(ctx, cfg, address)
|
||||
default:
|
||||
return nil, fmt.Errorf("不支持的代理类型:%s", cfg.Type)
|
||||
}
|
||||
}
|
||||
|
||||
func dialSOCKS5(ctx context.Context, cfg connection.ProxyConfig, network, address string) (net.Conn, error) {
|
||||
proxyAddr := net.JoinHostPort(cfg.Host, fmt.Sprintf("%d", cfg.Port))
|
||||
var auth *xproxy.Auth
|
||||
if cfg.User != "" || cfg.Password != "" {
|
||||
auth = &xproxy.Auth{
|
||||
User: cfg.User,
|
||||
Password: cfg.Password,
|
||||
}
|
||||
}
|
||||
dialer, err := xproxy.SOCKS5("tcp", proxyAddr, auth, &net.Dialer{Timeout: defaultDialTimeout})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("创建 SOCKS5 代理拨号器失败:%w", err)
|
||||
}
|
||||
|
||||
type result struct {
|
||||
conn net.Conn
|
||||
err error
|
||||
}
|
||||
ch := make(chan result, 1)
|
||||
go func() {
|
||||
conn, dialErr := dialer.Dial(network, address)
|
||||
ch <- result{conn: conn, err: dialErr}
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
go func() {
|
||||
r := <-ch
|
||||
if r.conn != nil {
|
||||
_ = r.conn.Close()
|
||||
}
|
||||
}()
|
||||
return nil, ctx.Err()
|
||||
case r := <-ch:
|
||||
if r.err != nil {
|
||||
return nil, fmt.Errorf("SOCKS5 代理连接失败:%w", r.err)
|
||||
}
|
||||
return r.conn, nil
|
||||
}
|
||||
}
|
||||
|
||||
func dialHTTPConnect(ctx context.Context, cfg connection.ProxyConfig, address string) (net.Conn, error) {
|
||||
proxyAddr := net.JoinHostPort(cfg.Host, fmt.Sprintf("%d", cfg.Port))
|
||||
dialer := &net.Dialer{Timeout: defaultDialTimeout}
|
||||
conn, err := dialer.DialContext(ctx, "tcp", proxyAddr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("连接 HTTP 代理失败:%w", err)
|
||||
}
|
||||
|
||||
connectReq := &http.Request{
|
||||
Method: http.MethodConnect,
|
||||
URL: &url.URL{Opaque: address},
|
||||
Host: address,
|
||||
Header: make(http.Header),
|
||||
}
|
||||
if cfg.User != "" || cfg.Password != "" {
|
||||
raw := cfg.User + ":" + cfg.Password
|
||||
connectReq.Header.Set("Proxy-Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(raw)))
|
||||
}
|
||||
if err := connectReq.Write(conn); err != nil {
|
||||
_ = conn.Close()
|
||||
return nil, fmt.Errorf("发送 HTTP CONNECT 请求失败:%w", err)
|
||||
}
|
||||
|
||||
reader := bufio.NewReader(conn)
|
||||
resp, err := http.ReadResponse(reader, connectReq)
|
||||
if err != nil {
|
||||
_ = conn.Close()
|
||||
return nil, fmt.Errorf("读取 HTTP CONNECT 响应失败:%w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
_ = conn.Close()
|
||||
return nil, fmt.Errorf("HTTP 代理 CONNECT 失败:%s", strings.TrimSpace(resp.Status))
|
||||
}
|
||||
|
||||
if reader.Buffered() == 0 {
|
||||
return conn, nil
|
||||
}
|
||||
return &bufferedConn{Conn: conn, reader: reader}, nil
|
||||
}
|
||||
|
||||
type bufferedConn struct {
|
||||
net.Conn
|
||||
reader *bufio.Reader
|
||||
}
|
||||
|
||||
func (c *bufferedConn) Read(p []byte) (int, error) {
|
||||
if c.reader == nil {
|
||||
return c.Conn.Read(p)
|
||||
}
|
||||
if c.reader.Buffered() == 0 {
|
||||
c.reader = nil
|
||||
return c.Conn.Read(p)
|
||||
}
|
||||
return c.reader.Read(p)
|
||||
}
|
||||
44
internal/proxy/proxy_test.go
Normal file
44
internal/proxy/proxy_test.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package proxy
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func TestNormalizeConfigSupportsSocks5hAlias(t *testing.T) {
|
||||
cfg, err := NormalizeConfig(connection.ProxyConfig{
|
||||
Type: "SOCKS5H",
|
||||
Host: "127.0.0.1",
|
||||
Port: 1080,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("NormalizeConfig returned error: %v", err)
|
||||
}
|
||||
if cfg.Type != "socks5" {
|
||||
t.Fatalf("expected normalized proxy type socks5, got %s", cfg.Type)
|
||||
}
|
||||
}
|
||||
|
||||
func TestForwarderCacheKeyIncludesCredentialFingerprint(t *testing.T) {
|
||||
base := connection.ProxyConfig{
|
||||
Type: "socks5",
|
||||
Host: "127.0.0.1",
|
||||
Port: 1080,
|
||||
User: "tester",
|
||||
Password: "first-password",
|
||||
}
|
||||
other := base
|
||||
other.Password = "second-password"
|
||||
|
||||
keyA := forwarderCacheKey(base, "db.internal", 3306)
|
||||
keyB := forwarderCacheKey(other, "db.internal", 3306)
|
||||
|
||||
if keyA == keyB {
|
||||
t.Fatalf("expected different cache key for different credentials")
|
||||
}
|
||||
if strings.Contains(keyA, base.Password) || strings.Contains(keyB, other.Password) {
|
||||
t.Fatalf("cache key should not contain raw password")
|
||||
}
|
||||
}
|
||||
@@ -12,7 +12,7 @@ type RedisValue struct {
|
||||
|
||||
// RedisDBInfo represents information about a Redis database
|
||||
type RedisDBInfo struct {
|
||||
Index int `json:"index"` // Database index (0-15)
|
||||
Index int `json:"index"` // Database index (single: 0-15, cluster: logical 0-15)
|
||||
Keys int64 `json:"keys"` // Number of keys in this database
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ type RedisKeyInfo struct {
|
||||
// RedisScanResult represents the result of a SCAN operation
|
||||
type RedisScanResult struct {
|
||||
Keys []RedisKeyInfo `json:"keys"`
|
||||
Cursor uint64 `json:"cursor"`
|
||||
Cursor string `json:"cursor"`
|
||||
}
|
||||
|
||||
// RedisClient defines the interface for Redis operations
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
1
main.go
1
main.go
@@ -41,6 +41,7 @@ func main() {
|
||||
BackdropType: windows.Acrylic,
|
||||
DisableWindowIcon: false,
|
||||
DisableFramelessWindowDecorations: false,
|
||||
WebviewUserDataPath: resolveWindowsWebviewUserDataPath(),
|
||||
},
|
||||
Mac: &mac.Options{
|
||||
WebviewIsTransparent: true,
|
||||
|
||||
123
main_windows_webview_userdata.go
Normal file
123
main_windows_webview_userdata.go
Normal file
@@ -0,0 +1,123 @@
|
||||
//go:build windows
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func resolveWindowsWebviewUserDataPath() string {
|
||||
appDataDir := strings.TrimSpace(os.Getenv("APPDATA"))
|
||||
if appDataDir == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
targetDir := filepath.Join(appDataDir, "GoNavi", "WebView2")
|
||||
_ = migrateLegacyWindowsWebviewUserData(appDataDir, targetDir)
|
||||
return targetDir
|
||||
}
|
||||
|
||||
func migrateLegacyWindowsWebviewUserData(appDataDir, targetDir string) error {
|
||||
if dirHasContent(targetDir) {
|
||||
return nil
|
||||
}
|
||||
|
||||
exeName := "GoNavi.exe"
|
||||
if exePath, err := os.Executable(); err == nil {
|
||||
base := strings.TrimSpace(filepath.Base(exePath))
|
||||
if base != "" {
|
||||
exeName = base
|
||||
}
|
||||
}
|
||||
exeBase := strings.TrimSuffix(exeName, filepath.Ext(exeName))
|
||||
|
||||
candidates := []string{
|
||||
filepath.Join(appDataDir, exeName),
|
||||
filepath.Join(appDataDir, exeBase),
|
||||
filepath.Join(appDataDir, "GoNavi.exe"),
|
||||
filepath.Join(appDataDir, "GoNavi"),
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{}, len(candidates))
|
||||
for _, candidate := range candidates {
|
||||
src := filepath.Clean(strings.TrimSpace(candidate))
|
||||
if src == "" || strings.EqualFold(src, filepath.Clean(targetDir)) {
|
||||
continue
|
||||
}
|
||||
key := strings.ToLower(src)
|
||||
if _, exists := seen[key]; exists {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
|
||||
if !dirHasContent(src) {
|
||||
continue
|
||||
}
|
||||
return copyDirTree(src, targetDir)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func dirHasContent(path string) bool {
|
||||
info, err := os.Stat(path)
|
||||
if err != nil || !info.IsDir() {
|
||||
return false
|
||||
}
|
||||
entries, err := os.ReadDir(path)
|
||||
return err == nil && len(entries) > 0
|
||||
}
|
||||
|
||||
func copyDirTree(srcDir, dstDir string) error {
|
||||
if err := os.MkdirAll(dstDir, 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return filepath.WalkDir(srcDir, func(srcPath string, d os.DirEntry, walkErr error) error {
|
||||
if walkErr != nil {
|
||||
return walkErr
|
||||
}
|
||||
relPath, err := filepath.Rel(srcDir, srcPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if relPath == "." {
|
||||
return nil
|
||||
}
|
||||
dstPath := filepath.Join(dstDir, relPath)
|
||||
|
||||
if d.IsDir() {
|
||||
return os.MkdirAll(dstPath, 0o755)
|
||||
}
|
||||
|
||||
info, err := d.Info()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return copyFileWithMode(srcPath, dstPath, info.Mode())
|
||||
})
|
||||
}
|
||||
|
||||
func copyFileWithMode(srcPath, dstPath string, mode os.FileMode) error {
|
||||
srcFile, err := os.Open(srcPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer srcFile.Close()
|
||||
|
||||
if err := os.MkdirAll(filepath.Dir(dstPath), 0o755); err != nil {
|
||||
return err
|
||||
}
|
||||
dstFile, err := os.OpenFile(dstPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, mode.Perm())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dstFile.Close()
|
||||
|
||||
if _, err := io.Copy(dstFile, srcFile); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
7
main_windows_webview_userdata_stub.go
Normal file
7
main_windows_webview_userdata_stub.go
Normal file
@@ -0,0 +1,7 @@
|
||||
//go:build !windows

package main

// resolveWindowsWebviewUserDataPath is a no-op on non-Windows builds: an
// empty string tells the webview to use its platform-default user-data
// location.
func resolveWindowsWebviewUserDataPath() string {
	return ""
}
|
||||
BIN
optional-driver-agent
Executable file
BIN
optional-driver-agent
Executable file
Binary file not shown.
Reference in New Issue
Block a user