mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-12 12:19:47 +08:00
Compare commits
28 Commits
v0.1.2
...
feature/da
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
46c48c5ea8 | ||
|
|
e3bf160072 | ||
|
|
80fbfd6365 | ||
|
|
2ca27ebfb0 | ||
|
|
aa7651d95c | ||
|
|
99f947e577 | ||
|
|
99c21f4fd4 | ||
|
|
aca1e712b8 | ||
|
|
ba58cd07c5 | ||
|
|
aeb8c29a72 | ||
|
|
6dcfba1066 | ||
|
|
3f272e4b13 | ||
|
|
c921d85813 | ||
|
|
fef33ef602 | ||
|
|
67bca419f9 | ||
|
|
32bbe257fe | ||
|
|
390cea337b | ||
|
|
0638dbca04 | ||
|
|
d5a65f1004 | ||
|
|
0cff590173 | ||
|
|
9aa9df1a43 | ||
|
|
0387560f07 | ||
|
|
1e2307c8f5 | ||
|
|
f6b56c21aa | ||
|
|
d3afd9aeb2 | ||
|
|
4d77863a06 | ||
|
|
2626ce198c | ||
|
|
7eb42aca62 |
67
.github/workflows/release.yml
vendored
67
.github/workflows/release.yml
vendored
@@ -9,7 +9,8 @@ permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
build-and-release:
|
||||
# Phase 1: Build in parallel and output artifacts
|
||||
build:
|
||||
name: Build ${{ matrix.platform }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
@@ -19,12 +20,15 @@ jobs:
|
||||
- os: macos-latest
|
||||
platform: darwin/amd64
|
||||
artifact_name: GoNavi-mac-amd64
|
||||
asset_ext: .dmg
|
||||
- os: macos-latest
|
||||
platform: darwin/arm64
|
||||
artifact_name: GoNavi-mac-arm64
|
||||
asset_ext: .dmg
|
||||
- os: windows-latest
|
||||
platform: windows/amd64
|
||||
artifact_name: GoNavi-windows-amd64
|
||||
asset_ext: .exe
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
@@ -49,36 +53,26 @@ jobs:
|
||||
run: |
|
||||
wails build -platform ${{ matrix.platform }} -clean -o ${{ matrix.artifact_name }}
|
||||
|
||||
- name: Package macOS Application
|
||||
# macOS Packaging
|
||||
- name: Package macOS DMG
|
||||
if: contains(matrix.platform, 'darwin')
|
||||
run: |
|
||||
# Install create-dmg
|
||||
brew install create-dmg
|
||||
|
||||
cd build/bin
|
||||
echo "📂 列出 build/bin 目录内容:"
|
||||
ls -F
|
||||
|
||||
# Find .app bundle
|
||||
APP_PATH=$(find . -maxdepth 1 -name "*.app" | head -n 1)
|
||||
|
||||
if [ -z "$APP_PATH" ]; then
|
||||
echo "❌ 未找到 .app 应用包!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get pure name (e.g. GoNavi.app)
|
||||
APP_NAME=$(basename "$APP_PATH")
|
||||
|
||||
# Ad-hoc codesign to prevent "Damaged" error (requires user to allow anyway, but valid structure)
|
||||
echo "🔏 正在进行 Ad-hoc 签名..."
|
||||
codesign --force --options runtime --deep --sign - "$APP_NAME"
|
||||
|
||||
DMG_NAME="${{ matrix.artifact_name }}.dmg"
|
||||
|
||||
echo "📦 正在生成 DMG: $DMG_NAME..."
|
||||
|
||||
# Create DMG
|
||||
create-dmg \
|
||||
--volname "GoNavi Installer" \
|
||||
--window-pos 200 120 \
|
||||
@@ -90,24 +84,19 @@ jobs:
|
||||
"$DMG_NAME" \
|
||||
"$APP_NAME"
|
||||
|
||||
# Move DMG to root for upload
|
||||
mv "$DMG_NAME" "../../$DMG_NAME"
|
||||
mv "$DMG_NAME" ../../
|
||||
|
||||
- name: Package Windows Executable
|
||||
# Windows Packaging
|
||||
- name: Prepare Windows Exe
|
||||
if: contains(matrix.platform, 'windows')
|
||||
shell: bash
|
||||
run: |
|
||||
cd build/bin
|
||||
echo "📂 列出 build/bin 目录内容:"
|
||||
ls -F
|
||||
|
||||
TARGET="${{ matrix.artifact_name }}"
|
||||
|
||||
if [ -f "$TARGET.exe" ]; then
|
||||
echo "✅ 找到 $TARGET.exe"
|
||||
FINAL_EXE="$TARGET.exe"
|
||||
elif [ -f "$TARGET" ]; then
|
||||
echo "⚠️ 找到无后缀文件 $TARGET,正在添加 .exe 后缀..."
|
||||
mv "$TARGET" "$TARGET.exe"
|
||||
FINAL_EXE="$TARGET.exe"
|
||||
else
|
||||
@@ -116,14 +105,38 @@ jobs:
|
||||
fi
|
||||
|
||||
echo "📦 正在移动 $FINAL_EXE 到根目录..."
|
||||
mv "$FINAL_EXE" "../../${{ matrix.artifact_name }}.exe"
|
||||
mv "$FINAL_EXE" "../../$FINAL_EXE"
|
||||
|
||||
- name: Upload Release Asset
|
||||
# Upload to Actions Artifacts (Temporary Storage)
|
||||
- name: Upload Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-artifacts-${{ strategy.job-index }} # Unique name per job
|
||||
path: GoNavi-*${{ matrix.asset_ext }}
|
||||
retention-days: 1
|
||||
|
||||
# Phase 2: Collect all artifacts and Publish Release (Single Job)
|
||||
release:
|
||||
name: Publish Release
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download All Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: release-assets
|
||||
pattern: build-artifacts-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: List Assets
|
||||
run: ls -R release-assets
|
||||
|
||||
- name: Create Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
files: |
|
||||
${{ matrix.artifact_name }}.dmg
|
||||
${{ matrix.artifact_name }}.exe
|
||||
files: release-assets/*
|
||||
draft: true
|
||||
make_latest: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -14,4 +14,6 @@ node_modules/
|
||||
dist/
|
||||
.DS_Store
|
||||
.gemini-clipboard
|
||||
GoNavi-Wails
|
||||
GoNavi-Wails
|
||||
GoNavi-Wails.exe
|
||||
.ace-tool/
|
||||
|
||||
22
README.md
22
README.md
@@ -1,21 +1,27 @@
|
||||
# GoNavi - 现代化的轻量级数据库管理工具
|
||||
<img width="1200" height="1116" alt="image" src="https://github.com/user-attachments/assets/d15fa9e9-5486-423b-a0e9-53b467e45432" />
|
||||
<img width="4096" height="2180" alt="image" src="https://github.com/user-attachments/assets/330ce49b-45f1-4919-ae14-75f7d47e5f73" />
|
||||
<img width="4096" height="2304" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
<img width="4096" height="2304" alt="image" src="https://github.com/user-attachments/assets/ec522145-5ceb-4481-ae46-a9251c89bdfc" />
|
||||
<img width="4096" height="2304" alt="image" src="https://github.com/user-attachments/assets/224a74e7-65df-4aef-9710-d8e82e3a70c1" />
|
||||
<img width="4096" height="2180" alt="image" src="https://github.com/user-attachments/assets/f0c57590-d987-4ecf-89b2-64efad60b6d7" />
|
||||
|
||||
[](https://go.dev/)
|
||||
[](https://go.dev/)
|
||||
[](https://wails.io)
|
||||
[](https://reactjs.org/)
|
||||
[](LICENSE)
|
||||
[](https://github.com/yangguofeng/GoNavi/actions)
|
||||
[](https://github.com/Syngnat/GoNavi/actions)
|
||||
|
||||
**GoNavi** 是一款基于 **Wails (Go)** 和 **React** 构建的现代化、高性能、跨平台数据库管理客户端。它旨在提供如原生应用般流畅的用户体验,同时保持极低的资源占用。
|
||||
|
||||
相比于 Electron 应用,GoNavi 的体积更小(~10MB),启动速度更快,内存占用更低。
|
||||
|
||||
<h2 align="center">📸 项目截图</h2>
|
||||
|
||||
<div align="center">
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/341cda98-79a5-4198-90f3-1335131ccde0" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/224a74e7-65df-4aef-9710-d8e82e3a70c1" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/ec522145-5ceb-4481-ae46-a9251c89bdfc" />
|
||||
<br />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/330ce49b-45f1-4919-ae14-75f7d47e5f73" />
|
||||
<img width="14%" alt="image" src="https://github.com/user-attachments/assets/d15fa9e9-5486-423b-a0e9-53b467e45432" />
|
||||
<img width="25%" alt="image" src="https://github.com/user-attachments/assets/f0c57590-d987-4ecf-89b2-64efad60b6d7" />
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## ✨ 核心特性
|
||||
|
||||
8
frontend/package-lock.json
generated
8
frontend/package-lock.json
generated
@@ -27,6 +27,7 @@
|
||||
"@types/react": "^18.2.43",
|
||||
"@types/react-dom": "^18.2.17",
|
||||
"@types/react-resizable": "^3.0.8",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"typescript": "^5.2.2",
|
||||
"vite": "^5.0.8"
|
||||
@@ -1565,6 +1566,13 @@
|
||||
"optional": true,
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@types/uuid": {
|
||||
"version": "9.0.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz",
|
||||
"integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@vitejs/plugin-react": {
|
||||
"version": "4.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz",
|
||||
|
||||
@@ -1 +1 @@
|
||||
c1af19c07654ec9f98628c358ae49b1a
|
||||
d0f9366af59a6367ad3c7e2d4185ead4
|
||||
@@ -1,9 +1,11 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Layout, Button, ConfigProvider, theme, Dropdown, MenuProps, message } from 'antd';
|
||||
import zhCN from 'antd/locale/zh_CN';
|
||||
import { PlusOutlined, BulbOutlined, BulbFilled, ConsoleSqlOutlined, BugOutlined, SettingOutlined, UploadOutlined, DownloadOutlined } from '@ant-design/icons';
|
||||
import Sidebar from './components/Sidebar';
|
||||
import TabManager from './components/TabManager';
|
||||
import ConnectionModal from './components/ConnectionModal';
|
||||
import DataSyncModal from './components/DataSyncModal';
|
||||
import LogPanel from './components/LogPanel';
|
||||
import { useStore } from './store';
|
||||
import { SavedConnection } from './types';
|
||||
@@ -13,6 +15,7 @@ const { Sider, Content } = Layout;
|
||||
|
||||
function App() {
|
||||
const [isModalOpen, setIsModalOpen] = useState(false);
|
||||
const [isSyncModalOpen, setIsSyncModalOpen] = useState(false);
|
||||
const [editingConnection, setEditingConnection] = useState<SavedConnection | null>(null);
|
||||
const { darkMode, toggleDarkMode, addTab, activeContext, connections, addConnection, tabs, activeTabId } = useStore();
|
||||
|
||||
@@ -77,6 +80,12 @@ function App() {
|
||||
};
|
||||
|
||||
const settingsMenu: MenuProps['items'] = [
|
||||
{
|
||||
key: 'sync',
|
||||
label: '数据同步',
|
||||
icon: <UploadOutlined rotate={90} />,
|
||||
onClick: () => setIsSyncModalOpen(true)
|
||||
},
|
||||
{
|
||||
key: 'import',
|
||||
label: '导入连接配置',
|
||||
@@ -216,6 +225,7 @@ function App() {
|
||||
|
||||
return (
|
||||
<ConfigProvider
|
||||
locale={zhCN}
|
||||
theme={{
|
||||
algorithm: darkMode ? theme.darkAlgorithm : theme.defaultAlgorithm,
|
||||
}}
|
||||
@@ -292,6 +302,10 @@ function App() {
|
||||
onClose={handleCloseModal}
|
||||
initialValues={editingConnection}
|
||||
/>
|
||||
<DataSyncModal
|
||||
open={isSyncModalOpen}
|
||||
onClose={() => setIsSyncModalOpen(false)}
|
||||
/>
|
||||
|
||||
{/* Ghost Resize Line for Sidebar */}
|
||||
<div
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Modal, Form, Input, InputNumber, Button, message, Checkbox, Divider, Collapse, Select, Alert } from 'antd';
|
||||
import { Modal, Form, Input, InputNumber, Button, message, Checkbox, Divider, Select, Alert, Card, Row, Col, Typography, Collapse } from 'antd';
|
||||
import { DatabaseOutlined, ConsoleSqlOutlined, FileTextOutlined, CloudServerOutlined, AppstoreAddOutlined } from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { MySQLConnect, MySQLGetDatabases } from '../../wailsjs/go/app/App';
|
||||
import { DBConnect, DBGetDatabases, TestConnection } from '../../wailsjs/go/app/App';
|
||||
import { SavedConnection } from '../types';
|
||||
|
||||
const { Meta } = Card;
|
||||
const { Text } = Typography;
|
||||
|
||||
const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialValues?: SavedConnection | null }> = ({ open, onClose, initialValues }) => {
|
||||
const [form] = Form.useForm();
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [useSSH, setUseSSH] = useState(false);
|
||||
const [dbType, setDbType] = useState('mysql');
|
||||
const [step, setStep] = useState(1); // 1: Select Type, 2: Configure
|
||||
const [testResult, setTestResult] = useState<{ type: 'success' | 'error', message: string } | null>(null);
|
||||
const [dbList, setDbList] = useState<string[]>([]);
|
||||
const addConnection = useStore((state) => state.addConnection);
|
||||
@@ -19,6 +24,8 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
setTestResult(null); // Reset test result
|
||||
setDbList([]);
|
||||
if (initialValues) {
|
||||
// Edit mode: Go directly to step 2
|
||||
setStep(2);
|
||||
form.setFieldsValue({
|
||||
type: initialValues.config.type,
|
||||
name: initialValues.name,
|
||||
@@ -34,10 +41,15 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
sshUser: initialValues.config.ssh?.user,
|
||||
sshPassword: initialValues.config.ssh?.password,
|
||||
sshKeyPath: initialValues.config.ssh?.keyPath,
|
||||
driver: (initialValues.config as any).driver,
|
||||
dsn: (initialValues.config as any).dsn,
|
||||
timeout: (initialValues.config as any).timeout || 30
|
||||
});
|
||||
setUseSSH(initialValues.config.useSSH || false);
|
||||
setDbType(initialValues.config.type);
|
||||
} else {
|
||||
// Create mode: Start at step 1
|
||||
setStep(1);
|
||||
form.resetFields();
|
||||
setUseSSH(false);
|
||||
setDbType('mysql');
|
||||
@@ -52,8 +64,7 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
|
||||
const config = await buildConfig(values);
|
||||
|
||||
// Use Connect to verify before saving
|
||||
const res = await MySQLConnect(config as any);
|
||||
const res = await DBConnect(config as any);
|
||||
setLoading(false);
|
||||
|
||||
if (res.success) {
|
||||
@@ -75,6 +86,7 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
form.resetFields();
|
||||
setUseSSH(false);
|
||||
setDbType('mysql');
|
||||
setStep(1);
|
||||
onClose();
|
||||
} else {
|
||||
message.error('连接失败: ' + res.message);
|
||||
@@ -88,14 +100,13 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
try {
|
||||
const values = await form.validateFields();
|
||||
setLoading(true);
|
||||
setTestResult(null); // Clear previous result
|
||||
setTestResult(null);
|
||||
const config = await buildConfig(values);
|
||||
const res = await (window as any).go.app.App.TestConnection(config);
|
||||
const res = await TestConnection(config as any);
|
||||
setLoading(false);
|
||||
if (res.success) {
|
||||
setTestResult({ type: 'success', message: res.message });
|
||||
// Fetch DB List on success
|
||||
const dbRes = await MySQLGetDatabases(config as any);
|
||||
const dbRes = await DBGetDatabases(config as any);
|
||||
if (dbRes.success) {
|
||||
const dbs = (dbRes.data as any[]).map((row: any) => row.Database || row.database);
|
||||
setDbList(dbs);
|
||||
@@ -119,58 +130,100 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
|
||||
return {
|
||||
type: values.type,
|
||||
host: values.host,
|
||||
host: values.host || "",
|
||||
port: Number(values.port || 0),
|
||||
user: values.user || "",
|
||||
password: values.password || "",
|
||||
database: values.database || "",
|
||||
useSSH: !!values.useSSH,
|
||||
ssh: sshConfig
|
||||
ssh: sshConfig,
|
||||
driver: values.driver,
|
||||
dsn: values.dsn,
|
||||
timeout: Number(values.timeout || 30)
|
||||
};
|
||||
};
|
||||
|
||||
const isSqlite = dbType === 'sqlite';
|
||||
const handleTypeSelect = (type: string) => {
|
||||
setDbType(type);
|
||||
form.setFieldsValue({ type: type });
|
||||
|
||||
// Auto-fill default port
|
||||
let defaultPort = 3306;
|
||||
switch (type) {
|
||||
case 'mysql': defaultPort = 3306; break;
|
||||
case 'postgres': defaultPort = 5432; break;
|
||||
case 'oracle': defaultPort = 1521; break;
|
||||
case 'dameng': defaultPort = 5236; break;
|
||||
case 'kingbase': defaultPort = 54321; break;
|
||||
default: defaultPort = 3306;
|
||||
}
|
||||
if (type !== 'sqlite' && type !== 'custom') {
|
||||
form.setFieldsValue({ port: defaultPort });
|
||||
}
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title={initialValues ? "编辑连接" : "新建连接"}
|
||||
open={open}
|
||||
onCancel={onClose}
|
||||
onOk={handleOk}
|
||||
confirmLoading={loading}
|
||||
footer={[
|
||||
<Button key="test" loading={loading} onClick={handleTest}>测试连接</Button>,
|
||||
<Button key="cancel" onClick={onClose}>取消</Button>,
|
||||
<Button key="submit" type="primary" loading={loading} onClick={handleOk}>保存</Button>
|
||||
]}
|
||||
width={600}
|
||||
zIndex={10001} // Increase z-index
|
||||
destroyOnHidden // Reset on close
|
||||
maskClosable={false} // Prevent accidental close by clicking mask, user must click X or Cancel
|
||||
>
|
||||
setStep(2);
|
||||
};
|
||||
|
||||
const isSqlite = dbType === 'sqlite';
|
||||
const isCustom = dbType === 'custom';
|
||||
|
||||
const dbTypes = [
|
||||
{ key: 'mysql', name: 'MySQL', icon: <ConsoleSqlOutlined style={{ fontSize: 24, color: '#00758F' }} /> },
|
||||
{ key: 'postgres', name: 'PostgreSQL', icon: <DatabaseOutlined style={{ fontSize: 24, color: '#336791' }} /> },
|
||||
{ key: 'sqlite', name: 'SQLite', icon: <FileTextOutlined style={{ fontSize: 24, color: '#003B57' }} /> },
|
||||
{ key: 'oracle', name: 'Oracle', icon: <DatabaseOutlined style={{ fontSize: 24, color: '#F80000' }} /> },
|
||||
{ key: 'dameng', name: 'Dameng (达梦)', icon: <CloudServerOutlined style={{ fontSize: 24, color: '#1890ff' }} /> },
|
||||
{ key: 'kingbase', name: 'Kingbase (人大金仓)', icon: <DatabaseOutlined style={{ fontSize: 24, color: '#faad14' }} /> },
|
||||
{ key: 'custom', name: 'Custom (自定义)', icon: <AppstoreAddOutlined style={{ fontSize: 24, color: '#595959' }} /> },
|
||||
];
|
||||
|
||||
const renderStep1 = () => (
|
||||
<Row gutter={[16, 16]}>
|
||||
{dbTypes.map(item => (
|
||||
<Col span={8} key={item.key}>
|
||||
<Card
|
||||
hoverable
|
||||
onClick={() => handleTypeSelect(item.key)}
|
||||
style={{ textAlign: 'center', cursor: 'pointer' }}
|
||||
>
|
||||
<div style={{ marginBottom: 12 }}>{item.icon}</div>
|
||||
<Text strong>{item.name}</Text>
|
||||
</Card>
|
||||
</Col>
|
||||
))}
|
||||
</Row>
|
||||
);
|
||||
|
||||
const renderStep2 = () => (
|
||||
<Form
|
||||
form={form}
|
||||
layout="vertical"
|
||||
initialValues={{ type: 'mysql', host: 'localhost', port: 3306, user: 'root', useSSH: false, sshPort: 22 }}
|
||||
initialValues={{ type: 'mysql', host: 'localhost', port: 3306, user: 'root', useSSH: false, sshPort: 22, timeout: 30 }}
|
||||
onValuesChange={(changed) => {
|
||||
if (testResult) setTestResult(null); // Clear result on change
|
||||
if (changed.useSSH !== undefined) setUseSSH(changed.useSSH);
|
||||
// Type change handled by step 1, but keep sync if select changes (hidden now)
|
||||
if (changed.type !== undefined) setDbType(changed.type);
|
||||
}}
|
||||
>
|
||||
<div style={{ display: 'flex', gap: 16 }}>
|
||||
<Form.Item name="type" label="数据库类型" style={{ width: 120 }}>
|
||||
<Select>
|
||||
<Select.Option value="mysql">MySQL</Select.Option>
|
||||
<Select.Option value="postgres">PostgreSQL</Select.Option>
|
||||
<Select.Option value="sqlite">SQLite</Select.Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item name="name" label="连接名称" style={{ flex: 1 }}>
|
||||
<Input placeholder="例如:本地测试库" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
{/* Hidden Type Field to keep form value synced */}
|
||||
<Form.Item name="type" hidden><Input /></Form.Item>
|
||||
|
||||
<Form.Item name="name" label="连接名称">
|
||||
<Input placeholder="例如:本地测试库" />
|
||||
</Form.Item>
|
||||
|
||||
{isCustom ? (
|
||||
<>
|
||||
<Form.Item name="driver" label="驱动名称 (Driver Name)" rules={[{ required: true, message: '请输入驱动名称' }]} help="已支持: mysql, postgres, sqlite, oracle, dm, kingbase">
|
||||
<Input placeholder="例如: mysql, postgres" />
|
||||
</Form.Item>
|
||||
<Form.Item name="dsn" label="连接字符串 (DSN)" rules={[{ required: true, message: '请输入连接字符串' }]}>
|
||||
<Input.TextArea rows={3} placeholder="例如: user:pass@tcp(localhost:3306)/dbname?charset=utf8" />
|
||||
</Form.Item>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<div style={{ display: 'flex', gap: 16 }}>
|
||||
<Form.Item name="host" label={isSqlite ? "文件路径 (绝对路径)" : "主机地址 (Host)"} rules={[{ required: true, message: '请输入地址/路径' }]} style={{ flex: 1 }}>
|
||||
<Input placeholder={isSqlite ? "/path/to/db.sqlite" : "localhost"} />
|
||||
@@ -231,20 +284,76 @@ const ConnectionModal: React.FC<{ open: boolean; onClose: () => void; initialVal
|
||||
</Form.Item>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<Divider style={{ margin: '12px 0' }} />
|
||||
|
||||
<Collapse
|
||||
ghost
|
||||
items={[{
|
||||
key: 'advanced',
|
||||
label: '高级连接',
|
||||
children: (
|
||||
<Form.Item
|
||||
name="timeout"
|
||||
label="连接超时 (秒)"
|
||||
help="数据库连接超时时间,默认 30 秒"
|
||||
rules={[{ type: 'number', min: 1, max: 300, message: '超时时间范围: 1-300 秒' }]}
|
||||
>
|
||||
<InputNumber style={{ width: '100%' }} min={1} max={300} placeholder="30" />
|
||||
</Form.Item>
|
||||
)
|
||||
}]}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</Form>
|
||||
|
||||
{testResult && (
|
||||
</>
|
||||
)}
|
||||
|
||||
{testResult && (
|
||||
<Alert
|
||||
message={testResult.message}
|
||||
type={testResult.type}
|
||||
showIcon
|
||||
style={{ marginTop: 16 }}
|
||||
/>
|
||||
)}
|
||||
)}
|
||||
</Form>
|
||||
);
|
||||
|
||||
const getFooter = () => {
|
||||
if (step === 1) {
|
||||
return [
|
||||
<Button key="cancel" onClick={onClose}>取消</Button>
|
||||
];
|
||||
}
|
||||
return [
|
||||
!initialValues && <Button key="back" onClick={() => setStep(1)} style={{ float: 'left' }}>上一步</Button>,
|
||||
<Button key="test" loading={loading} onClick={handleTest}>测试连接</Button>,
|
||||
<Button key="cancel" onClick={onClose}>取消</Button>,
|
||||
<Button key="submit" type="primary" loading={loading} onClick={handleOk}>保存</Button>
|
||||
];
|
||||
};
|
||||
|
||||
const getTitle = () => {
|
||||
if (step === 1) return "选择数据源类型";
|
||||
const typeName = dbTypes.find(t => t.key === dbType)?.name || dbType;
|
||||
return initialValues ? "编辑连接" : `新建 ${typeName} 连接`;
|
||||
};
|
||||
|
||||
return (
|
||||
<Modal
|
||||
title={getTitle()}
|
||||
open={open}
|
||||
onCancel={onClose}
|
||||
footer={getFooter()}
|
||||
width={step === 1 ? 700 : 600}
|
||||
zIndex={10001}
|
||||
destroyOnHidden
|
||||
maskClosable={false}
|
||||
>
|
||||
{step === 1 ? renderStep1() : renderStep2()}
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default ConnectionModal;
|
||||
export default ConnectionModal;
|
||||
|
||||
@@ -2,20 +2,27 @@ import React, { useState, useEffect, useRef, useContext, useMemo, useCallback }
|
||||
import { Table, message, Input, Button, Dropdown, MenuProps, Form, Pagination, Select, Modal } from 'antd';
|
||||
import type { SortOrder } from 'antd/es/table/interface';
|
||||
import { ReloadOutlined, ImportOutlined, ExportOutlined, DownOutlined, PlusOutlined, DeleteOutlined, SaveOutlined, UndoOutlined, FilterOutlined, CloseOutlined, ConsoleSqlOutlined, FileTextOutlined, CopyOutlined, ClearOutlined } from '@ant-design/icons';
|
||||
import { Resizable } from 'react-resizable';
|
||||
import { ImportData, ExportTable, ExportData, ApplyChanges } from '../../wailsjs/go/app/App';
|
||||
import { useStore } from '../store';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import 'react-resizable/css/styles.css';
|
||||
|
||||
// 内部行标识字段:避免与真实业务字段(如 `key` 列)冲突。
|
||||
export const GONAVI_ROW_KEY = '__gonavi_row_key__';
|
||||
|
||||
// Normalize RFC3339-like datetime strings to `YYYY-MM-DD HH:mm:ss` for display/editing.
|
||||
const normalizeDateTimeString = (val: string) => {
|
||||
const match = val.match(/^(\d{4}-\d{2}-\d{2})T(\d{2}:\d{2}:\d{2})/);
|
||||
if (!match) return val;
|
||||
return `${match[1]} ${match[2]}`;
|
||||
};
|
||||
|
||||
// --- Helper: Format Value ---
|
||||
const formatCellValue = (val: any) => {
|
||||
if (val === null) return <span style={{ color: '#ccc' }}>NULL</span>;
|
||||
if (typeof val === 'object') return JSON.stringify(val);
|
||||
if (typeof val === 'string') {
|
||||
if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(val)) {
|
||||
return val.replace('T', ' ').replace(/\+.*$/, '').replace(/Z$/, '');
|
||||
}
|
||||
return normalizeDateTimeString(val);
|
||||
}
|
||||
return String(val);
|
||||
};
|
||||
@@ -68,7 +75,6 @@ const DataContext = React.createContext<{
|
||||
} | null>(null);
|
||||
|
||||
interface Item {
|
||||
key: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
@@ -103,13 +109,15 @@ const EditableCell: React.FC<EditableCellProps> = React.memo(({
|
||||
|
||||
const toggleEdit = () => {
|
||||
setEditing(!editing);
|
||||
form.setFieldsValue({ [dataIndex]: record[dataIndex] });
|
||||
const raw = record[dataIndex];
|
||||
const initialValue = typeof raw === 'string' ? normalizeDateTimeString(raw) : raw;
|
||||
form.setFieldsValue({ [dataIndex]: initialValue });
|
||||
};
|
||||
|
||||
const save = async () => {
|
||||
try {
|
||||
if (!form) return;
|
||||
const values = await form.validateFields();
|
||||
const values = await form.validateFields([dataIndex]);
|
||||
toggleEdit();
|
||||
handleSave({ ...record, ...values });
|
||||
} catch (errInfo) {
|
||||
@@ -134,8 +142,7 @@ const EditableCell: React.FC<EditableCellProps> = React.memo(({
|
||||
return <td {...restProps} onDoubleClick={editable ? toggleEdit : undefined}>{childNode}</td>;
|
||||
});
|
||||
|
||||
const ContextMenuRow = React.memo(({ children, ...props }: any) => {
|
||||
const record = props.record;
|
||||
const ContextMenuRow = React.memo(({ children, record, ...props }: any) => {
|
||||
const context = useContext(DataContext);
|
||||
|
||||
if (!record || !context) return <tr {...props}>{children}</tr>;
|
||||
@@ -144,8 +151,9 @@ const ContextMenuRow = React.memo(({ children, ...props }: any) => {
|
||||
|
||||
const getTargets = () => {
|
||||
const keys = selectedRowKeysRef.current;
|
||||
if (keys.includes(record.key)) {
|
||||
return displayDataRef.current.filter(d => keys.includes(d.key));
|
||||
const recordKey = record?.[GONAVI_ROW_KEY];
|
||||
if (recordKey !== undefined && keys.includes(recordKey)) {
|
||||
return displayDataRef.current.filter(d => keys.includes(d?.[GONAVI_ROW_KEY]));
|
||||
}
|
||||
return [record];
|
||||
};
|
||||
@@ -162,7 +170,7 @@ const ContextMenuRow = React.memo(({ children, ...props }: any) => {
|
||||
{ key: 'copy', label: '复制为 Markdown', icon: <CopyOutlined />, onClick: () => {
|
||||
const records = getTargets();
|
||||
const lines = records.map((r: any) => {
|
||||
const { key, ...vals } = r;
|
||||
const { [GONAVI_ROW_KEY]: _rowKey, ...vals } = r;
|
||||
return `| ${Object.values(vals).join(' | ')} |`;
|
||||
});
|
||||
copyToClipboard(lines.join('\n'));
|
||||
@@ -200,7 +208,7 @@ interface DataGridProps {
|
||||
onReload?: () => void;
|
||||
onSort?: (field: string, order: string) => void;
|
||||
onPageChange?: (page: number, size: number) => void;
|
||||
pagination?: { current: number, pageSize: number, total: number };
|
||||
pagination?: { current: number, pageSize: number, total: number, totalKnown?: boolean };
|
||||
// Filtering
|
||||
showFilter?: boolean;
|
||||
onToggleFilter?: () => void;
|
||||
@@ -220,7 +228,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
// Helper to export specific data
|
||||
const exportData = async (rows: any[], format: string) => {
|
||||
const hide = message.loading(`正在导出 ${rows.length} 条数据...`, 0);
|
||||
const cleanRows = rows.map(({ key, ...rest }) => rest);
|
||||
const cleanRows = rows.map(({ [GONAVI_ROW_KEY]: _rowKey, ...rest }) => rest);
|
||||
// Pass tableName (or 'export') as default filename
|
||||
const res = await ExportData(cleanRows, columnNames, tableName || 'export', format);
|
||||
hide();
|
||||
@@ -261,7 +269,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const [selectedRowKeys, setSelectedRowKeys] = useState<React.Key[]>([]);
|
||||
const [addedRows, setAddedRows] = useState<any[]>([]);
|
||||
const [modifiedRows, setModifiedRows] = useState<Record<string, any>>({});
|
||||
const [deletedRowKeys, setDeletedRowKeys] = useState<Set<React.Key>>(new Set());
|
||||
const [deletedRowKeys, setDeletedRowKeys] = useState<Set<string>>(new Set());
|
||||
|
||||
// Filter State
|
||||
const [filterConditions, setFilterConditions] = useState<{ id: number, column: string, op: string, value: string }[]>([]);
|
||||
@@ -278,10 +286,16 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
setModifiedRows({});
|
||||
setDeletedRowKeys(new Set());
|
||||
setSelectedRowKeys([]);
|
||||
form.resetFields();
|
||||
}, [tableName, dbName, connectionId]); // Reset on context change
|
||||
|
||||
const rowKeyStr = useCallback((k: React.Key) => String(k), []);
|
||||
|
||||
const displayData = useMemo(() => {
|
||||
return [...data, ...addedRows].filter(item => !deletedRowKeys.has(item.key));
|
||||
return [...data, ...addedRows].filter(item => {
|
||||
const k = item?.[GONAVI_ROW_KEY];
|
||||
return k === undefined ? true : !deletedRowKeys.has(rowKeyStr(k));
|
||||
});
|
||||
}, [data, addedRows, deletedRowKeys]);
|
||||
|
||||
useEffect(() => { displayDataRef.current = displayData; }, [displayData]);
|
||||
@@ -304,10 +318,21 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const draggingRef = useRef<{
|
||||
startX: number,
|
||||
startWidth: number,
|
||||
key: string
|
||||
key: string,
|
||||
containerLeft: number
|
||||
} | null>(null);
|
||||
const ghostRef = useRef<HTMLDivElement>(null);
|
||||
const resizeRafRef = useRef<number | null>(null);
|
||||
const latestClientXRef = useRef<number | null>(null);
|
||||
const isResizingRef = useRef(false); // Lock for sorting
|
||||
|
||||
const flushGhostPosition = useCallback(() => {
|
||||
resizeRafRef.current = null;
|
||||
if (!draggingRef.current || !ghostRef.current) return;
|
||||
if (latestClientXRef.current === null) return;
|
||||
const relativeLeft = latestClientXRef.current - draggingRef.current.containerLeft;
|
||||
ghostRef.current.style.transform = `translateX(${relativeLeft}px)`;
|
||||
}, []);
|
||||
|
||||
// 1. Drag Start
|
||||
|
||||
@@ -327,21 +352,18 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
|
||||
const currentWidth = columnWidths[key] || 200;
|
||||
|
||||
|
||||
const containerLeft = containerRef.current?.getBoundingClientRect().left ?? 0;
|
||||
|
||||
draggingRef.current = { startX, startWidth: currentWidth, key };
|
||||
draggingRef.current = { startX, startWidth: currentWidth, key, containerLeft };
|
||||
latestClientXRef.current = startX;
|
||||
|
||||
|
||||
|
||||
// Show Ghost Line at initial position
|
||||
|
||||
if (ghostRef.current && containerRef.current) {
|
||||
|
||||
const containerRect = containerRef.current.getBoundingClientRect();
|
||||
|
||||
const relativeLeft = startX - containerRect.left;
|
||||
|
||||
ghostRef.current.style.left = `${relativeLeft}px`;
|
||||
const relativeLeft = startX - containerLeft;
|
||||
ghostRef.current.style.transform = `translateX(${relativeLeft}px)`;
|
||||
|
||||
ghostRef.current.style.display = 'block';
|
||||
|
||||
@@ -363,13 +385,11 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
|
||||
// 2. Drag Move (Global)
|
||||
const handleResizeMove = useCallback((e: MouseEvent) => {
|
||||
if (!draggingRef.current || !ghostRef.current || !containerRef.current) return;
|
||||
|
||||
// Update Ghost Line Position directly
|
||||
const containerRect = containerRef.current.getBoundingClientRect();
|
||||
const relativeLeft = e.clientX - containerRect.left;
|
||||
ghostRef.current.style.left = `${relativeLeft}px`;
|
||||
}, []);
|
||||
if (!draggingRef.current) return;
|
||||
latestClientXRef.current = e.clientX;
|
||||
if (resizeRafRef.current !== null) return;
|
||||
resizeRafRef.current = requestAnimationFrame(flushGhostPosition);
|
||||
}, [flushGhostPosition]);
|
||||
|
||||
// 3. Drag Stop (Global)
|
||||
const handleResizeStop = useCallback((e: MouseEvent) => {
|
||||
@@ -383,6 +403,11 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
setColumnWidths(prev => ({ ...prev, [key]: newWidth }));
|
||||
|
||||
// Cleanup
|
||||
if (resizeRafRef.current !== null) {
|
||||
cancelAnimationFrame(resizeRafRef.current);
|
||||
resizeRafRef.current = null;
|
||||
}
|
||||
latestClientXRef.current = null;
|
||||
if (ghostRef.current) ghostRef.current.style.display = 'none';
|
||||
document.removeEventListener('mousemove', handleResizeMove);
|
||||
document.removeEventListener('mouseup', handleResizeStop);
|
||||
@@ -405,11 +430,13 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
// So we update 'modifiedRows'.
|
||||
|
||||
// Check if it's an added row
|
||||
const isAdded = addedRows.some(r => r.key === row.key);
|
||||
const rowKey = row?.[GONAVI_ROW_KEY];
|
||||
if (rowKey === undefined) return;
|
||||
const isAdded = addedRows.some(r => r?.[GONAVI_ROW_KEY] === rowKey);
|
||||
if (isAdded) {
|
||||
setAddedRows(prev => prev.map(r => r.key === row.key ? { ...r, ...row } : r));
|
||||
setAddedRows(prev => prev.map(r => r?.[GONAVI_ROW_KEY] === rowKey ? { ...r, ...row } : r));
|
||||
} else {
|
||||
setModifiedRows(prev => ({ ...prev, [row.key]: row }));
|
||||
setModifiedRows(prev => ({ ...prev, [rowKeyStr(rowKey)]: row }));
|
||||
}
|
||||
}, [addedRows]);
|
||||
|
||||
@@ -418,8 +445,9 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
// We need to merge modifiedRows into it for rendering.
|
||||
const mergedDisplayData = useMemo(() => {
|
||||
return displayData.map(row => {
|
||||
if (modifiedRows[row.key]) {
|
||||
return { ...row, ...modifiedRows[row.key] };
|
||||
const k = row?.[GONAVI_ROW_KEY];
|
||||
if (k !== undefined && modifiedRows[rowKeyStr(k)]) {
|
||||
return { ...row, ...modifiedRows[rowKeyStr(k)] };
|
||||
}
|
||||
return row;
|
||||
});
|
||||
@@ -459,7 +487,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
|
||||
const handleAddRow = () => {
|
||||
const newKey = `new-${Date.now()}`;
|
||||
const newRow: any = { key: newKey };
|
||||
const newRow: any = { [GONAVI_ROW_KEY]: newKey };
|
||||
columnNames.forEach(col => newRow[col] = '');
|
||||
setAddedRows(prev => [...prev, newRow]);
|
||||
};
|
||||
@@ -467,7 +495,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const handleDeleteSelected = () => {
|
||||
setDeletedRowKeys(prev => {
|
||||
const newDeleted = new Set(prev);
|
||||
selectedRowKeys.forEach(key => newDeleted.add(key));
|
||||
selectedRowKeys.forEach(key => newDeleted.add(rowKeyStr(key)));
|
||||
return newDeleted;
|
||||
});
|
||||
setSelectedRowKeys([]);
|
||||
@@ -482,27 +510,27 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const updates: any[] = [];
|
||||
const deletes: any[] = [];
|
||||
|
||||
addedRows.forEach(row => { const { key, ...vals } = row; inserts.push(vals); });
|
||||
deletedRowKeys.forEach(key => {
|
||||
addedRows.forEach(row => { const { [GONAVI_ROW_KEY]: _rowKey, ...vals } = row; inserts.push(vals); });
|
||||
deletedRowKeys.forEach(keyStr => {
|
||||
// Find original data
|
||||
const originalRow = data.find(d => d.key === key) || addedRows.find(d => d.key === key);
|
||||
const originalRow = data.find(d => rowKeyStr(d?.[GONAVI_ROW_KEY]) === keyStr) || addedRows.find(d => rowKeyStr(d?.[GONAVI_ROW_KEY]) === keyStr);
|
||||
if (originalRow) {
|
||||
const pkData: any = {};
|
||||
if (pkColumns.length > 0) pkColumns.forEach(k => pkData[k] = originalRow[k]);
|
||||
else { const { key: _, ...rest } = originalRow; Object.assign(pkData, rest); }
|
||||
else { const { [GONAVI_ROW_KEY]: _rowKey, ...rest } = originalRow; Object.assign(pkData, rest); }
|
||||
deletes.push(pkData);
|
||||
}
|
||||
});
|
||||
Object.entries(modifiedRows).forEach(([key, newRow]) => {
|
||||
if (deletedRowKeys.has(key)) return;
|
||||
const originalRow = data.find(d => d.key === key);
|
||||
Object.entries(modifiedRows).forEach(([keyStr, newRow]) => {
|
||||
if (deletedRowKeys.has(keyStr)) return;
|
||||
const originalRow = data.find(d => rowKeyStr(d?.[GONAVI_ROW_KEY]) === keyStr);
|
||||
if (!originalRow) return; // Should not happen for modified rows unless deleted
|
||||
|
||||
const pkData: any = {};
|
||||
if (pkColumns.length > 0) pkColumns.forEach(k => pkData[k] = originalRow[k]);
|
||||
else { const { key: _, ...rest } = originalRow; Object.assign(pkData, rest); }
|
||||
else { const { [GONAVI_ROW_KEY]: _rowKey, ...rest } = originalRow; Object.assign(pkData, rest); }
|
||||
|
||||
const { key: _, ...vals } = newRow;
|
||||
const { [GONAVI_ROW_KEY]: _rowKey, ...vals } = newRow;
|
||||
updates.push({ keys: pkData, values: vals });
|
||||
});
|
||||
|
||||
@@ -567,8 +595,9 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const getTargets = useCallback((clickedRecord: any) => {
|
||||
const selKeys = selectedRowKeysRef.current;
|
||||
const currentData = displayDataRef.current;
|
||||
if (selKeys.includes(clickedRecord.key)) {
|
||||
return currentData.filter(d => selKeys.includes(d.key));
|
||||
const clickedKey = clickedRecord?.[GONAVI_ROW_KEY];
|
||||
if (clickedKey !== undefined && selKeys.includes(clickedKey)) {
|
||||
return currentData.filter(d => selKeys.includes(d?.[GONAVI_ROW_KEY]));
|
||||
}
|
||||
return [clickedRecord];
|
||||
}, []);
|
||||
@@ -576,7 +605,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const handleCopyInsert = useCallback((record: any) => {
|
||||
const records = getTargets(record);
|
||||
const sqls = records.map((r: any) => {
|
||||
const { key, ...vals } = r;
|
||||
const { [GONAVI_ROW_KEY]: _rowKey, ...vals } = r;
|
||||
const cols = Object.keys(vals);
|
||||
const values = Object.values(vals).map(v => v === null ? 'NULL' : `'${v}'`);
|
||||
const targetTable = tableName || 'table';
|
||||
@@ -588,7 +617,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const handleCopyJson = useCallback((record: any) => {
|
||||
const records = getTargets(record);
|
||||
const cleanRecords = records.map((r: any) => {
|
||||
const { key, ...rest } = r;
|
||||
const { [GONAVI_ROW_KEY]: _rowKey, ...rest } = r;
|
||||
return rest;
|
||||
});
|
||||
copyToClipboard(JSON.stringify(cleanRecords, null, 2));
|
||||
@@ -597,7 +626,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
const handleCopyCsv = useCallback((record: any) => {
|
||||
const records = getTargets(record);
|
||||
const lines = records.map((r: any) => {
|
||||
const { key, ...vals } = r;
|
||||
const { [GONAVI_ROW_KEY]: _rowKey, ...vals } = r;
|
||||
const values = Object.values(vals).map(v => v === null ? 'NULL' : `"${v}"`);
|
||||
return values.join(',');
|
||||
});
|
||||
@@ -616,7 +645,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
|
||||
// 1. Export Selected
|
||||
if (selectedRowKeys.length > 0) {
|
||||
const selectedRows = displayData.filter(d => selectedRowKeys.includes(d.key));
|
||||
const selectedRows = displayData.filter(d => selectedRowKeys.includes(d?.[GONAVI_ROW_KEY]));
|
||||
await exportData(selectedRows, format);
|
||||
return;
|
||||
}
|
||||
@@ -695,6 +724,7 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
}), []);
|
||||
|
||||
const totalWidth = columns.reduce((sum, col) => sum + (col.width as number || 200), 0);
|
||||
const enableVirtual = mergedDisplayData.length >= 200;
|
||||
|
||||
return (
|
||||
<div className={gridId} style={{ height: '100%', overflow: 'hidden', padding: 0, display: 'flex', flexDirection: 'column', minHeight: 0 }}>
|
||||
@@ -770,7 +800,9 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
columns={mergedColumns}
|
||||
size="small"
|
||||
scroll={{ x: Math.max(totalWidth, 1000), y: tableHeight }}
|
||||
virtual={enableVirtual}
|
||||
loading={loading}
|
||||
rowKey={GONAVI_ROW_KEY}
|
||||
pagination={false}
|
||||
onChange={handleTableChange}
|
||||
bordered
|
||||
@@ -779,8 +811,9 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
onChange: setSelectedRowKeys,
|
||||
}}
|
||||
rowClassName={(record) => {
|
||||
if (addedRows.some(r => r.key === record.key)) return 'row-added';
|
||||
if (modifiedRows[record.key] || deletedRowKeys.has(record.key)) return 'row-modified'; // deleted won't show
|
||||
const k = record?.[GONAVI_ROW_KEY];
|
||||
if (k !== undefined && addedRows.some(r => r?.[GONAVI_ROW_KEY] === k)) return 'row-added';
|
||||
if (k !== undefined && (modifiedRows[rowKeyStr(k)] || deletedRowKeys.has(rowKeyStr(k)))) return 'row-modified'; // deleted won't show
|
||||
return '';
|
||||
}}
|
||||
onRow={(record) => ({ record } as any)}
|
||||
@@ -792,11 +825,15 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
|
||||
{pagination && (
|
||||
<div style={{ padding: '8px', borderTop: '1px solid #eee', display: 'flex', justifyContent: 'flex-end', background: '#fff' }}>
|
||||
<Pagination
|
||||
<Pagination
|
||||
current={pagination.current}
|
||||
pageSize={pagination.pageSize}
|
||||
total={pagination.total}
|
||||
showTotal={(total, range) => `当前 ${range[1] - range[0] + 1} 条 / 共 ${total} 条`}
|
||||
showTotal={(total, range) => {
|
||||
const currentCount = Math.max(0, range[1] - range[0] + 1);
|
||||
if (pagination.totalKnown === false) return `当前 ${currentCount} 条 / 正在统计总数...`;
|
||||
return `当前 ${currentCount} 条 / 共 ${total} 条`;
|
||||
}}
|
||||
showSizeChanger
|
||||
pageSizeOptions={['100', '200', '500', '1000']}
|
||||
onChange={onPageChange}
|
||||
@@ -821,11 +858,13 @@ const DataGrid: React.FC<DataGridProps> = ({
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
bottom: 0, // Fits container height
|
||||
left: 0,
|
||||
width: '2px',
|
||||
background: '#1890ff',
|
||||
zIndex: 9999,
|
||||
display: 'none',
|
||||
pointerEvents: 'none'
|
||||
pointerEvents: 'none',
|
||||
willChange: 'transform'
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
807
frontend/src/components/DataSyncModal.tsx
Normal file
807
frontend/src/components/DataSyncModal.tsx
Normal file
@@ -0,0 +1,807 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs } from 'antd';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, DBGetTables, DataSync, DataSyncAnalyze, DataSyncPreview } from '../../wailsjs/go/app/App';
|
||||
import { SavedConnection } from '../types';
|
||||
import { EventsOn } from '../../wailsjs/runtime/runtime';
|
||||
|
||||
const { Title, Text } = Typography;
|
||||
const { Step } = Steps;
|
||||
const { Option } = Select;
|
||||
|
||||
type SyncLogEvent = { jobId: string; level?: string; message?: string; ts?: number };
|
||||
type SyncProgressEvent = { jobId: string; percent?: number; current?: number; total?: number; table?: string; stage?: string };
|
||||
type SyncLogItem = { level: string; message: string; ts?: number };
|
||||
type TableDiffSummary = {
|
||||
table: string;
|
||||
pkColumn?: string;
|
||||
canSync?: boolean;
|
||||
inserts?: number;
|
||||
updates?: number;
|
||||
deletes?: number;
|
||||
same?: number;
|
||||
message?: string;
|
||||
};
|
||||
type TableOps = {
|
||||
insert: boolean;
|
||||
update: boolean;
|
||||
delete: boolean;
|
||||
selectedInsertPks?: string[];
|
||||
selectedUpdatePks?: string[];
|
||||
selectedDeletePks?: string[];
|
||||
};
|
||||
|
||||
const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open, onClose }) => {
|
||||
const connections = useStore((state) => state.connections);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
// Step 1: Config
|
||||
const [sourceConnId, setSourceConnId] = useState<string>('');
|
||||
const [targetConnId, setTargetConnId] = useState<string>('');
|
||||
const [sourceDb, setSourceDb] = useState<string>('');
|
||||
const [targetDb, setTargetDb] = useState<string>('');
|
||||
|
||||
const [sourceDbs, setSourceDbs] = useState<string[]>([]);
|
||||
const [targetDbs, setTargetDbs] = useState<string[]>([]);
|
||||
|
||||
// Step 2: Tables
|
||||
const [allTables, setAllTables] = useState<string[]>([]);
|
||||
const [selectedTables, setSelectedTables] = useState<string[]>([]);
|
||||
|
||||
// Options
|
||||
const [syncContent, setSyncContent] = useState<'data' | 'schema' | 'both'>('data');
|
||||
const [syncMode, setSyncMode] = useState<string>('insert_update');
|
||||
const [autoAddColumns, setAutoAddColumns] = useState<boolean>(true);
|
||||
const [showSameTables, setShowSameTables] = useState<boolean>(false);
|
||||
const [analyzing, setAnalyzing] = useState<boolean>(false);
|
||||
const [diffTables, setDiffTables] = useState<TableDiffSummary[]>([]);
|
||||
const [tableOptions, setTableOptions] = useState<Record<string, TableOps>>({});
|
||||
|
||||
const [previewOpen, setPreviewOpen] = useState(false);
|
||||
const [previewTable, setPreviewTable] = useState<string>('');
|
||||
const [previewLoading, setPreviewLoading] = useState(false);
|
||||
const [previewData, setPreviewData] = useState<any>(null);
|
||||
|
||||
// Step 3: Result
|
||||
const [syncResult, setSyncResult] = useState<any>(null);
|
||||
const [syncing, setSyncing] = useState(false);
|
||||
const [syncLogs, setSyncLogs] = useState<SyncLogItem[]>([]);
|
||||
const [syncProgress, setSyncProgress] = useState<{ percent: number; current: number; total: number; table: string; stage: string }>({
|
||||
percent: 0,
|
||||
current: 0,
|
||||
total: 0,
|
||||
table: '',
|
||||
stage: ''
|
||||
});
|
||||
const jobIdRef = useRef<string>('');
|
||||
const logBoxRef = useRef<HTMLDivElement>(null);
|
||||
const autoScrollRef = useRef(true);
|
||||
|
||||
const normalizeConnConfig = (conn: SavedConnection, database?: string) => ({
|
||||
...conn.config,
|
||||
port: Number((conn.config as any).port),
|
||||
password: conn.config.password || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: typeof database === 'string' ? database : (conn.config.database || ""),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (!open) return;
|
||||
|
||||
const offLog = EventsOn('sync:log', (event: SyncLogEvent) => {
|
||||
if (!event || event.jobId !== jobIdRef.current) return;
|
||||
const msg = String(event.message || '').trim();
|
||||
if (!msg) return;
|
||||
setSyncLogs(prev => [...prev, { level: String(event.level || 'info'), message: msg, ts: event.ts }]);
|
||||
});
|
||||
|
||||
const offProgress = EventsOn('sync:progress', (event: SyncProgressEvent) => {
|
||||
if (!event || event.jobId !== jobIdRef.current) return;
|
||||
setSyncProgress(prev => ({
|
||||
percent: typeof event.percent === 'number' ? event.percent : prev.percent,
|
||||
current: typeof event.current === 'number' ? event.current : prev.current,
|
||||
total: typeof event.total === 'number' ? event.total : prev.total,
|
||||
table: typeof event.table === 'string' ? event.table : prev.table,
|
||||
stage: typeof event.stage === 'string' ? event.stage : prev.stage,
|
||||
}));
|
||||
});
|
||||
|
||||
return () => {
|
||||
offLog();
|
||||
offProgress();
|
||||
};
|
||||
}, [open]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!logBoxRef.current) return;
|
||||
if (!autoScrollRef.current) return;
|
||||
logBoxRef.current.scrollTop = logBoxRef.current.scrollHeight;
|
||||
}, [syncLogs]);
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
setCurrentStep(0);
|
||||
setSourceConnId('');
|
||||
setTargetConnId('');
|
||||
setSourceDb('');
|
||||
setTargetDb('');
|
||||
setSelectedTables([]);
|
||||
setSyncContent('data');
|
||||
setSyncMode('insert_update');
|
||||
setAutoAddColumns(true);
|
||||
setShowSameTables(false);
|
||||
setAnalyzing(false);
|
||||
setDiffTables([]);
|
||||
setTableOptions({});
|
||||
setPreviewOpen(false);
|
||||
setPreviewTable('');
|
||||
setPreviewLoading(false);
|
||||
setPreviewData(null);
|
||||
setSyncResult(null);
|
||||
setSyncing(false);
|
||||
setSyncLogs([]);
|
||||
setSyncProgress({ percent: 0, current: 0, total: 0, table: '', stage: '' });
|
||||
jobIdRef.current = '';
|
||||
autoScrollRef.current = true;
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
const handleSourceConnChange = async (connId: string) => {
|
||||
setSourceConnId(connId);
|
||||
setSourceDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setSourceDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch source databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleTargetConnChange = async (connId: string) => {
|
||||
setTargetConnId(connId);
|
||||
setTargetDb('');
|
||||
const conn = connections.find(c => c.id === connId);
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setTargetDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
} catch(e) { message.error("Failed to fetch target databases"); }
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const nextToTables = async () => {
|
||||
if (!sourceConnId || !targetConnId) return message.error("Select connections first");
|
||||
if (!sourceDb) return message.error("Select source database");
|
||||
if (!targetDb) return message.error("Select target database");
|
||||
|
||||
setLoading(true);
|
||||
try {
|
||||
const conn = connections.find(c => c.id === sourceConnId);
|
||||
if (conn) {
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
const tables = (res.data as any[]).map((row: any) => row.Table || row.table || row.TABLE_NAME || Object.values(row)[0]);
|
||||
setAllTables(tables as string[]);
|
||||
setCurrentStep(1);
|
||||
} else {
|
||||
message.error(res.message);
|
||||
}
|
||||
}
|
||||
} catch (e) { message.error("Failed to fetch tables"); }
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
const updateTableOption = (table: string, key: keyof TableOps, value: any) => {
|
||||
setTableOptions(prev => ({
|
||||
...prev,
|
||||
[table]: { ...(prev[table] || { insert: true, update: true, delete: false }), [key]: value }
|
||||
}));
|
||||
};
|
||||
|
||||
const analyzeDiff = async () => {
|
||||
if (selectedTables.length === 0) return;
|
||||
if (!sourceConnId || !targetConnId) return message.error("Select connections first");
|
||||
if (!sourceDb || !targetDb) return message.error("Select databases first");
|
||||
|
||||
setLoading(true);
|
||||
setAnalyzing(true);
|
||||
setDiffTables([]);
|
||||
setTableOptions({});
|
||||
setSyncLogs([]);
|
||||
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
const jobId = `analyze-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
|
||||
jobIdRef.current = jobId;
|
||||
autoScrollRef.current = true;
|
||||
setSyncProgress({ percent: 0, current: 0, total: selectedTables.length, table: '', stage: '差异分析' });
|
||||
|
||||
const config = {
|
||||
sourceConfig: normalizeConnConfig(sConn, sourceDb),
|
||||
targetConfig: normalizeConnConfig(tConn, targetDb),
|
||||
tables: selectedTables,
|
||||
content: syncContent,
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
jobId,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSyncAnalyze(config as any);
|
||||
if (res.success) {
|
||||
const tables = ((res.data as any)?.tables || []) as TableDiffSummary[];
|
||||
setDiffTables(tables);
|
||||
const init: Record<string, TableOps> = {};
|
||||
tables.forEach(t => {
|
||||
const can = !!t.canSync;
|
||||
init[t.table] = {
|
||||
insert: can,
|
||||
update: can,
|
||||
delete: false,
|
||||
selectedInsertPks: [],
|
||||
selectedUpdatePks: [],
|
||||
selectedDeletePks: [],
|
||||
};
|
||||
});
|
||||
setTableOptions(init);
|
||||
message.success("差异分析完成");
|
||||
} else {
|
||||
message.error(res.message || "差异分析失败");
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error("差异分析失败: " + (e?.message || ""));
|
||||
}
|
||||
|
||||
setLoading(false);
|
||||
setAnalyzing(false);
|
||||
};
|
||||
|
||||
const openPreview = async (table: string) => {
|
||||
if (!table) return;
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
|
||||
setPreviewOpen(true);
|
||||
setPreviewTable(table);
|
||||
setPreviewLoading(true);
|
||||
setPreviewData(null);
|
||||
|
||||
const config = {
|
||||
sourceConfig: normalizeConnConfig(sConn, sourceDb),
|
||||
targetConfig: normalizeConnConfig(tConn, targetDb),
|
||||
tables: selectedTables,
|
||||
content: "data",
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSyncPreview(config as any, table, 200);
|
||||
if (res.success) {
|
||||
setPreviewData(res.data);
|
||||
} else {
|
||||
message.error(res.message || "加载差异预览失败");
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error("加载差异预览失败: " + (e?.message || ""));
|
||||
}
|
||||
|
||||
setPreviewLoading(false);
|
||||
};
|
||||
|
||||
const runSync = async () => {
|
||||
if (syncContent !== 'schema' && diffTables.length === 0) {
|
||||
message.error("请先对比差异,再开始同步");
|
||||
return;
|
||||
}
|
||||
if (syncContent !== 'schema' && syncMode === 'full_overwrite') {
|
||||
const ok = await new Promise<boolean>((resolve) => {
|
||||
Modal.confirm({
|
||||
title: '确认全量覆盖',
|
||||
content: '全量覆盖会清空目标表数据后再插入,请确认已备份目标库。',
|
||||
okText: '继续执行',
|
||||
cancelText: '取消',
|
||||
onOk: () => resolve(true),
|
||||
onCancel: () => resolve(false),
|
||||
});
|
||||
});
|
||||
if (!ok) return;
|
||||
}
|
||||
|
||||
setLoading(true);
|
||||
setSyncing(true);
|
||||
setCurrentStep(2);
|
||||
setSyncResult(null);
|
||||
setSyncLogs([]);
|
||||
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
|
||||
const jobId = `sync-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
|
||||
jobIdRef.current = jobId;
|
||||
autoScrollRef.current = true;
|
||||
setSyncProgress({
|
||||
percent: 0,
|
||||
current: 0,
|
||||
total: selectedTables.length,
|
||||
table: '',
|
||||
stage: '准备开始',
|
||||
});
|
||||
|
||||
const config = {
|
||||
sourceConfig: {
|
||||
...sConn.config,
|
||||
port: Number((sConn.config as any).port),
|
||||
password: sConn.config.password || "",
|
||||
useSSH: sConn.config.useSSH || false,
|
||||
ssh: sConn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: sourceDb,
|
||||
},
|
||||
targetConfig: {
|
||||
...tConn.config,
|
||||
port: Number((tConn.config as any).port),
|
||||
password: tConn.config.password || "",
|
||||
useSSH: tConn.config.useSSH || false,
|
||||
ssh: tConn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: targetDb,
|
||||
},
|
||||
tables: selectedTables,
|
||||
content: syncContent,
|
||||
mode: syncMode,
|
||||
autoAddColumns,
|
||||
tableOptions,
|
||||
jobId,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSync(config as any);
|
||||
setSyncResult(res);
|
||||
if (Array.isArray(res?.logs) && res.logs.length > 0) {
|
||||
setSyncLogs(prev => {
|
||||
if (prev.length > 0) return prev;
|
||||
return (res.logs as string[]).map((log) => {
|
||||
const msg = String(log || '').trim();
|
||||
if (msg.includes('致命错误') || msg.includes('失败')) return { level: 'error', message: msg };
|
||||
if (msg.includes('跳过') || msg.includes('警告')) return { level: 'warn', message: msg };
|
||||
return { level: 'info', message: msg };
|
||||
});
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
message.error("Sync execution failed");
|
||||
setSyncResult({ success: false, message: "同步执行失败", logs: [] });
|
||||
}
|
||||
setLoading(false);
|
||||
setSyncing(false);
|
||||
};
|
||||
|
||||
const renderSyncLogItem = (item: SyncLogItem) => {
|
||||
const level = String(item.level || 'info').toLowerCase();
|
||||
const color = level === 'error' ? '#ff4d4f' : (level === 'warn' ? '#faad14' : '#595959');
|
||||
const label = level === 'error' ? '错误' : (level === 'warn' ? '警告' : '信息');
|
||||
const timeText = typeof item.ts === 'number' ? new Date(item.ts).toLocaleTimeString('zh-CN', { hour12: false }) : '';
|
||||
return (
|
||||
<div style={{ display: 'flex', gap: 8, alignItems: 'flex-start' }}>
|
||||
<span style={{ color, flex: '0 0 auto' }}>● {label}</span>
|
||||
{timeText && <span style={{ color: '#8c8c8c', flex: '0 0 auto' }}>{timeText}</span>}
|
||||
<span style={{ whiteSpace: 'pre-wrap', wordBreak: 'break-word' }}>{item.message}</span>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal
|
||||
title="数据同步"
|
||||
open={open}
|
||||
onCancel={() => {
|
||||
if (syncing) {
|
||||
message.warning("同步执行中,暂不支持关闭");
|
||||
return;
|
||||
}
|
||||
onClose();
|
||||
}}
|
||||
width={800}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
closable={!syncing}
|
||||
maskClosable={!syncing}
|
||||
>
|
||||
<Steps current={currentStep} style={{ marginBottom: 24 }}>
|
||||
<Step title="配置源与目标" />
|
||||
<Step title="选择表" />
|
||||
<Step title="执行结果" />
|
||||
</Steps>
|
||||
|
||||
{/* STEP 1: CONFIG */}
|
||||
{currentStep === 0 && (
|
||||
<div>
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={sourceDb} onChange={setSourceDb} showSearch>
|
||||
{sourceDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={targetDb} onChange={setTargetDb} showSearch>
|
||||
{targetDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<Card title="同步选项" style={{ marginTop: 16 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="同步内容">
|
||||
<Select value={syncContent} onChange={setSyncContent}>
|
||||
<Option value="data">仅同步数据</Option>
|
||||
<Option value="schema">仅同步结构</Option>
|
||||
<Option value="both">同步结构 + 数据</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="同步模式">
|
||||
<Select value={syncMode} onChange={setSyncMode} disabled={syncContent === 'schema'}>
|
||||
<Option value="insert_update">增量同步(对比差异,按插入/更新/删除勾选执行)</Option>
|
||||
<Option value="insert_only">仅插入(不对比目标;无主键表将跳过)</Option>
|
||||
<Option value="full_overwrite">全量覆盖(清空目标表后插入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item>
|
||||
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)}>
|
||||
自动补齐目标表缺失字段(仅 MySQL 目标)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
{syncContent !== 'schema' && syncMode === 'full_overwrite' && (
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="全量覆盖会清空目标表数据,请谨慎使用。"
|
||||
/>
|
||||
)}
|
||||
</Form>
|
||||
</Card>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* STEP 2: TABLES */}
|
||||
{currentStep === 1 && (
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 12 }}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
|
||||
显示相同表
|
||||
</Checkbox>
|
||||
</div>
|
||||
<Transfer
|
||||
dataSource={allTables.map(t => ({ key: t, title: t }))}
|
||||
titles={['源表', '已选表']}
|
||||
targetKeys={selectedTables}
|
||||
onChange={(keys) => setSelectedTables(keys as string[])}
|
||||
render={item => item.title}
|
||||
listStyle={{ width: 350, height: 280, marginTop: 0 }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表', notFoundContent: '暂无数据' }}
|
||||
/>
|
||||
|
||||
{diffTables.length > 0 && (
|
||||
<div>
|
||||
<Divider orientation="left">对比结果</Divider>
|
||||
<Table
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey={(r: any) => r.table}
|
||||
dataSource={diffTables.filter(t => {
|
||||
const ins = Number(t.inserts || 0);
|
||||
const upd = Number(t.updates || 0);
|
||||
const del = Number(t.deletes || 0);
|
||||
const same = Number(t.same || 0);
|
||||
const msg = String(t.message || '').trim();
|
||||
const can = !!t.canSync;
|
||||
if (showSameTables) return true;
|
||||
if (!can) return true;
|
||||
if (msg) return true;
|
||||
return ins > 0 || upd > 0 || del > 0 || same === 0;
|
||||
})}
|
||||
columns={[
|
||||
{ title: '表名', dataIndex: 'table', key: 'table', ellipsis: true },
|
||||
{
|
||||
title: '插入',
|
||||
key: 'inserts',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.inserts || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.insert}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}
|
||||
>
|
||||
{Number(r.inserts || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '更新',
|
||||
key: 'updates',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.updates || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.update}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}
|
||||
>
|
||||
{Number(r.updates || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '删除',
|
||||
key: 'deletes',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.deletes || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.delete}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}
|
||||
>
|
||||
{Number(r.deletes || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{ title: '相同', dataIndex: 'same', key: 'same', width: 70, render: (v: any) => Number(v || 0) },
|
||||
{ title: '消息', dataIndex: 'message', key: 'message', ellipsis: true, render: (v: any) => (v ? String(v) : '') },
|
||||
{
|
||||
title: '预览',
|
||||
key: 'preview',
|
||||
width: 80,
|
||||
render: (_: any, r: any) => {
|
||||
const can = !!r.canSync;
|
||||
const hasDiff = Number(r.inserts || 0) + Number(r.updates || 0) + Number(r.deletes || 0) > 0;
|
||||
return (
|
||||
<Button size="small" disabled={!can || !hasDiff || analyzing} onClick={() => openPreview(r.table)}>
|
||||
查看
|
||||
</Button>
|
||||
);
|
||||
}
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* STEP 3: RESULT */}
|
||||
{currentStep === 2 && (
|
||||
<div>
|
||||
<Alert
|
||||
message={syncing ? "正在同步" : (syncResult?.success ? "同步完成" : "同步失败")}
|
||||
description={
|
||||
syncing
|
||||
? `当前阶段:${syncProgress.stage || '执行中'}${syncProgress.table ? `,表:${syncProgress.table}` : ''}`
|
||||
: (syncResult?.message || `成功同步 ${syncResult?.tablesSynced || 0} 张表. 插入: ${syncResult?.rowsInserted || 0}, 更新: ${syncResult?.rowsUpdated || 0}`)
|
||||
}
|
||||
type={syncing ? "info" : (syncResult?.success ? "success" : "error")}
|
||||
showIcon
|
||||
/>
|
||||
|
||||
<div style={{ marginTop: 12 }}>
|
||||
<Progress
|
||||
percent={syncProgress.percent}
|
||||
status={syncing ? "active" : (syncResult?.success ? "success" : "exception")}
|
||||
format={() => `${syncProgress.current}/${syncProgress.total}`}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Divider orientation="left">日志</Divider>
|
||||
<div
|
||||
ref={logBoxRef}
|
||||
onScroll={() => {
|
||||
const el = logBoxRef.current;
|
||||
if (!el) return;
|
||||
const nearBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
|
||||
autoScrollRef.current = nearBottom;
|
||||
}}
|
||||
style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}
|
||||
>
|
||||
{syncLogs.map((item, i: number) => <div key={i}>{renderSyncLogItem(item)}</div>)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ marginTop: 24, textAlign: 'right' }}>
|
||||
{currentStep === 0 && (
|
||||
<Button type="primary" onClick={nextToTables} loading={loading}>下一步</Button>
|
||||
)}
|
||||
{currentStep === 1 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(0)} style={{ marginRight: 8 }}>上一步</Button>
|
||||
<Button onClick={analyzeDiff} loading={loading} disabled={syncContent === 'schema' || selectedTables.length === 0 || analyzing} style={{ marginRight: 8 }}>
|
||||
对比差异
|
||||
</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={runSync}
|
||||
loading={loading}
|
||||
disabled={selectedTables.length === 0 || (syncContent !== 'schema' && diffTables.length === 0)}
|
||||
>
|
||||
开始同步
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
{currentStep === 2 && (
|
||||
<>
|
||||
<Button disabled={syncing} onClick={() => setCurrentStep(1)} style={{ marginRight: 8 }}>继续同步</Button>
|
||||
<Button type="primary" disabled={syncing} onClick={onClose}>关闭</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</Modal>
|
||||
<Drawer
|
||||
title={`差异预览:${previewTable}`}
|
||||
open={previewOpen}
|
||||
onClose={() => { setPreviewOpen(false); setPreviewTable(''); setPreviewData(null); }}
|
||||
width={900}
|
||||
>
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览..." />}
|
||||
{!previewLoading && previewData && (
|
||||
<div>
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message={`插入 ${previewData.totalInserts || 0},更新 ${previewData.totalUpdates || 0},删除 ${previewData.totalDeletes || 0}(预览最多展示 200 条/类型)`}
|
||||
/>
|
||||
<Divider />
|
||||
<Tabs
|
||||
items={[
|
||||
{
|
||||
key: 'insert',
|
||||
label: `插入(${previewData.totalInserts || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Text type="secondary">未勾选任何行表示“同步全部插入差异”;如不想执行插入请在对比结果中取消勾选“插入”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.inserts || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedInsertPks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedInsertPks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.insert }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '数据', dataIndex: 'row', key: 'row', render: (v: any) => <pre style={{ margin: 0, maxHeight: 140, overflow: 'auto' }}>{JSON.stringify(v, null, 2)}</pre> }
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'update',
|
||||
label: `更新(${previewData.totalUpdates || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Text type="secondary">未勾选任何行表示“同步全部更新差异”;如不想执行更新请在对比结果中取消勾选“更新”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.updates || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedUpdatePks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedUpdatePks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.update }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '变更字段', dataIndex: 'changedColumns', key: 'changedColumns', render: (v: any) => Array.isArray(v) ? v.join(', ') : '' },
|
||||
{
|
||||
title: '详情',
|
||||
key: 'detail',
|
||||
width: 80,
|
||||
render: (_: any, r: any) => (
|
||||
<Button size="small" onClick={() => {
|
||||
Modal.info({
|
||||
title: `更新详情:${previewTable} / ${r.pk}`,
|
||||
width: 900,
|
||||
content: (
|
||||
<div style={{ display: 'flex', gap: 12 }}>
|
||||
<div style={{ flex: 1 }}>
|
||||
<Title level={5}>源</Title>
|
||||
<pre style={{ maxHeight: 360, overflow: 'auto', background: '#f5f5f5', padding: 8 }}>{JSON.stringify(r.source, null, 2)}</pre>
|
||||
</div>
|
||||
<div style={{ flex: 1 }}>
|
||||
<Title level={5}>目标</Title>
|
||||
<pre style={{ maxHeight: 360, overflow: 'auto', background: '#f5f5f5', padding: 8 }}>{JSON.stringify(r.target, null, 2)}</pre>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
});
|
||||
}}>查看</Button>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'delete',
|
||||
label: `删除(${previewData.totalDeletes || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Alert type="warning" showIcon message="删除默认不勾选。请确认业务允许后再开启删除操作。" />
|
||||
<Text type="secondary">未勾选任何行表示“同步全部删除差异”;如不想执行删除请在对比结果中取消勾选“删除”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.deletes || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedDeletePks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedDeletePks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.delete }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '数据', dataIndex: 'row', key: 'row', render: (v: any) => <pre style={{ margin: 0, maxHeight: 140, overflow: 'auto' }}>{JSON.stringify(v, null, 2)}</pre> }
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</Drawer>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default DataSyncModal;
|
||||
@@ -1,9 +1,9 @@
|
||||
import React, { useEffect, useState, useCallback } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import { message } from 'antd';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { MySQLQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid from './DataGrid';
|
||||
import { DBQuery, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
|
||||
const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [data, setData] = useState<any[]>([]);
|
||||
@@ -11,11 +11,15 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [pkColumns, setPkColumns] = useState<string[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const { connections, addSqlLog } = useStore();
|
||||
const fetchSeqRef = useRef(0);
|
||||
const countSeqRef = useRef(0);
|
||||
const countKeyRef = useRef<string>('');
|
||||
|
||||
const [pagination, setPagination] = useState({
|
||||
current: 1,
|
||||
pageSize: 100,
|
||||
total: 0
|
||||
total: 0,
|
||||
totalKnown: false
|
||||
});
|
||||
|
||||
const [sortInfo, setSortInfo] = useState<{ columnKey: string, order: string } | null>(null);
|
||||
@@ -24,11 +28,12 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [filterConditions, setFilterConditions] = useState<any[]>([]);
|
||||
|
||||
const fetchData = useCallback(async (page = pagination.current, size = pagination.pageSize) => {
|
||||
const seq = ++fetchSeqRef.current;
|
||||
setLoading(true);
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
message.error("Connection not found");
|
||||
setLoading(false);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -41,6 +46,20 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
|
||||
const quoteIdentPart = (ident: string) => {
|
||||
if (!ident) return ident;
|
||||
if (config.type === 'mysql') return `\`${ident.replace(/`/g, '``')}\``;
|
||||
return `"${ident.replace(/"/g, '""')}"`;
|
||||
};
|
||||
const quoteQualifiedIdent = (ident: string) => {
|
||||
const raw = (ident || '').trim();
|
||||
if (!raw) return raw;
|
||||
const parts = raw.split('.').filter(Boolean);
|
||||
if (parts.length <= 1) return quoteIdentPart(raw);
|
||||
return parts.map(quoteIdentPart).join('.');
|
||||
};
|
||||
const escapeLiteral = (val: string) => val.replace(/'/g, "''");
|
||||
|
||||
const dbName = tab.dbName || '';
|
||||
const tableName = tab.tableName || '';
|
||||
|
||||
@@ -48,47 +67,37 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
filterConditions.forEach(cond => {
|
||||
if (cond.column && cond.value) {
|
||||
if (cond.op === 'LIKE') {
|
||||
whereParts.push(`\`${cond.column}\` LIKE '%${cond.value}%'`);
|
||||
whereParts.push(`${quoteIdentPart(cond.column)} LIKE '%${escapeLiteral(cond.value)}%'`);
|
||||
} else {
|
||||
whereParts.push(`\`${cond.column}\` ${cond.op} '${cond.value}'`);
|
||||
whereParts.push(`${quoteIdentPart(cond.column)} ${cond.op} '${escapeLiteral(cond.value)}'`);
|
||||
}
|
||||
}
|
||||
});
|
||||
const whereSQL = whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : "";
|
||||
|
||||
const countSql = `SELECT COUNT(*) as total FROM \`${tableName}\` ${whereSQL}`;
|
||||
const countSql = `SELECT COUNT(*) as total FROM ${quoteQualifiedIdent(tableName)} ${whereSQL}`;
|
||||
|
||||
let sql = `SELECT * FROM \`${tableName}\` ${whereSQL}`;
|
||||
let sql = `SELECT * FROM ${quoteQualifiedIdent(tableName)} ${whereSQL}`;
|
||||
if (sortInfo && sortInfo.order) {
|
||||
sql += ` ORDER BY \`${sortInfo.columnKey}\` ${sortInfo.order === 'ascend' ? 'ASC' : 'DESC'}`;
|
||||
sql += ` ORDER BY ${quoteIdentPart(sortInfo.columnKey)} ${sortInfo.order === 'ascend' ? 'ASC' : 'DESC'}`;
|
||||
}
|
||||
const offset = (page - 1) * size;
|
||||
sql += ` LIMIT ${size} OFFSET ${offset}`;
|
||||
// 大表性能:打开表不阻塞在 COUNT(*),先通过多取 1 条判断是否还有下一页;总数在后台统计并异步回填。
|
||||
sql += ` LIMIT ${size + 1} OFFSET ${offset}`;
|
||||
|
||||
const startTime = Date.now();
|
||||
try {
|
||||
const pCount = MySQLQuery(config as any, dbName, countSql);
|
||||
const pData = MySQLQuery(config as any, dbName, sql);
|
||||
|
||||
let pCols = null;
|
||||
const pData = DBQuery(config as any, dbName, sql);
|
||||
|
||||
let pCols: Promise<any> | null = null;
|
||||
if (pkColumns.length === 0) {
|
||||
pCols = DBGetColumns(config as any, dbName, tableName);
|
||||
}
|
||||
|
||||
const [resCount, resData] = await Promise.all([pCount, pData]);
|
||||
const resData = await pData;
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
// Log Execution
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount.success ? 'success' : 'error',
|
||||
duration: duration / 2, // Estimate
|
||||
message: resCount.success ? '' : resCount.message,
|
||||
dbName
|
||||
});
|
||||
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-data`,
|
||||
timestamp: Date.now(),
|
||||
@@ -108,28 +117,81 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
}
|
||||
}
|
||||
|
||||
let totalRecords = 0;
|
||||
if (resCount.success && Array.isArray(resCount.data) && resCount.data.length > 0) {
|
||||
totalRecords = Number(resCount.data[0]['total']);
|
||||
}
|
||||
|
||||
if (resData.success) {
|
||||
let resultData = resData.data as any[];
|
||||
if (!Array.isArray(resultData)) resultData = [];
|
||||
|
||||
const hasMore = resultData.length > size;
|
||||
if (hasMore) resultData = resultData.slice(0, size);
|
||||
|
||||
let fieldNames = resData.fields || [];
|
||||
if (fieldNames.length === 0 && resultData.length > 0) {
|
||||
fieldNames = Object.keys(resultData[0]);
|
||||
}
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
setColumnNames(fieldNames);
|
||||
|
||||
setData(resultData.map((row: any, i: number) => ({ ...row, key: `row-${i}` })));
|
||||
|
||||
setPagination(prev => ({ ...prev, current: page, pageSize: size, total: totalRecords }));
|
||||
resultData.forEach((row: any, i: number) => {
|
||||
if (row && typeof row === 'object') row[GONAVI_ROW_KEY] = `row-${offset + i}`;
|
||||
});
|
||||
setData(resultData);
|
||||
const countKey = `${tab.connectionId}|${dbName}|${tableName}|${whereSQL}`;
|
||||
const derivedTotalKnown = !hasMore;
|
||||
const derivedTotal = derivedTotalKnown ? offset + resultData.length : page * size + 1;
|
||||
if (derivedTotalKnown) countKeyRef.current = countKey;
|
||||
|
||||
setPagination(prev => {
|
||||
if (derivedTotalKnown) {
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: true };
|
||||
}
|
||||
if (prev.totalKnown && countKeyRef.current === countKey) {
|
||||
return { ...prev, current: page, pageSize: size };
|
||||
}
|
||||
return { ...prev, current: page, pageSize: size, total: derivedTotal, totalKnown: false };
|
||||
});
|
||||
|
||||
if (!derivedTotalKnown) {
|
||||
if (countKeyRef.current !== countKey) {
|
||||
countKeyRef.current = countKey;
|
||||
const countSeq = ++countSeqRef.current;
|
||||
const countStart = Date.now();
|
||||
|
||||
DBQuery(config as any, dbName, countSql)
|
||||
.then((resCount: any) => {
|
||||
const countDuration = Date.now() - countStart;
|
||||
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-count`,
|
||||
timestamp: Date.now(),
|
||||
sql: countSql,
|
||||
status: resCount.success ? 'success' : 'error',
|
||||
duration: countDuration,
|
||||
message: resCount.success ? '' : resCount.message,
|
||||
dbName
|
||||
});
|
||||
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
|
||||
if (!resCount.success) return;
|
||||
if (!Array.isArray(resCount.data) || resCount.data.length === 0) return;
|
||||
|
||||
const total = Number(resCount.data[0]?.['total']);
|
||||
if (!Number.isFinite(total) || total < 0) return;
|
||||
|
||||
setPagination(prev => ({ ...prev, total, totalKnown: true }));
|
||||
})
|
||||
.catch(() => {
|
||||
if (countSeqRef.current !== countSeq) return;
|
||||
if (countKeyRef.current !== countKey) return;
|
||||
// 统计失败不影响主流程,不弹窗;可在日志里查看。
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
message.error(resData.message);
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
message.error("Error fetching data: " + e.message);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-error`,
|
||||
@@ -141,7 +203,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
dbName
|
||||
});
|
||||
}
|
||||
setLoading(false);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns.length]);
|
||||
// Depend on pkColumns.length to avoid loop? No, pkColumns is updated inside.
|
||||
// Actually, 'pkColumns' state shouldn't trigger re-fetch.
|
||||
@@ -151,7 +213,10 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
// So it's fine.
|
||||
|
||||
// Handlers memoized
|
||||
const handleReload = useCallback(() => fetchData(), [fetchData]);
|
||||
const handleReload = useCallback(() => {
|
||||
countKeyRef.current = '';
|
||||
fetchData(pagination.current, pagination.pageSize);
|
||||
}, [fetchData, pagination.current, pagination.pageSize]);
|
||||
const handleSort = useCallback((field: string, order: string) => setSortInfo({ columnKey: field, order }), []);
|
||||
const handlePageChange = useCallback((page: number, size: number) => fetchData(page, size), [fetchData]);
|
||||
const handleToggleFilter = useCallback(() => setShowFilter(prev => !prev), []);
|
||||
@@ -183,4 +248,4 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
);
|
||||
};
|
||||
|
||||
export default DataViewer;
|
||||
export default DataViewer;
|
||||
|
||||
@@ -5,8 +5,8 @@ import { PlayCircleOutlined, SaveOutlined, FormatPainterOutlined, SettingOutline
|
||||
import { format } from 'sql-formatter';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { MySQLQuery, DBGetTables, DBGetAllColumns, MySQLGetDatabases, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid from './DataGrid';
|
||||
import { DBQuery, DBGetTables, DBGetAllColumns, DBGetDatabases, DBGetColumns } from '../../wailsjs/go/app/App';
|
||||
import DataGrid, { GONAVI_ROW_KEY } from './DataGrid';
|
||||
|
||||
const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [query, setQuery] = useState(tab.query || 'SELECT * FROM ');
|
||||
@@ -60,7 +60,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
|
||||
const res = await MySQLGetDatabases(config as any);
|
||||
const res = await DBGetDatabases(config as any);
|
||||
if (res.success && Array.isArray(res.data)) {
|
||||
const dbs = res.data.map((row: any) => row.Database || row.database);
|
||||
setDbList(dbs);
|
||||
@@ -252,7 +252,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
|
||||
const startTime = Date.now();
|
||||
try {
|
||||
const res = await MySQLQuery(config as any, currentDb, query);
|
||||
const res = await DBQuery(config as any, currentDb, query);
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
addSqlLog({
|
||||
@@ -271,7 +271,11 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (res.data.length > 0) {
|
||||
const cols = Object.keys(res.data[0]);
|
||||
setColumnNames(cols);
|
||||
setResults(res.data.map((row: any, i: number) => ({ ...row, key: i })));
|
||||
const rows = res.data as any[];
|
||||
rows.forEach((row: any, i: number) => {
|
||||
if (row && typeof row === 'object') row[GONAVI_ROW_KEY] = i;
|
||||
});
|
||||
setResults(rows);
|
||||
} else {
|
||||
message.info('查询执行成功,但没有返回结果。');
|
||||
setResults([]);
|
||||
@@ -421,4 +425,4 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
);
|
||||
};
|
||||
|
||||
export default QueryEditor;
|
||||
export default QueryEditor;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import React, { useEffect, useState, useMemo, useRef } from 'react';
|
||||
import { Tree, message, Dropdown, MenuProps, Input, Button, Modal, Form, Badge } from 'antd';
|
||||
import {
|
||||
DatabaseOutlined,
|
||||
TableOutlined,
|
||||
ConsoleSqlOutlined,
|
||||
import {
|
||||
DatabaseOutlined,
|
||||
TableOutlined,
|
||||
ConsoleSqlOutlined,
|
||||
HddOutlined,
|
||||
FolderOpenOutlined,
|
||||
FileTextOutlined,
|
||||
@@ -23,10 +23,10 @@ import {
|
||||
ReloadOutlined,
|
||||
DeleteOutlined,
|
||||
DisconnectOutlined
|
||||
} from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { SavedConnection } from '../types';
|
||||
import { MySQLGetDatabases, MySQLGetTables, MySQLShowCreateTable, ExportTable, OpenSQLFile, CreateDatabase } from '../../wailsjs/go/app/App';
|
||||
} from '@ant-design/icons';
|
||||
import { useStore } from '../store';
|
||||
import { SavedConnection } from '../types';
|
||||
import { DBGetDatabases, DBGetTables, DBShowCreateTable, ExportTable, OpenSQLFile, CreateDatabase } from '../../wailsjs/go/app/App';
|
||||
|
||||
const { Search } = Input;
|
||||
|
||||
@@ -116,21 +116,21 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
});
|
||||
};
|
||||
|
||||
const loadDatabases = async (node: any) => {
|
||||
const conn = node.dataRef as SavedConnection;
|
||||
const config = {
|
||||
...conn.config,
|
||||
const loadDatabases = async (node: any) => {
|
||||
const conn = node.dataRef as SavedConnection;
|
||||
const config = {
|
||||
...conn.config,
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || "",
|
||||
database: conn.config.database || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
const res = await MySQLGetDatabases(config as any);
|
||||
if (res.success) {
|
||||
setConnectionStates(prev => ({ ...prev, [conn.id]: 'success' }));
|
||||
let dbs = (res.data as any[]).map((row: any) => ({
|
||||
title: row.Database || row.database,
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
const res = await DBGetDatabases(config as any);
|
||||
if (res.success) {
|
||||
setConnectionStates(prev => ({ ...prev, [conn.id]: 'success' }));
|
||||
let dbs = (res.data as any[]).map((row: any) => ({
|
||||
title: row.Database || row.database,
|
||||
key: `${conn.id}-${row.Database || row.database}`,
|
||||
icon: <DatabaseOutlined />,
|
||||
type: 'database' as const,
|
||||
@@ -150,9 +150,9 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
}
|
||||
};
|
||||
|
||||
const loadTables = async (node: any) => {
|
||||
const conn = node.dataRef; // has dbName
|
||||
const dbName = conn.dbName;
|
||||
const loadTables = async (node: any) => {
|
||||
const conn = node.dataRef; // has dbName
|
||||
const dbName = conn.dbName;
|
||||
const key = node.key;
|
||||
|
||||
const dbQueries = savedQueries.filter(q => q.connectionId === conn.id && q.dbName === dbName);
|
||||
@@ -178,13 +178,13 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
port: Number(conn.config.port),
|
||||
password: conn.config.password || "",
|
||||
database: conn.config.database || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
const res = await MySQLGetTables(config as any, conn.dbName);
|
||||
if (res.success) {
|
||||
setConnectionStates(prev => ({ ...prev, [key as string]: 'success' }));
|
||||
const tables = (res.data as any[]).map((row: any) => {
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
};
|
||||
const res = await DBGetTables(config as any, conn.dbName);
|
||||
if (res.success) {
|
||||
setConnectionStates(prev => ({ ...prev, [key as string]: 'success' }));
|
||||
const tables = (res.data as any[]).map((row: any) => {
|
||||
const tableName = Object.values(row)[0] as string;
|
||||
return {
|
||||
title: tableName,
|
||||
@@ -345,13 +345,13 @@ const Sidebar: React.FC<{ onEditConnection?: (conn: SavedConnection) => void }>
|
||||
}
|
||||
};
|
||||
|
||||
const handleCopyStructure = async (node: any) => {
|
||||
const { config, dbName, tableName } = node.dataRef;
|
||||
const res = await MySQLShowCreateTable({
|
||||
...config,
|
||||
port: Number(config.port),
|
||||
password: config.password || "",
|
||||
database: config.database || "",
|
||||
const handleCopyStructure = async (node: any) => {
|
||||
const { config, dbName, tableName } = node.dataRef;
|
||||
const res = await DBShowCreateTable({
|
||||
...config,
|
||||
port: Number(config.port),
|
||||
password: config.password || "",
|
||||
database: config.database || "",
|
||||
useSSH: config.useSSH || false,
|
||||
ssh: config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" }
|
||||
} as any, dbName, tableName);
|
||||
|
||||
@@ -7,7 +7,7 @@ import { CSS } from '@dnd-kit/utilities';
|
||||
import { Resizable } from 'react-resizable';
|
||||
import { TabData, ColumnDefinition, IndexDefinition, ForeignKeyDefinition, TriggerDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetColumns, DBGetIndexes, MySQLQuery, DBGetForeignKeys, DBGetTriggers, DBShowCreateTable } from '../../wailsjs/go/app/App';
|
||||
import { DBGetColumns, DBGetIndexes, DBQuery, DBGetForeignKeys, DBGetTriggers, DBShowCreateTable } from '../../wailsjs/go/app/App';
|
||||
|
||||
// Need styles for react-resizable
|
||||
import 'react-resizable/css/styles.css';
|
||||
@@ -518,15 +518,15 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
}
|
||||
};
|
||||
|
||||
const handleExecuteSave = async () => {
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) return;
|
||||
const config = { ...conn.config, port: Number(conn.config.port), password: conn.config.password || "", database: conn.config.database || "", useSSH: conn.config.useSSH || false, ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" } };
|
||||
const res = await MySQLQuery(config as any, tab.dbName || '', previewSql);
|
||||
if (res.success) {
|
||||
message.success(isNewTable ? "表创建成功!" : "表结构修改成功!");
|
||||
setIsPreviewOpen(false);
|
||||
if (!isNewTable) {
|
||||
const handleExecuteSave = async () => {
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) return;
|
||||
const config = { ...conn.config, port: Number(conn.config.port), password: conn.config.password || "", database: conn.config.database || "", useSSH: conn.config.useSSH || false, ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" } };
|
||||
const res = await DBQuery(config as any, tab.dbName || '', previewSql);
|
||||
if (res.success) {
|
||||
message.success(isNewTable ? "表创建成功!" : "表结构修改成功!");
|
||||
setIsPreviewOpen(false);
|
||||
if (!isNewTable) {
|
||||
fetchData();
|
||||
} else {
|
||||
// TODO: Close tab or reload sidebar?
|
||||
@@ -730,4 +730,4 @@ const TableDesigner: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
);
|
||||
};
|
||||
|
||||
export default TableDesigner;
|
||||
export default TableDesigner;
|
||||
|
||||
7
frontend/wailsjs/go/app/App.d.ts
vendored
7
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -1,6 +1,7 @@
|
||||
// Cynhyrchwyd y ffeil hon yn awtomatig. PEIDIWCH Â MODIWL
|
||||
// This file is automatically generated. DO NOT EDIT
|
||||
import {connection} from '../models';
|
||||
import {sync} from '../models';
|
||||
|
||||
export function ApplyChanges(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:connection.ChangeSet):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -26,6 +27,12 @@ export function DBQuery(arg1:connection.ConnectionConfig,arg2:string,arg3:string
|
||||
|
||||
export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
|
||||
export function DataSyncAnalyze(arg1:sync.SyncConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSyncPreview(arg1:sync.SyncConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportData(arg1:Array<Record<string, any>>,arg2:Array<string>,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -50,6 +50,18 @@ export function DBShowCreateTable(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DBShowCreateTable'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function DataSync(arg1) {
|
||||
return window['go']['app']['App']['DataSync'](arg1);
|
||||
}
|
||||
|
||||
export function DataSyncAnalyze(arg1) {
|
||||
return window['go']['app']['App']['DataSyncAnalyze'](arg1);
|
||||
}
|
||||
|
||||
export function DataSyncPreview(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DataSyncPreview'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportData(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportData'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
@@ -77,6 +77,9 @@ export namespace connection {
|
||||
database: string;
|
||||
useSSH: boolean;
|
||||
ssh: SSHConfig;
|
||||
driver?: string;
|
||||
dsn?: string;
|
||||
timeout?: number;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new ConnectionConfig(source);
|
||||
@@ -92,6 +95,9 @@ export namespace connection {
|
||||
this.database = source["database"];
|
||||
this.useSSH = source["useSSH"];
|
||||
this.ssh = this.convertValues(source["ssh"], SSHConfig);
|
||||
this.driver = source["driver"];
|
||||
this.dsn = source["dsn"];
|
||||
this.timeout = source["timeout"];
|
||||
}
|
||||
|
||||
convertValues(a: any, classs: any, asMap: boolean = false): any {
|
||||
@@ -134,3 +140,98 @@ export namespace connection {
|
||||
|
||||
}
|
||||
|
||||
export namespace sync {
|
||||
|
||||
export class TableOptions {
|
||||
insert?: boolean;
|
||||
update?: boolean;
|
||||
delete?: boolean;
|
||||
selectedInsertPks?: string[];
|
||||
selectedUpdatePks?: string[];
|
||||
selectedDeletePks?: string[];
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new TableOptions(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.insert = source["insert"];
|
||||
this.update = source["update"];
|
||||
this.delete = source["delete"];
|
||||
this.selectedInsertPks = source["selectedInsertPks"];
|
||||
this.selectedUpdatePks = source["selectedUpdatePks"];
|
||||
this.selectedDeletePks = source["selectedDeletePks"];
|
||||
}
|
||||
}
|
||||
export class SyncConfig {
|
||||
sourceConfig: connection.ConnectionConfig;
|
||||
targetConfig: connection.ConnectionConfig;
|
||||
tables: string[];
|
||||
content?: string;
|
||||
mode: string;
|
||||
jobId?: string;
|
||||
autoAddColumns?: boolean;
|
||||
tableOptions?: Record<string, TableOptions>;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new SyncConfig(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.sourceConfig = this.convertValues(source["sourceConfig"], connection.ConnectionConfig);
|
||||
this.targetConfig = this.convertValues(source["targetConfig"], connection.ConnectionConfig);
|
||||
this.tables = source["tables"];
|
||||
this.content = source["content"];
|
||||
this.mode = source["mode"];
|
||||
this.jobId = source["jobId"];
|
||||
this.autoAddColumns = source["autoAddColumns"];
|
||||
this.tableOptions = this.convertValues(source["tableOptions"], TableOptions, true);
|
||||
}
|
||||
|
||||
convertValues(a: any, classs: any, asMap: boolean = false): any {
|
||||
if (!a) {
|
||||
return a;
|
||||
}
|
||||
if (a.slice && a.map) {
|
||||
return (a as any[]).map(elem => this.convertValues(elem, classs));
|
||||
} else if ("object" === typeof a) {
|
||||
if (asMap) {
|
||||
for (const key of Object.keys(a)) {
|
||||
a[key] = new classs(a[key]);
|
||||
}
|
||||
return a;
|
||||
}
|
||||
return new classs(a);
|
||||
}
|
||||
return a;
|
||||
}
|
||||
}
|
||||
export class SyncResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
logs: string[];
|
||||
tablesSynced: number;
|
||||
rowsInserted: number;
|
||||
rowsUpdated: number;
|
||||
rowsDeleted: number;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new SyncResult(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.success = source["success"];
|
||||
this.message = source["message"];
|
||||
this.logs = source["logs"];
|
||||
this.tablesSynced = source["tablesSynced"];
|
||||
this.rowsInserted = source["rowsInserted"];
|
||||
this.rowsUpdated = source["rowsUpdated"];
|
||||
this.rowsDeleted = source["rowsDeleted"];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
4
go.mod
4
go.mod
@@ -3,8 +3,11 @@ module GoNavi-Wails
|
||||
go 1.24.3
|
||||
|
||||
require (
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3
|
||||
gitee.com/chunanyong/dm v1.8.22
|
||||
github.com/go-sql-driver/mysql v1.9.3
|
||||
github.com/lib/pq v1.11.1
|
||||
github.com/sijms/go-ora/v2 v2.9.0
|
||||
github.com/wailsapp/wails/v2 v2.11.0
|
||||
golang.org/x/crypto v0.47.0
|
||||
modernc.org/sqlite v1.44.3
|
||||
@@ -16,6 +19,7 @@ require (
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/godbus/dbus/v5 v5.1.0 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/jchv/go-winloader v0.0.0-20210711035445-715c2860da7e // indirect
|
||||
|
||||
10
go.sum
10
go.sum
@@ -1,5 +1,9 @@
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3 h1:QjslQNaH5Nuap5i4nijS0OYV6GMk5kqrAmgU90zBKd4=
|
||||
gitea.com/kingbase/gokb v0.0.0-20201021123113-29bd62a876c3/go.mod h1:7lH5A1jzCXD9Nl16DzaBUOfDAT8NPrDmZwKu1p5wf94=
|
||||
gitee.com/chunanyong/dm v1.8.22 h1:H7fsrnUIvEA0jlDWew7vwELry1ff+tLMIu2Fk2cIBSg=
|
||||
gitee.com/chunanyong/dm v1.8.22/go.mod h1:EPRJnuPFgbyOFgJ0TRYCTGzhq+ZT4wdyaj/GW/LLcNg=
|
||||
github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY=
|
||||
github.com/bep/debounce v1.2.1/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
@@ -12,6 +16,9 @@ github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
|
||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
@@ -61,6 +68,8 @@ github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
|
||||
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
|
||||
github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
|
||||
github.com/sijms/go-ora/v2 v2.9.0/go.mod h1:QgFInVi3ZWyqAiJwzBQA+nbKYKH77tdp1PYoCqhR2dU=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/tkrajina/go-reflector v0.5.8 h1:yPADHrwmUbMq4RGEyaOUpz2H90sRsETNVpjzo3DLVQQ=
|
||||
@@ -97,6 +106,7 @@ golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
|
||||
@@ -2,11 +2,18 @@ package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
)
|
||||
|
||||
// App struct
|
||||
@@ -27,55 +34,149 @@ func NewApp() *App {
|
||||
// so we can call the runtime methods
|
||||
func (a *App) Startup(ctx context.Context) {
|
||||
a.ctx = ctx
|
||||
logger.Init()
|
||||
logger.Infof("应用启动完成")
|
||||
}
|
||||
|
||||
// Shutdown is called when the app terminates
|
||||
func (a *App) Shutdown(ctx context.Context) {
|
||||
logger.Infof("应用开始关闭,准备释放资源")
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
for _, dbInst := range a.dbCache {
|
||||
dbInst.Close()
|
||||
if err := dbInst.Close(); err != nil {
|
||||
logger.Error(err, "关闭数据库连接失败")
|
||||
}
|
||||
}
|
||||
logger.Infof("资源释放完成,应用已关闭")
|
||||
logger.Close()
|
||||
}
|
||||
|
||||
// Helper: Generate a unique key for the connection config
|
||||
func getCacheKey(config connection.ConnectionConfig) string {
|
||||
sshPart := ""
|
||||
if config.UseSSH {
|
||||
sshPart = fmt.Sprintf("|ssh:%s@%s:%d|%s", config.SSH.User, config.SSH.Host, config.SSH.Port, config.SSH.KeyPath)
|
||||
// We don't include SSH password in key string to avoid log exposure if key is logged,
|
||||
// but for cache uniqueness it is critical.
|
||||
// Let's include a hash or just the value if we assume internal use.
|
||||
// Including value for correctness.
|
||||
sshPart += "|" + config.SSH.Password
|
||||
if !config.UseSSH {
|
||||
config.SSH = connection.SSHConfig{}
|
||||
}
|
||||
return fmt.Sprintf("%s|%s:%s@%s:%d|%s%s", config.Type, config.User, config.Password, config.Host, config.Port, config.Database, sshPart)
|
||||
// 保持与驱动默认一致,避免同一连接被重复缓存
|
||||
if config.Type == "postgres" && config.Database == "" {
|
||||
config.Database = "postgres"
|
||||
}
|
||||
|
||||
b, _ := json.Marshal(config)
|
||||
sum := sha256.Sum256(b)
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
func wrapConnectError(config connection.ConnectionConfig, err error) error {
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
var netErr net.Error
|
||||
if errors.Is(err, context.DeadlineExceeded) || (errors.As(err, &netErr) && netErr.Timeout()) {
|
||||
dbName := config.Database
|
||||
if dbName == "" {
|
||||
dbName = "(default)"
|
||||
}
|
||||
err = fmt.Errorf("数据库连接超时:%s %s:%d/%s:%w", config.Type, config.Host, config.Port, dbName, err)
|
||||
}
|
||||
|
||||
return withLogHint{err: err, logPath: logger.Path()}
|
||||
}
|
||||
|
||||
type withLogHint struct {
|
||||
err error
|
||||
logPath string
|
||||
}
|
||||
|
||||
func (e withLogHint) Error() string {
|
||||
if strings.TrimSpace(e.logPath) == "" {
|
||||
return e.err.Error()
|
||||
}
|
||||
return fmt.Sprintf("%s(详细日志:%s)", e.err.Error(), e.logPath)
|
||||
}
|
||||
|
||||
func (e withLogHint) Unwrap() error {
|
||||
return e.err
|
||||
}
|
||||
|
||||
func formatConnSummary(config connection.ConnectionConfig) string {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
|
||||
dbName := config.Database
|
||||
if strings.TrimSpace(dbName) == "" {
|
||||
dbName = "(default)"
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString(fmt.Sprintf("类型=%s 地址=%s:%d 数据库=%s 用户=%s 超时=%ds",
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds))
|
||||
|
||||
if config.UseSSH {
|
||||
b.WriteString(fmt.Sprintf(" SSH=%s:%d 用户=%s", config.SSH.Host, config.SSH.Port, config.SSH.User))
|
||||
}
|
||||
|
||||
if config.Type == "custom" {
|
||||
driver := strings.TrimSpace(config.Driver)
|
||||
if driver == "" {
|
||||
driver = "(未配置)"
|
||||
}
|
||||
dsnState := "未配置"
|
||||
if strings.TrimSpace(config.DSN) != "" {
|
||||
dsnState = fmt.Sprintf("已配置(长度=%d)", len(config.DSN))
|
||||
}
|
||||
b.WriteString(fmt.Sprintf(" 驱动=%s DSN=%s", driver, dsnState))
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Helper: Get or create a database connection
|
||||
func (a *App) getDatabase(config connection.ConnectionConfig) (db.Database, error) {
|
||||
key := getCacheKey(config)
|
||||
shortKey := key
|
||||
if len(shortKey) > 12 {
|
||||
shortKey = shortKey[:12]
|
||||
}
|
||||
if config.UseSSH && config.Type != "mysql" {
|
||||
logger.Warnf("当前仅 MySQL 支持内置 SSH 直连,其他类型请使用本地端口转发:%s", formatConnSummary(config))
|
||||
}
|
||||
logger.Infof("获取数据库连接:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
|
||||
a.mu.Lock()
|
||||
defer a.mu.Unlock()
|
||||
|
||||
if dbInst, ok := a.dbCache[key]; ok {
|
||||
logger.Infof("命中连接缓存,开始检测可用性:缓存Key=%s", shortKey)
|
||||
if err := dbInst.Ping(); err == nil {
|
||||
logger.Infof("缓存连接可用:缓存Key=%s", shortKey)
|
||||
return dbInst, nil
|
||||
} else {
|
||||
logger.Error(err, "缓存连接不可用,准备重建:缓存Key=%s", shortKey)
|
||||
}
|
||||
if err := dbInst.Close(); err != nil {
|
||||
logger.Error(err, "关闭失效缓存连接失败:缓存Key=%s", shortKey)
|
||||
}
|
||||
dbInst.Close()
|
||||
delete(a.dbCache, key)
|
||||
}
|
||||
|
||||
logger.Infof("创建数据库驱动实例:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
dbInst, err := db.NewDatabase(config.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "创建数据库驱动实例失败:类型=%s 缓存Key=%s", config.Type, shortKey)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dbInst.Connect(config); err != nil {
|
||||
return nil, err
|
||||
wrapped := wrapConnectError(config, err)
|
||||
logger.Error(wrapped, "建立数据库连接失败:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
return nil, wrapped
|
||||
}
|
||||
|
||||
a.dbCache[key] = dbInst
|
||||
logger.Infof("数据库连接成功并写入缓存:%s 缓存Key=%s", formatConnSummary(config), shortKey)
|
||||
return dbInst, nil
|
||||
}
|
||||
|
||||
56
internal/app/db_context.go
Normal file
56
internal/app/db_context.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
func normalizeRunConfig(config connection.ConnectionConfig, dbName string) connection.ConnectionConfig {
|
||||
runConfig := config
|
||||
name := strings.TrimSpace(dbName)
|
||||
if name == "" {
|
||||
return runConfig
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "mysql", "postgres", "kingbase":
|
||||
// 这些类型的 dbName 表示“数据库”,需要写入连接配置以选择目标库。
|
||||
runConfig.Database = name
|
||||
case "dameng":
|
||||
// 达梦使用 schema 参数,沿用现有行为:dbName 表示 schema。
|
||||
runConfig.Database = name
|
||||
default:
|
||||
// oracle: dbName 表示 schema/owner,不能覆盖 config.Database(服务名)
|
||||
// sqlite: 无需设置 Database
|
||||
// custom: 语义不明确,避免污染缓存 key
|
||||
}
|
||||
|
||||
return runConfig
|
||||
}
|
||||
|
||||
func normalizeSchemaAndTable(config connection.ConnectionConfig, dbName string, tableName string) (string, string) {
|
||||
rawTable := strings.TrimSpace(tableName)
|
||||
rawDB := strings.TrimSpace(dbName)
|
||||
if rawTable == "" {
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
if schema != "" && table != "" {
|
||||
return schema, table
|
||||
}
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(config.Type)) {
|
||||
case "postgres", "kingbase":
|
||||
// PG/金仓:dbName 在 UI 里是“数据库”,schema 需从 tableName 或使用默认 public。
|
||||
return "public", rawTable
|
||||
default:
|
||||
// MySQL:dbName 表示数据库;Oracle/达梦:dbName 表示 schema/owner。
|
||||
return rawDB, rawTable
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
)
|
||||
|
||||
// Generic DB Methods
|
||||
@@ -13,18 +14,22 @@ func (a *App) DBConnect(config connection.ConnectionConfig) connection.QueryResu
|
||||
// getDatabase checks cache and Pings. If valid, reuses. If not, connects.
|
||||
_, err := a.getDatabase(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBConnect 连接失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
logger.Infof("DBConnect 连接成功:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
func (a *App) TestConnection(config connection.ConnectionConfig) connection.QueryResult {
|
||||
_, err := a.getDatabase(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "TestConnection 连接测试失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
logger.Infof("TestConnection 连接测试成功:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: true, Message: "连接成功"}
|
||||
}
|
||||
|
||||
@@ -37,9 +42,11 @@ func (a *App) CreateDatabase(config connection.ConnectionConfig, dbName string)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("CREATE DATABASE `%%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci", dbName)
|
||||
escapedDbName := strings.ReplaceAll(dbName, "`", "``")
|
||||
query := fmt.Sprintf("CREATE DATABASE `%s` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci", escapedDbName)
|
||||
if runConfig.Type == "postgres" {
|
||||
query = fmt.Sprintf("CREATE DATABASE \"%%s\"", dbName)
|
||||
escapedDbName = strings.ReplaceAll(dbName, `"`, `""`)
|
||||
query = fmt.Sprintf("CREATE DATABASE \"%s\"", escapedDbName)
|
||||
}
|
||||
|
||||
_, err = dbInst.Exec(query)
|
||||
@@ -76,13 +83,11 @@ func (a *App) MySQLShowCreateTable(config connection.ConnectionConfig, dbName st
|
||||
}
|
||||
|
||||
func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
@@ -90,26 +95,39 @@ func (a *App) DBQuery(config connection.ConnectionConfig, dbName string, query s
|
||||
if strings.HasPrefix(lowerQuery, "select") || strings.HasPrefix(lowerQuery, "show") || strings.HasPrefix(lowerQuery, "describe") || strings.HasPrefix(lowerQuery, "explain") {
|
||||
data, columns, err := dbInst.Query(query)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 查询失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: data, Fields: columns}
|
||||
} else {
|
||||
affected, err := dbInst.Exec(query)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBQuery 执行失败:%s SQL片段=%q", formatConnSummary(runConfig), sqlSnippet(query))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Data: map[string]int64{"affectedRows": affected}}
|
||||
}
|
||||
}
|
||||
|
||||
func sqlSnippet(query string) string {
|
||||
q := strings.TrimSpace(query)
|
||||
const max = 200
|
||||
if len(q) <= max {
|
||||
return q
|
||||
}
|
||||
return q[:max] + "..."
|
||||
}
|
||||
|
||||
func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.QueryResult {
|
||||
dbInst, err := a.getDatabase(config)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取连接失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
dbs, err := dbInst.GetDatabases()
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetDatabases 获取数据库列表失败:%s", formatConnSummary(config))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
@@ -122,18 +140,17 @@ func (a *App) DBGetDatabases(config connection.ConnectionConfig) connection.Quer
|
||||
}
|
||||
|
||||
func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
tables, err := dbInst.GetTables(dbName)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBGetTables 获取表列表失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
@@ -146,18 +163,18 @@ func (a *App) DBGetTables(config connection.ConnectionConfig, dbName string) con
|
||||
}
|
||||
|
||||
func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBShowCreateTable 获取连接失败:%s", formatConnSummary(runConfig))
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
sqlStr, err := dbInst.GetCreateStatement(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
sqlStr, err := dbInst.GetCreateStatement(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "DBShowCreateTable 获取建表语句失败:%s 表=%s", formatConnSummary(runConfig), tableName)
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
@@ -165,17 +182,15 @@ func (a *App) DBShowCreateTable(config connection.ConnectionConfig, dbName strin
|
||||
}
|
||||
|
||||
func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
columns, err := dbInst.GetColumns(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
columns, err := dbInst.GetColumns(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -184,17 +199,15 @@ func (a *App) DBGetColumns(config connection.ConnectionConfig, dbName string, ta
|
||||
}
|
||||
|
||||
func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
indexes, err := dbInst.GetIndexes(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
indexes, err := dbInst.GetIndexes(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -203,17 +216,15 @@ func (a *App) DBGetIndexes(config connection.ConnectionConfig, dbName string, ta
|
||||
}
|
||||
|
||||
func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
fks, err := dbInst.GetForeignKeys(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
fks, err := dbInst.GetForeignKeys(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -222,17 +233,15 @@ func (a *App) DBGetForeignKeys(config connection.ConnectionConfig, dbName string
|
||||
}
|
||||
|
||||
func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, tableName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
triggers, err := dbInst.GetTriggers(dbName, tableName)
|
||||
schemaName, pureTableName := normalizeSchemaAndTable(config, dbName, tableName)
|
||||
triggers, err := dbInst.GetTriggers(schemaName, pureTableName)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
@@ -241,10 +250,7 @@ func (a *App) DBGetTriggers(config connection.ConnectionConfig, dbName string, t
|
||||
}
|
||||
|
||||
func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -257,4 +263,4 @@ func (a *App) DBGetAllColumns(config connection.ConnectionConfig, dbName string)
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Data: cols}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
)
|
||||
@@ -134,10 +135,7 @@ func (a *App) ImportData(config connection.ConnectionConfig, dbName, tableName s
|
||||
return connection.QueryResult{Success: true, Message: "No data to import"}
|
||||
}
|
||||
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
@@ -163,25 +161,20 @@ func (a *App) ImportData(config connection.ConnectionConfig, dbName, tableName s
|
||||
values = append(values, fmt.Sprintf("'%s'", vStr))
|
||||
}
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO `%s` (%s) VALUES (%s)",
|
||||
tableName,
|
||||
strings.Join(cols, ", "),
|
||||
strings.Join(values, ", "))
|
||||
|
||||
if runConfig.Type == "postgres" {
|
||||
pgCols := make([]string, len(cols))
|
||||
for i, c := range cols { pgCols[i] = fmt.Sprintf("\"%s\"", c) }
|
||||
query = fmt.Sprintf("INSERT INTO \"%s\" (%s) VALUES (%s)",
|
||||
tableName,
|
||||
strings.Join(pgCols, ", "),
|
||||
strings.Join(values, ", "))
|
||||
quotedCols := make([]string, len(cols))
|
||||
for i, c := range cols {
|
||||
quotedCols[i] = quoteIdentByType(runConfig.Type, c)
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)",
|
||||
quoteQualifiedIdentByType(runConfig.Type, tableName),
|
||||
strings.Join(quotedCols, ", "),
|
||||
strings.Join(values, ", "))
|
||||
|
||||
_, err := dbInst.Exec(query)
|
||||
if err != nil {
|
||||
errCount++
|
||||
fmt.Println("Import Error:", err)
|
||||
logger.Error(err, "导入数据失败:表=%s", tableName)
|
||||
} else {
|
||||
successCount++
|
||||
}
|
||||
@@ -191,10 +184,7 @@ func (a *App) ImportData(config connection.ConnectionConfig, dbName, tableName s
|
||||
}
|
||||
|
||||
func (a *App) ApplyChanges(config connection.ConnectionConfig, dbName, tableName string, changes connection.ChangeSet) connection.QueryResult {
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
@@ -222,20 +212,14 @@ func (a *App) ExportTable(config connection.ConnectionConfig, dbName string, tab
|
||||
return connection.QueryResult{Success: false, Message: "Cancelled"}
|
||||
}
|
||||
|
||||
runConfig := config
|
||||
if dbName != "" {
|
||||
runConfig.Database = dbName
|
||||
}
|
||||
runConfig := normalizeRunConfig(config, dbName)
|
||||
|
||||
dbInst, err := a.getDatabase(runConfig)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
|
||||
query := fmt.Sprintf("SELECT * FROM `%s`", tableName)
|
||||
if runConfig.Type == "postgres" {
|
||||
query = fmt.Sprintf("SELECT * FROM \"%s\"", tableName)
|
||||
}
|
||||
query := fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(runConfig.Type, tableName))
|
||||
|
||||
data, columns, err := dbInst.Query(query)
|
||||
if err != nil {
|
||||
@@ -317,6 +301,45 @@ data, columns, err := dbInst.Query(query)
|
||||
return connection.QueryResult{Success: true, Message: "Export successful"}
|
||||
}
|
||||
|
||||
func quoteIdentByType(dbType string, ident string) string {
|
||||
if ident == "" {
|
||||
return ident
|
||||
}
|
||||
|
||||
switch dbType {
|
||||
case "mysql":
|
||||
return "`" + strings.ReplaceAll(ident, "`", "``") + "`"
|
||||
default:
|
||||
return `"` + strings.ReplaceAll(ident, `"`, `""`) + `"`
|
||||
}
|
||||
}
|
||||
|
||||
func quoteQualifiedIdentByType(dbType string, ident string) string {
|
||||
raw := strings.TrimSpace(ident)
|
||||
if raw == "" {
|
||||
return raw
|
||||
}
|
||||
|
||||
parts := strings.Split(raw, ".")
|
||||
if len(parts) <= 1 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
|
||||
quotedParts := make([]string, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
part = strings.TrimSpace(part)
|
||||
if part == "" {
|
||||
continue
|
||||
}
|
||||
quotedParts = append(quotedParts, quoteIdentByType(dbType, part))
|
||||
}
|
||||
|
||||
if len(quotedParts) == 0 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
return strings.Join(quotedParts, ".")
|
||||
}
|
||||
|
||||
// ExportData exports provided data to a file
|
||||
func (a *App) ExportData(data []map[string]interface{}, columns []string, defaultName string, format string) connection.QueryResult {
|
||||
if defaultName == "" {
|
||||
@@ -404,4 +427,4 @@ func (a *App) ExportData(data []map[string]interface{}, columns []string, defaul
|
||||
}
|
||||
|
||||
return connection.QueryResult{Success: true, Message: "Export successful"}
|
||||
}
|
||||
}
|
||||
|
||||
99
internal/app/methods_sync.go
Normal file
99
internal/app/methods_sync.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/sync"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
)
|
||||
|
||||
// DataSync executes a data synchronization task
|
||||
func (a *App) DataSync(config sync.SyncConfig) sync.SyncResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("sync-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
reporter := sync.Reporter{
|
||||
OnLog: func(event sync.SyncLogEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncLog, event)
|
||||
},
|
||||
OnProgress: func(event sync.SyncProgressEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncProgress, event)
|
||||
},
|
||||
}
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncStart, map[string]any{
|
||||
"jobId": jobID,
|
||||
"total": len(config.Tables),
|
||||
})
|
||||
|
||||
engine := sync.NewSyncEngine(reporter)
|
||||
res := engine.RunSync(config)
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncDone, map[string]any{
|
||||
"jobId": jobID,
|
||||
"result": res,
|
||||
})
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// DataSyncAnalyze analyzes differences between source and target for the given tables (dry-run).
|
||||
func (a *App) DataSyncAnalyze(config sync.SyncConfig) connection.QueryResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("analyze-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
reporter := sync.Reporter{
|
||||
OnLog: func(event sync.SyncLogEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncLog, event)
|
||||
},
|
||||
OnProgress: func(event sync.SyncProgressEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncProgress, event)
|
||||
},
|
||||
}
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncStart, map[string]any{
|
||||
"jobId": jobID,
|
||||
"total": len(config.Tables),
|
||||
"type": "analyze",
|
||||
})
|
||||
|
||||
engine := sync.NewSyncEngine(reporter)
|
||||
res := engine.Analyze(config)
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncDone, map[string]any{
|
||||
"jobId": jobID,
|
||||
"result": res,
|
||||
"type": "analyze",
|
||||
})
|
||||
|
||||
if !res.Success {
|
||||
return connection.QueryResult{Success: false, Message: res.Message, Data: res}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: res.Message, Data: res}
|
||||
}
|
||||
|
||||
// DataSyncPreview returns a limited preview of diff rows for one table.
|
||||
func (a *App) DataSyncPreview(config sync.SyncConfig, tableName string, limit int) connection.QueryResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("preview-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
engine := sync.NewSyncEngine(sync.Reporter{})
|
||||
preview, err := engine.Preview(config, tableName, limit)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "OK", Data: preview}
|
||||
}
|
||||
@@ -19,14 +19,17 @@ type ConnectionConfig struct {
|
||||
Database string `json:"database"`
|
||||
UseSSH bool `json:"useSSH"`
|
||||
SSH SSHConfig `json:"ssh"`
|
||||
Driver string `json:"driver,omitempty"` // For custom connection
|
||||
DSN string `json:"dsn,omitempty"` // For custom connection
|
||||
Timeout int `json:"timeout,omitempty"` // Connection timeout in seconds (default: 30)
|
||||
}
|
||||
|
||||
// QueryResult is the standard response format for Wails methods
|
||||
type QueryResult struct {
|
||||
Success bool `json:"success"`
|
||||
Message string `json:"message"`
|
||||
Data interface{} `json:"data"`
|
||||
Fields []string `json:"fields,omitempty"`
|
||||
Success bool `json:"success"`
|
||||
Message string `json:"message"`
|
||||
Data interface{} `json:"data"`
|
||||
Fields []string `json:"fields,omitempty"`
|
||||
}
|
||||
|
||||
// ColumnDefinition represents a table column
|
||||
@@ -36,7 +39,7 @@ type ColumnDefinition struct {
|
||||
Nullable string `json:"nullable"` // YES/NO
|
||||
Key string `json:"key"` // PRI, UNI, MUL
|
||||
Default *string `json:"default"`
|
||||
Extra string `json:"extra"` // auto_increment
|
||||
Extra string `json:"extra"` // auto_increment
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
|
||||
256
internal/db/custom_impl.go
Normal file
256
internal/db/custom_impl.go
Normal file
@@ -0,0 +1,256 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
)
|
||||
|
||||
// CustomDB is a generic database/sql-backed connector for a user-specified
// driver name and DSN. Catalog operations are best-effort, keyed off the
// driver string.
type CustomDB struct {
	conn        *sql.DB       // live pool; nil until Connect succeeds
	driver      string        // driver name passed to sql.Open; used for dialect guesses
	pingTimeout time.Duration // bound applied by Ping; <=0 means the 5s default
}
|
||||
|
||||
func (c *CustomDB) Connect(config connection.ConnectionConfig) error {
|
||||
if config.Driver == "" || config.DSN == "" {
|
||||
return fmt.Errorf("driver and dsn are required for custom connection")
|
||||
}
|
||||
|
||||
// Verify driver is registered (implicit check by sql.Open)
|
||||
// We might not need explicit check, sql.Open will fail or Ping will fail if driver not found.
|
||||
|
||||
db, err := sql.Open(config.Driver, config.DSN)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
c.conn = db
|
||||
c.driver = config.Driver
|
||||
c.pingTimeout = getConnectTimeout(config)
|
||||
if err := c.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *CustomDB) Close() error {
|
||||
if c.conn != nil {
|
||||
return c.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *CustomDB) Ping() error {
|
||||
if c.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := c.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return c.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (c *CustomDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if c.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := c.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
columns, err := rows.Columns()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
}
|
||||
|
||||
func (c *CustomDB) Exec(query string) (int64, error) {
|
||||
if c.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := c.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
// GetDatabases lists databases for the connection.
//
// There is no portable catalog query for an arbitrary driver, so this
// deliberately returns an empty (non-nil) list rather than guessing.
func (c *CustomDB) GetDatabases() ([]string, error) {
	// Try standard information_schema or some known patterns if we can't guess
	// For "custom", we can't easily know.
	// But many DBs support SHOW DATABASES or SELECT datname FROM pg_database
	// We'll try a generic query or return empty.
	// Users using custom might know their DB context is single.

	// Best effort:
	return []string{}, nil
}
|
||||
|
||||
// GetTables lists tables, picking a catalog query by driver name and
// falling back to the ANSI information_schema for unknown drivers.
// Row-to-name mapping is heuristic because column names differ per dialect.
//
// NOTE(review): dbName is interpolated into SQL; it is expected to come
// from the app's own metadata tree, not raw user input.
func (c *CustomDB) GetTables(dbName string) ([]string, error) {
	// ANSI Standard
	query := "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
	// If mysql-like
	if c.driver == "mysql" {
		query = "SHOW TABLES"
		if dbName != "" {
			query = fmt.Sprintf("SHOW TABLES FROM `%s`", dbName)
		}
	} else if c.driver == "postgres" || c.driver == "kingbase" {
		query = `
		SELECT table_schema AS schemaname, table_name AS tablename
		FROM information_schema.tables
		WHERE table_type = 'BASE TABLE'
		AND table_schema NOT IN ('pg_catalog', 'information_schema')`
		if dbName != "" {
			query += fmt.Sprintf(" AND table_schema = '%s'", dbName)
		}
		query += " ORDER BY table_schema, table_name"
	} else if c.driver == "sqlite" {
		query = "SELECT name FROM sqlite_master WHERE type='table'"
	} else if c.driver == "oracle" || c.driver == "dm" {
		query = "SELECT table_name FROM user_tables"
		if dbName != "" {
			query = fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
		}
	}

	// Fallback generic execution
	data, _, err := c.Query(query)
	if err != nil {
		return nil, fmt.Errorf("failed to get tables for custom driver %s: %v", c.driver, err)
	}

	var tables []string
	for _, row := range data {
		// postgres/kingbase branch: aliased schemaname/tablename columns.
		if schema, okSchema := row["schemaname"]; okSchema {
			if name, okName := row["tablename"]; okName {
				tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
				continue
			}
		}
		// oracle/dm branch: upper-cased OWNER/TABLE_NAME columns.
		if owner, okOwner := row["OWNER"]; okOwner {
			if name, okName := row["TABLE_NAME"]; okName {
				tables = append(tables, fmt.Sprintf("%v.%v", owner, name))
				continue
			}
		}
		// iterate keys to find likely column
		// NOTE(review): map iteration order is random — if several keys
		// match this heuristic, which one wins is nondeterministic.
		for k, v := range row {
			if strings.Contains(strings.ToLower(k), "name") || strings.Contains(strings.ToLower(k), "table") {
				tables = append(tables, fmt.Sprintf("%v", v))
				break
			}
		}
	}
	return tables, nil
}
|
||||
|
||||
// GetCreateStatement would return the table DDL; for custom drivers there
// is no portable way to obtain it, so a placeholder string is returned
// (with a nil error so the UI renders it instead of failing).
func (c *CustomDB) GetCreateStatement(dbName, tableName string) (string, error) {
	return "Not supported for custom connections yet", nil
}
|
||||
|
||||
func (c *CustomDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
// ANSI Standard
|
||||
// SELECT column_name, data_type, is_nullable, column_default FROM information_schema.columns WHERE table_name = '...'
|
||||
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_name = '%s'`, tableName)
|
||||
|
||||
// Adjust for schema if likely supported
|
||||
if c.driver == "postgres" || c.driver == "kingbase" {
|
||||
query += fmt.Sprintf(" AND table_schema = '%s'", schema)
|
||||
} else if c.driver == "mysql" {
|
||||
query = fmt.Sprintf("SHOW FULL COLUMNS FROM `%s`", tableName)
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SHOW FULL COLUMNS FROM `%s`.`%s`", dbName, tableName)
|
||||
}
|
||||
}
|
||||
|
||||
data, _, err := c.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{}
|
||||
// flexible mapping
|
||||
for k, v := range row {
|
||||
kl := strings.ToLower(k)
|
||||
val := fmt.Sprintf("%v", v)
|
||||
if strings.Contains(kl, "field") || strings.Contains(kl, "column_name") {
|
||||
col.Name = val
|
||||
} else if strings.Contains(kl, "type") {
|
||||
col.Type = val
|
||||
} else if strings.Contains(kl, "null") || strings.Contains(kl, "nullable") {
|
||||
col.Nullable = val
|
||||
} else if strings.Contains(kl, "default") {
|
||||
col.Default = &val
|
||||
} else if strings.Contains(kl, "key") {
|
||||
col.Key = val
|
||||
} else if strings.Contains(kl, "comment") {
|
||||
col.Comment = val
|
||||
}
|
||||
}
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
// GetIndexes is not supported for custom connections; callers receive an
// explicit error rather than an empty list.
func (c *CustomDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
	return nil, fmt.Errorf("not implemented for custom")
}
|
||||
|
||||
// GetForeignKeys is not supported for custom connections.
func (c *CustomDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	return nil, fmt.Errorf("not implemented for custom")
}
|
||||
|
||||
// GetTriggers is not supported for custom connections.
func (c *CustomDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
	return nil, fmt.Errorf("not implemented for custom")
}
|
||||
|
||||
// ApplyChanges always fails: custom connections are read-only for now.
func (c *CustomDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	return fmt.Errorf("read-only mode for custom")
}
|
||||
|
||||
// GetAllColumns is not supported for custom connections.
func (c *CustomDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
	return nil, fmt.Errorf("not implemented for custom")
}
|
||||
363
internal/db/dameng_impl.go
Normal file
363
internal/db/dameng_impl.go
Normal file
@@ -0,0 +1,363 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "gitee.com/chunanyong/dm"
|
||||
)
|
||||
|
||||
// DamengDB implements the Database interface for DM (Dameng) via the
// gitee.com/chunanyong/dm driver. Catalog queries use the Oracle-style
// views DM exposes.
type DamengDB struct {
	conn        *sql.DB       // live pool; nil until Connect succeeds
	pingTimeout time.Duration // bound applied by Ping; <=0 means the 5s default
}
|
||||
|
||||
// getDSN builds a DM driver DSN of the form
// dm://user:password@host:port[?schema=...&escapeProcess=true].
func (d *DamengDB) getDSN(config connection.ConnectionConfig) string {
	// dm://user:password@host:port?schema=...
	// or dm://user:password@host:port

	address := net.JoinHostPort(config.Host, strconv.Itoa(config.Port))
	if config.UseSSH {
		// SSH logic similar to others, assumes port forwarding
		_, err := ssh.RegisterSSHNetwork(config.SSH)
		if err == nil {
			// DM driver likely uses standard net.Dial, so we might need a local listener
			// or assume port forwarding is handled externally or implicitly via "tcp" override if driver allows.
			// Similar to Oracle, we skip complex custom dialer injection for now.
			// NOTE(review): the registered network name is discarded, so
			// UseSSH currently has no effect on the produced DSN — confirm
			// forwarding is handled elsewhere before relying on it.
		}
	}

	escapedPassword := url.PathEscape(config.Password)
	q := url.Values{}
	if config.Database != "" {
		q.Set("schema", config.Database)
	}
	if escapedPassword != config.Password {
		// DM driver requirement: when the password contains special
		// characters it must be PathEscape'd, and escapeProcess=true must
		// be added so the driver decodes it.
		q.Set("escapeProcess", "true")
	}

	dsn := fmt.Sprintf("dm://%s:%s@%s", config.User, escapedPassword, address)
	encoded := q.Encode()
	if encoded == "" {
		return dsn
	}
	return dsn + "?" + encoded
}
|
||||
|
||||
func (d *DamengDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := d.getDSN(config)
|
||||
db, err := sql.Open("dm", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
d.conn = db
|
||||
d.pingTimeout = getConnectTimeout(config)
|
||||
if err := d.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *DamengDB) Close() error {
|
||||
if d.conn != nil {
|
||||
return d.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *DamengDB) Ping() error {
|
||||
if d.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := d.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return d.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (d *DamengDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if d.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := d.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
columns, err := rows.Columns()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
}
|
||||
|
||||
func (d *DamengDB) Exec(query string) (int64, error) {
|
||||
if d.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := d.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetDatabases() ([]string, error) {
|
||||
// DM: List Users/Schemas
|
||||
data, _, err := d.Query("SELECT username FROM dba_users")
|
||||
if err != nil {
|
||||
// Fallback if dba_users not accessible
|
||||
data, _, err = d.Query("SELECT username FROM all_users")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["USERNAME"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
// GetTables lists tables. With a schema (dbName) it returns "OWNER.TABLE"
// pairs from all_tables; without one it falls back to the session user's
// user_tables and returns bare table names.
func (d *DamengDB) GetTables(dbName string) ([]string, error) {
	query := fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
	if dbName == "" {
		query = "SELECT table_name FROM user_tables"
	}

	data, _, err := d.Query(query)
	if err != nil {
		return nil, err
	}

	var tables []string
	for _, row := range data {
		if dbName != "" {
			// Qualified form: both OWNER and TABLE_NAME are selected in
			// this branch.
			if owner, okOwner := row["OWNER"]; okOwner {
				if name, okName := row["TABLE_NAME"]; okName {
					tables = append(tables, fmt.Sprintf("%v.%v", owner, name))
					continue
				}
			}
		}
		// Unqualified fallback (user_tables branch).
		if val, ok := row["TABLE_NAME"]; ok {
			tables = append(tables, fmt.Sprintf("%v", val))
		}
	}
	return tables, nil
}
|
||||
|
||||
// GetCreateStatement fetches table DDL via DBMS_METADATA.GET_DDL, using
// the 3-argument (owner-qualified) form when dbName is supplied.
func (d *DamengDB) GetCreateStatement(dbName, tableName string) (string, error) {
	// DM: SP_TABLEDEF usually returns definition
	// Or standard Oracle way if supported.
	// We'll try a common DM approach.
	// SELECT DBMS_METADATA.GET_DDL('TABLE', 'TABLE_NAME', 'OWNER') FROM DUAL;

	query := fmt.Sprintf("SELECT DBMS_METADATA.GET_DDL('TABLE', '%s', '%s') as ddl FROM DUAL",
		strings.ToUpper(tableName), strings.ToUpper(dbName))

	if dbName == "" {
		query = fmt.Sprintf("SELECT DBMS_METADATA.GET_DDL('TABLE', '%s') as ddl FROM DUAL", strings.ToUpper(tableName))
	}

	data, _, err := d.Query(query)
	if err != nil {
		return "", err
	}

	if len(data) > 0 {
		// The "ddl" alias comes back upper-cased as "DDL".
		if val, ok := data[0]["DDL"]; ok {
			return fmt.Sprintf("%v", val), nil
		}
	}
	return "", fmt.Errorf("create statement not found")
}
|
||||
|
||||
func (d *DamengDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, nullable, data_default
|
||||
FROM all_tab_columns
|
||||
WHERE owner = '%s' AND table_name = '%s'`,
|
||||
strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf(`SELECT column_name, data_type, nullable, data_default
|
||||
FROM user_tab_columns
|
||||
WHERE table_name = '%s'`, strings.ToUpper(tableName))
|
||||
}
|
||||
|
||||
data, _, err := d.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
Type: fmt.Sprintf("%v", row["DATA_TYPE"]),
|
||||
Nullable: fmt.Sprintf("%v", row["NULLABLE"]),
|
||||
}
|
||||
|
||||
if row["DATA_DEFAULT"] != nil {
|
||||
def := fmt.Sprintf("%v", row["DATA_DEFAULT"])
|
||||
col.Default = &def
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (d *DamengDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT index_name, column_name, uniqueness
|
||||
FROM all_ind_columns
|
||||
JOIN all_indexes USING (index_name, owner)
|
||||
WHERE table_owner = '%s' AND table_name = '%s'`,
|
||||
strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf(`SELECT index_name, column_name, uniqueness
|
||||
FROM user_ind_columns
|
||||
JOIN user_indexes USING (index_name)
|
||||
WHERE table_name = '%s'`, strings.ToUpper(tableName))
|
||||
}
|
||||
|
||||
data, _, err := d.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
unique := 1
|
||||
if val, ok := row["UNIQUENESS"]; ok && val == "UNIQUE" {
|
||||
unique = 0
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["INDEX_NAME"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
NonUnique: unique,
|
||||
IndexType: "BTREE",
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
// GetForeignKeys resolves foreign keys for one table by joining the
// Oracle-compatible constraint views (constraint_type 'R'), matching
// referencing and referenced columns by position.
//
// NOTE(review): owner/table names are interpolated directly into SQL;
// they are expected to come from the app's own metadata tree.
func (d *DamengDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
	// Reusing Oracle style query as DM is highly compatible
	query := fmt.Sprintf(`SELECT a.constraint_name, a.column_name, c_pk.table_name r_table_name, b.column_name r_column_name
		FROM all_cons_columns a
		JOIN all_constraints c ON a.owner = c.owner AND a.constraint_name = c.constraint_name
		JOIN all_constraints c_pk ON c.r_owner = c_pk.owner AND c.r_constraint_name = c_pk.constraint_name
		JOIN all_cons_columns b ON c_pk.owner = b.owner AND c_pk.constraint_name = b.constraint_name AND a.position = b.position
		WHERE c.constraint_type = 'R' AND a.owner = '%s' AND a.table_name = '%s'`,
		strings.ToUpper(dbName), strings.ToUpper(tableName))

	data, _, err := d.Query(query)
	if err != nil {
		return nil, err
	}

	var fks []connection.ForeignKeyDefinition
	for _, row := range data {
		fk := connection.ForeignKeyDefinition{
			Name:           fmt.Sprintf("%v", row["CONSTRAINT_NAME"]),
			ColumnName:     fmt.Sprintf("%v", row["COLUMN_NAME"]),
			RefTableName:   fmt.Sprintf("%v", row["R_TABLE_NAME"]),
			RefColumnName:  fmt.Sprintf("%v", row["R_COLUMN_NAME"]),
			ConstraintName: fmt.Sprintf("%v", row["CONSTRAINT_NAME"]),
		}
		fks = append(fks, fk)
	}
	return fks, nil
}
|
||||
|
||||
func (d *DamengDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT trigger_name, trigger_type, triggering_event
|
||||
FROM all_triggers
|
||||
WHERE table_owner = '%s' AND table_name = '%s'`,
|
||||
strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
data, _, err := d.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["TRIGGER_NAME"]),
|
||||
Timing: fmt.Sprintf("%v", row["TRIGGER_TYPE"]),
|
||||
Event: fmt.Sprintf("%v", row["TRIGGERING_EVENT"]),
|
||||
Statement: "SOURCE HIDDEN",
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
// ApplyChanges always fails: the Dameng connector is read-only for now.
func (d *DamengDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	return fmt.Errorf("read-only mode implemented for Dameng so far")
}
|
||||
|
||||
func (d *DamengDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
query := fmt.Sprintf(`SELECT table_name, column_name, data_type
|
||||
FROM all_tab_columns
|
||||
WHERE owner = '%s'`, strings.ToUpper(dbName))
|
||||
|
||||
data, _, err := d.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: fmt.Sprintf("%v", row["TABLE_NAME"]),
|
||||
Name: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
Type: fmt.Sprintf("%v", row["DATA_TYPE"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type Database interface {
|
||||
@@ -34,6 +34,14 @@ func NewDatabase(dbType string) (Database, error) {
|
||||
return &PostgresDB{}, nil
|
||||
case "sqlite":
|
||||
return &SQLiteDB{}, nil
|
||||
case "oracle":
|
||||
return &OracleDB{}, nil
|
||||
case "dameng":
|
||||
return &DamengDB{}, nil
|
||||
case "kingbase":
|
||||
return &KingbaseDB{}, nil
|
||||
case "custom":
|
||||
return &CustomDB{}, nil
|
||||
default:
|
||||
// Default to MySQL for backward compatibility if empty
|
||||
if dbType == "" {
|
||||
|
||||
97
internal/db/dsn_test.go
Normal file
97
internal/db/dsn_test.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
// TestPostgresDSN_EscapesPassword verifies that special characters in the
// password are URL-escaped in the generated DSN and that sslmode is
// always appended.
func TestPostgresDSN_EscapesPassword(t *testing.T) {
	p := &PostgresDB{}
	cfg := connection.ConnectionConfig{
		Type:     "postgres",
		Host:     "127.0.0.1",
		Port:     5432,
		User:     "user",
		Password: "p@ss:wo/rd",
		Database: "db",
	}

	dsn := p.getDSN(cfg)
	// The raw password must never appear verbatim in the DSN.
	if strings.Contains(dsn, cfg.Password) {
		t.Fatalf("dsn 包含原始密码:%s", dsn)
	}
	// "p@ss:wo/rd" URL-escapes to "p%40ss%3Awo%2Frd".
	if !strings.Contains(dsn, "p%40ss%3Awo%2Frd") {
		t.Fatalf("dsn 未正确转义密码:%s", dsn)
	}
	if !strings.Contains(dsn, "sslmode=disable") {
		t.Fatalf("dsn 缺少 sslmode 参数:%s", dsn)
	}
}
|
||||
|
||||
// TestOracleDSN_EscapesUserAndPassword verifies that user, password and
// service name are all URL-escaped in the Oracle DSN.
func TestOracleDSN_EscapesUserAndPassword(t *testing.T) {
	o := &OracleDB{}
	cfg := connection.ConnectionConfig{
		Type:     "oracle",
		Host:     "127.0.0.1",
		Port:     1521,
		User:     "u@ser",
		Password: "p@ss:wo/rd",
		Database: "svc/name",
	}

	dsn := o.getDSN(cfg)
	if strings.Contains(dsn, cfg.Password) {
		t.Fatalf("dsn 包含原始密码:%s", dsn)
	}
	// "@" escapes to %40, ":" to %3A, "/" to %2F.
	if !strings.Contains(dsn, "u%40ser") || !strings.Contains(dsn, "p%40ss%3Awo%2Frd") {
		t.Fatalf("dsn 未正确转义 user/password:%s", dsn)
	}
	// The service name's slash must be escaped so it is not read as a
	// path separator.
	if !strings.Contains(dsn, "/svc%2Fname") {
		t.Fatalf("dsn 未正确转义 service:%s", dsn)
	}
}
|
||||
|
||||
// TestDamengDSN_EscapesPasswordAndEnablesEscapeProcess verifies that a
// password with special characters is escaped per the DM driver's rules,
// that escapeProcess=true is added so the driver decodes it, and that the
// schema parameter is carried through.
func TestDamengDSN_EscapesPasswordAndEnablesEscapeProcess(t *testing.T) {
	d := &DamengDB{}
	cfg := connection.ConnectionConfig{
		Type:     "dameng",
		Host:     "127.0.0.1",
		Port:     5236,
		User:     "SYSDBA",
		Password: "p@ss:wo/rd",
		Database: "DBName",
	}

	dsn := d.getDSN(cfg)
	if strings.Contains(dsn, cfg.Password) {
		t.Fatalf("dsn 包含原始密码:%s", dsn)
	}
	// At minimum the "/" must be escaped ("wo/rd" -> "wo%2Frd").
	if strings.Contains(dsn, "wo/rd") || !strings.Contains(dsn, "wo%2Frd") {
		t.Fatalf("dsn 未按达梦驱动要求转义密码(至少应转义 '/'):%s", dsn)
	}
	if !strings.Contains(dsn, "escapeProcess=true") {
		t.Fatalf("dsn 缺少 escapeProcess=true:%s", dsn)
	}
	if !strings.Contains(dsn, "schema=DBName") {
		t.Fatalf("dsn 缺少 schema 参数:%s", dsn)
	}
}
|
||||
|
||||
// TestKingbaseDSN_QuotesPasswordWithSpaces verifies that a password
// containing spaces is single-quoted in the key=value DSN so the
// lib/pq-style parser does not split it.
func TestKingbaseDSN_QuotesPasswordWithSpaces(t *testing.T) {
	k := &KingbaseDB{}
	cfg := connection.ConnectionConfig{
		Type:     "kingbase",
		Host:     "127.0.0.1",
		Port:     54321,
		User:     "system",
		Password: "p@ss word",
		Database: "TEST",
	}

	dsn := k.getDSN(cfg)
	if !strings.Contains(dsn, "password='p@ss word'") {
		t.Fatalf("dsn 未对包含空格的密码进行引号包裹:%s", dsn)
	}
}
|
||||
407
internal/db/kingbase_impl.go
Normal file
407
internal/db/kingbase_impl.go
Normal file
@@ -0,0 +1,407 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "gitea.com/kingbase/gokb" // Registers "kingbase" driver
|
||||
)
|
||||
|
||||
// KingbaseDB implements the Database interface for Kingbase, which is
// largely Postgres-compatible; it connects via the "kingbase" driver.
type KingbaseDB struct {
	conn        *sql.DB       // live pool; nil until Connect succeeds
	pingTimeout time.Duration // bound applied by Ping; <=0 means the 5s default
}
|
||||
|
||||
// quoteConnValue prepares a value for use in a lib/pq-style key=value DSN.
// Empty values become ''. Values containing whitespace, single quotes or
// backslashes are wrapped in single quotes with those two characters
// backslash-escaped; everything else passes through unchanged.
func quoteConnValue(v string) string {
	if v == "" {
		return "''"
	}
	if !strings.ContainsAny(v, " \t\n\r\v\f'\\") {
		return v
	}
	escaped := strings.NewReplacer(`\`, `\\`, `'`, `\'`).Replace(v)
	return "'" + escaped + "'"
}
|
||||
|
||||
// getDSN builds a lib/pq-style key=value DSN; values are quoted via
// quoteConnValue when they contain whitespace, quotes or backslashes.
func (k *KingbaseDB) getDSN(config connection.ConnectionConfig) string {
	// Kingbase DSN usually similar to Postgres:
	// host=localhost port=54321 user=system password=... dbname=TEST sslmode=disable

	address := config.Host
	port := config.Port

	if config.UseSSH {
		netName, err := ssh.RegisterSSHNetwork(config.SSH)
		if err == nil {
			// Kingbase/Postgres lib/pq allows custom dialer via "host" if using unix socket,
			// but for custom network it's harder.
			// Ideally we use a local forwarder.
			// For now, we assume standard TCP or handle SSH externally.
			// If we implement the net.Dial override for "kingbase" driver (which might use lib/pq internally),
			// we might need to check if it supports "cloudsql" style or similar custom dialers.
			// Similar to others, skipping SSH deep integration here for now.
			// NOTE(review): netName is discarded, so UseSSH currently has
			// no effect on the produced DSN — confirm forwarding happens
			// elsewhere before relying on it.
			_ = netName
		}
	}

	// Construct DSN
	dsn := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable connect_timeout=%d",
		quoteConnValue(address),
		port,
		quoteConnValue(config.User),
		quoteConnValue(config.Password),
		quoteConnValue(config.Database),
		getConnectTimeoutSeconds(config),
	)

	return dsn
}
|
||||
|
||||
func (k *KingbaseDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := k.getDSN(config)
|
||||
// Open using "kingbase" driver
|
||||
db, err := sql.Open("kingbase", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
k.conn = db
|
||||
k.pingTimeout = getConnectTimeout(config)
|
||||
if err := k.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Close() error {
|
||||
if k.conn != nil {
|
||||
return k.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Ping() error {
|
||||
if k.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := k.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return k.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if k.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := k.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
columns, err := rows.Columns()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) Exec(query string) (int64, error) {
|
||||
if k.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := k.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetDatabases() ([]string, error) {
|
||||
// Postgres/Kingbase style
|
||||
data, _, err := k.Query("SELECT datname FROM pg_database WHERE datistemplate = false")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["datname"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
// GetTables lists base tables in the connected database as
// "schema.table", excluding system schemas.
func (k *KingbaseDB) GetTables(dbName string) ([]string, error) {
	// Kingbase: tables are scoped by the current DB connection; include schema to avoid search_path issues.
	query := `
		SELECT table_schema AS schemaname, table_name AS tablename
		FROM information_schema.tables
		WHERE table_type = 'BASE TABLE'
		AND table_schema NOT IN ('pg_catalog', 'information_schema')
		AND table_schema NOT LIKE 'pg_%'
		ORDER BY table_schema, table_name`

	data, _, err := k.Query(query)
	if err != nil {
		return nil, err
	}

	var tables []string
	for _, row := range data {
		schema, okSchema := row["schemaname"]
		name, okName := row["tablename"]
		if okSchema && okName {
			tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
			continue
		}
		// NOTE(review): given the aliases above, this fallback key should
		// never be present; kept as a defensive path.
		if val, ok := row["table_name"]; ok {
			tables = append(tables, fmt.Sprintf("%v", val))
		}
	}
	return tables, nil
}
|
||||
|
||||
// GetCreateStatement would return table DDL, but Kingbase/Postgres has no
// SHOW CREATE TABLE equivalent reachable via plain SQL; a placeholder
// string is returned with a nil error so the UI renders it.
func (k *KingbaseDB) GetCreateStatement(dbName, tableName string) (string, error) {
	// Kingbase doesn't have "SHOW CREATE TABLE".
	// We can try pg_dump logic or use a query to reconstruction.
	// A simple approach is just returning basic info or "Not Supported".
	// Or we can query information_schema to build it.
	return "SHOW CREATE TABLE not directly supported in Kingbase/Postgres via SQL", nil
}
|
||||
|
||||
func (k *KingbaseDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, is_nullable, column_default
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s' AND table_name = '%s'
|
||||
ORDER BY ordinal_position`, schema, tableName)
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
Nullable: fmt.Sprintf("%v", row["is_nullable"]),
|
||||
}
|
||||
|
||||
if row["column_default"] != nil {
|
||||
def := fmt.Sprintf("%v", row["column_default"])
|
||||
col.Default = &def
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
// Postgres/Kingbase index query
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
i.relname as index_name,
|
||||
a.attname as column_name,
|
||||
ix.indisunique as is_unique
|
||||
FROM
|
||||
pg_class t,
|
||||
pg_class i,
|
||||
pg_index ix,
|
||||
pg_attribute a,
|
||||
pg_namespace n
|
||||
WHERE
|
||||
t.oid = ix.indrelid
|
||||
AND i.oid = ix.indexrelid
|
||||
AND a.attrelid = t.oid
|
||||
AND a.attnum = ANY(ix.indkey)
|
||||
AND t.relkind = 'r'
|
||||
AND t.relname = '%s'
|
||||
AND n.oid = t.relnamespace
|
||||
AND n.nspname = '%s'
|
||||
`, tableName, "public") // Default to public if dbName (schema) not clear.
|
||||
|
||||
if dbName != "" {
|
||||
// Update query to use dbName as schema
|
||||
query = strings.Replace(query, "'public'", fmt.Sprintf("'%s'", dbName), 1)
|
||||
}
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
nonUnique := 1
|
||||
if val, ok := row["is_unique"]; ok {
|
||||
if b, ok := val.(bool); ok && b {
|
||||
nonUnique = 0
|
||||
}
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["index_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
NonUnique: nonUnique,
|
||||
IndexType: "BTREE", // Default
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
tc.constraint_name,
|
||||
kcu.column_name,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM
|
||||
information_schema.table_constraints AS tc
|
||||
JOIN information_schema.key_column_usage AS kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage AS ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name='%s' AND tc.table_schema='%s'`,
|
||||
tableName, schema)
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
fk := connection.ForeignKeyDefinition{
|
||||
Name: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["column_name"]),
|
||||
RefTableName: fmt.Sprintf("%v", row["foreign_table_name"]),
|
||||
RefColumnName: fmt.Sprintf("%v", row["foreign_column_name"]),
|
||||
ConstraintName: fmt.Sprintf("%v", row["constraint_name"]),
|
||||
}
|
||||
fks = append(fks, fk)
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (k *KingbaseDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT trigger_name, action_timing, event_manipulation
|
||||
FROM information_schema.triggers
|
||||
WHERE event_object_table = '%s'`, tableName)
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["trigger_name"]),
|
||||
Timing: fmt.Sprintf("%v", row["action_timing"]),
|
||||
Event: fmt.Sprintf("%v", row["event_manipulation"]),
|
||||
Statement: "SOURCE HIDDEN",
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
// ApplyChanges is not implemented for Kingbase yet: the driver is currently
// read-only, so this always returns an error and never touches the database.
func (k *KingbaseDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	return fmt.Errorf("read-only mode implemented for Kingbase so far")
}
|
||||
|
||||
func (k *KingbaseDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
schema := "public"
|
||||
if dbName != "" {
|
||||
schema = dbName
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`SELECT table_name, column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = '%s'`, schema)
|
||||
|
||||
data, _, err := k.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: fmt.Sprintf("%v", row["table_name"]),
|
||||
Name: fmt.Sprintf("%v", row["column_name"]),
|
||||
Type: fmt.Sprintf("%v", row["data_type"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
@@ -14,7 +15,8 @@ import (
|
||||
)
|
||||
|
||||
type MySQLDB struct {
|
||||
conn *sql.DB
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
}
|
||||
|
||||
func (m *MySQLDB) getDSN(config connection.ConnectionConfig) string {
|
||||
@@ -27,23 +29,31 @@ func (m *MySQLDB) getDSN(config connection.ConnectionConfig) string {
|
||||
if err == nil {
|
||||
protocol = netName
|
||||
address = fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
} else {
|
||||
logger.Warnf("注册 SSH 网络失败,将尝试直连:地址=%s:%d 用户=%s,原因:%v", config.Host, config.Port, config.User, err)
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local",
|
||||
config.User, config.Password, protocol, address, database)
|
||||
timeout := getConnectTimeoutSeconds(config)
|
||||
|
||||
return fmt.Sprintf("%s:%s@%s(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local&timeout=%ds",
|
||||
config.User, config.Password, protocol, address, database, timeout)
|
||||
}
|
||||
|
||||
func (m *MySQLDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := m.getDSN(config)
|
||||
db, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
return err
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
m.conn = db
|
||||
|
||||
m.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
// Force verification
|
||||
return m.Ping()
|
||||
if err := m.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *MySQLDB) Close() error {
|
||||
@@ -57,7 +67,11 @@ func (m *MySQLDB) Ping() error {
|
||||
if m.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(5 * time.Second)
|
||||
timeout := m.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return m.conn.PingContext(ctx)
|
||||
}
|
||||
@@ -93,15 +107,7 @@ func (m *MySQLDB) Query(query string) ([]map[string]interface{}, []string, error
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
@@ -141,12 +147,12 @@ func (m *MySQLDB) GetTables(dbName string) ([]string, error) {
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SHOW TABLES FROM `%s`", dbName)
|
||||
}
|
||||
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
for _, v := range row {
|
||||
@@ -167,7 +173,7 @@ func (m *MySQLDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
|
||||
if len(data) > 0 {
|
||||
if val, ok := data[0]["Create Table"]; ok {
|
||||
return fmt.Sprintf("%v", val), nil
|
||||
@@ -197,12 +203,12 @@ func (m *MySQLDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefin
|
||||
Extra: fmt.Sprintf("%v", row["Extra"]),
|
||||
Comment: fmt.Sprintf("%v", row["Comment"]),
|
||||
}
|
||||
|
||||
|
||||
if row["Default"] != nil {
|
||||
d := fmt.Sprintf("%v", row["Default"])
|
||||
col.Default = &d
|
||||
}
|
||||
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
@@ -230,14 +236,14 @@ func (m *MySQLDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefini
|
||||
}
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
if f, ok := val.(float64); ok {
|
||||
seq = int(f)
|
||||
} else if i, ok := val.(int64); ok {
|
||||
seq = int(i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["Key_name"]),
|
||||
@@ -327,12 +333,12 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
|
||||
|
||||
for k, v := range update.Values {
|
||||
sets = append(sets, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
@@ -342,7 +348,7 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
wheres = append(wheres, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
@@ -358,13 +364,13 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
|
||||
|
||||
for k, v := range row {
|
||||
cols = append(cols, fmt.Sprintf("`%s`", k))
|
||||
placeholders = append(placeholders, "?")
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
366
internal/db/oracle_impl.go
Normal file
366
internal/db/oracle_impl.go
Normal file
@@ -0,0 +1,366 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/ssh"
|
||||
"GoNavi-Wails/internal/utils"
|
||||
|
||||
_ "github.com/sijms/go-ora/v2"
|
||||
)
|
||||
|
||||
// OracleDB implements the driver interface for Oracle using the pure-Go
// go-ora/v2 driver.
type OracleDB struct {
	conn        *sql.DB       // pooled connection handle; nil until Connect succeeds
	pingTimeout time.Duration // timeout applied by Ping; derived from the connection config
}
|
||||
|
||||
func (o *OracleDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// oracle://user:pass@host:port/service_name
|
||||
database := config.Database
|
||||
if database == "" {
|
||||
database = config.User // Default to user service/schema if empty?
|
||||
}
|
||||
|
||||
if config.UseSSH {
|
||||
_, err := ssh.RegisterSSHNetwork(config.SSH)
|
||||
if err == nil {
|
||||
// Oracle driver might not support custom dialer via DSN easily without extra config
|
||||
// But go-ora v2 supports some advanced options.
|
||||
// For simplicity, we assume standard TCP or we might need a workaround for SSH.
|
||||
// go-ora v2 is pure Go, so we can potentially use a custom dialer if we manually open.
|
||||
// But for now, let's just use the address.
|
||||
// SSH tunneling via net.Dialer override is complex in sql.Open("oracle", ...).
|
||||
// We might need to forward a local port if using SSH.
|
||||
// Since ssh.RegisterSSHNetwork creates a custom network "ssh-via-...",
|
||||
// we need to see if go-ora supports custom networks.
|
||||
// Checking go-ora docs (simulated): It supports "unix" and "tcp".
|
||||
// We might need to map the custom network to a local proxy.
|
||||
// For now, we will assume direct connection or handle SSH separately later.
|
||||
// We'll leave the protocol implementation as is in MySQL for now, hoping go-ora uses standard net.Dial.
|
||||
// Note: go-ora connection string: oracle://user:pass@host:port/service
|
||||
// It parses host/port. It doesn't easily take a custom "network" parameter in URL.
|
||||
// We will proceed with standard TCP string.
|
||||
}
|
||||
}
|
||||
|
||||
u := &url.URL{
|
||||
Scheme: "oracle",
|
||||
Host: net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
Path: "/" + database,
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
u.RawPath = "/" + url.PathEscape(database)
|
||||
return u.String()
|
||||
}
|
||||
|
||||
func (o *OracleDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := o.getDSN(config)
|
||||
db, err := sql.Open("oracle", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
o.conn = db
|
||||
o.pingTimeout = getConnectTimeout(config)
|
||||
if err := o.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) Close() error {
|
||||
if o.conn != nil {
|
||||
return o.conn.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) Ping() error {
|
||||
if o.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
timeout := o.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return o.conn.PingContext(ctx)
|
||||
}
|
||||
|
||||
func (o *OracleDB) Query(query string) ([]map[string]interface{}, []string, error) {
|
||||
if o.conn == nil {
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
rows, err := o.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
columns, err := rows.Columns()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
var resultData []map[string]interface{}
|
||||
|
||||
for rows.Next() {
|
||||
values := make([]interface{}, len(columns))
|
||||
valuePtrs := make([]interface{}, len(columns))
|
||||
for i := range columns {
|
||||
valuePtrs[i] = &values[i]
|
||||
}
|
||||
|
||||
if err := rows.Scan(valuePtrs...); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
return resultData, columns, nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) Exec(query string) (int64, error) {
|
||||
if o.conn == nil {
|
||||
return 0, fmt.Errorf("connection not open")
|
||||
}
|
||||
res, err := o.conn.Exec(query)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return res.RowsAffected()
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetDatabases() ([]string, error) {
|
||||
// Oracle treats Users/Schemas as "Databases" in this context
|
||||
data, _, err := o.Query("SELECT username FROM all_users ORDER BY username")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var dbs []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["USERNAME"]; ok {
|
||||
dbs = append(dbs, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return dbs, nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetTables(dbName string) ([]string, error) {
|
||||
// dbName is Schema/Owner
|
||||
query := "SELECT table_name FROM user_tables"
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SELECT owner, table_name FROM all_tables WHERE owner = '%s' ORDER BY table_name", strings.ToUpper(dbName))
|
||||
}
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if dbName != "" {
|
||||
if owner, okOwner := row["OWNER"]; okOwner {
|
||||
if name, okName := row["TABLE_NAME"]; okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", owner, name))
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
if val, ok := row["TABLE_NAME"]; ok {
|
||||
tables = append(tables, fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return tables, nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
// Oracle provides DBMS_METADATA.GET_DDL
|
||||
// Note: LONG type might be tricky, but basic string scan should work for smaller DDLs
|
||||
query := fmt.Sprintf("SELECT DBMS_METADATA.GET_DDL('TABLE', '%s', '%s') as ddl FROM DUAL",
|
||||
strings.ToUpper(tableName), strings.ToUpper(dbName))
|
||||
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf("SELECT DBMS_METADATA.GET_DDL('TABLE', '%s') as ddl FROM DUAL", strings.ToUpper(tableName))
|
||||
}
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(data) > 0 {
|
||||
if val, ok := data[0]["DDL"]; ok {
|
||||
return fmt.Sprintf("%v", val), nil
|
||||
}
|
||||
}
|
||||
return "", fmt.Errorf("create statement not found")
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT column_name, data_type, nullable, data_default
|
||||
FROM all_tab_columns
|
||||
WHERE owner = '%s' AND table_name = '%s'
|
||||
ORDER BY column_id`, strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf(`SELECT column_name, data_type, nullable, data_default
|
||||
FROM user_tab_columns
|
||||
WHERE table_name = '%s'
|
||||
ORDER BY column_id`, strings.ToUpper(tableName))
|
||||
}
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var columns []connection.ColumnDefinition
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinition{
|
||||
Name: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
Type: fmt.Sprintf("%v", row["DATA_TYPE"]),
|
||||
Nullable: fmt.Sprintf("%v", row["NULLABLE"]),
|
||||
}
|
||||
|
||||
if row["DATA_DEFAULT"] != nil {
|
||||
d := fmt.Sprintf("%v", row["DATA_DEFAULT"])
|
||||
col.Default = &d
|
||||
}
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT index_name, column_name, uniqueness
|
||||
FROM all_ind_columns
|
||||
JOIN all_indexes USING (index_name, owner)
|
||||
WHERE table_owner = '%s' AND table_name = '%s'`,
|
||||
strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
if dbName == "" {
|
||||
query = fmt.Sprintf(`SELECT index_name, column_name, uniqueness
|
||||
FROM user_ind_columns
|
||||
JOIN user_indexes USING (index_name)
|
||||
WHERE table_name = '%s'`, strings.ToUpper(tableName))
|
||||
}
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var indexes []connection.IndexDefinition
|
||||
for _, row := range data {
|
||||
unique := 1
|
||||
if val, ok := row["UNIQUENESS"]; ok && val == "UNIQUE" {
|
||||
unique = 0
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["INDEX_NAME"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
NonUnique: unique,
|
||||
// SeqInIndex is harder to get in simple join, omitting or estimating
|
||||
IndexType: "BTREE", // Default assumption
|
||||
}
|
||||
indexes = append(indexes, idx)
|
||||
}
|
||||
return indexes, nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetForeignKeys(dbName, tableName string) ([]connection.ForeignKeyDefinition, error) {
|
||||
// Simplified query for FKs
|
||||
query := fmt.Sprintf(`SELECT a.constraint_name, a.column_name, c_pk.table_name r_table_name, b.column_name r_column_name
|
||||
FROM all_cons_columns a
|
||||
JOIN all_constraints c ON a.owner = c.owner AND a.constraint_name = c.constraint_name
|
||||
JOIN all_constraints c_pk ON c.r_owner = c_pk.owner AND c.r_constraint_name = c_pk.constraint_name
|
||||
JOIN all_cons_columns b ON c_pk.owner = b.owner AND c_pk.constraint_name = b.constraint_name AND a.position = b.position
|
||||
WHERE c.constraint_type = 'R' AND a.owner = '%s' AND a.table_name = '%s'`,
|
||||
strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fks []connection.ForeignKeyDefinition
|
||||
for _, row := range data {
|
||||
fk := connection.ForeignKeyDefinition{
|
||||
Name: fmt.Sprintf("%v", row["CONSTRAINT_NAME"]),
|
||||
ColumnName: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
RefTableName: fmt.Sprintf("%v", row["R_TABLE_NAME"]),
|
||||
RefColumnName: fmt.Sprintf("%v", row["R_COLUMN_NAME"]),
|
||||
ConstraintName: fmt.Sprintf("%v", row["CONSTRAINT_NAME"]),
|
||||
}
|
||||
fks = append(fks, fk)
|
||||
}
|
||||
return fks, nil
|
||||
}
|
||||
|
||||
func (o *OracleDB) GetTriggers(dbName, tableName string) ([]connection.TriggerDefinition, error) {
|
||||
query := fmt.Sprintf(`SELECT trigger_name, trigger_type, triggering_event
|
||||
FROM all_triggers
|
||||
WHERE table_owner = '%s' AND table_name = '%s'`,
|
||||
strings.ToUpper(dbName), strings.ToUpper(tableName))
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var triggers []connection.TriggerDefinition
|
||||
for _, row := range data {
|
||||
trig := connection.TriggerDefinition{
|
||||
Name: fmt.Sprintf("%v", row["TRIGGER_NAME"]),
|
||||
Timing: fmt.Sprintf("%v", row["TRIGGER_TYPE"]),
|
||||
Event: fmt.Sprintf("%v", row["TRIGGERING_EVENT"]),
|
||||
Statement: "SOURCE HIDDEN", // Requires more complex query to get body
|
||||
}
|
||||
triggers = append(triggers, trig)
|
||||
}
|
||||
return triggers, nil
|
||||
}
|
||||
|
||||
// ApplyChanges is not implemented for Oracle yet: the driver is currently
// read-only, so this always returns an error and never touches the database.
// TODO: Implement batch application for Oracle using correct syntax.
func (o *OracleDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	return fmt.Errorf("read-only mode implemented for Oracle so far")
}
|
||||
|
||||
func (o *OracleDB) GetAllColumns(dbName string) ([]connection.ColumnDefinitionWithTable, error) {
|
||||
query := fmt.Sprintf(`SELECT table_name, column_name, data_type
|
||||
FROM all_tab_columns
|
||||
WHERE owner = '%s'`, strings.ToUpper(dbName))
|
||||
|
||||
data, _, err := o.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cols []connection.ColumnDefinitionWithTable
|
||||
for _, row := range data {
|
||||
col := connection.ColumnDefinitionWithTable{
|
||||
TableName: fmt.Sprintf("%v", row["TABLE_NAME"]),
|
||||
Name: fmt.Sprintf("%v", row["COLUMN_NAME"]),
|
||||
Type: fmt.Sprintf("%v", row["DATA_TYPE"]),
|
||||
}
|
||||
cols = append(cols, col)
|
||||
}
|
||||
return cols, nil
|
||||
}
|
||||
@@ -3,6 +3,9 @@ package db
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
@@ -12,37 +15,45 @@ import (
|
||||
)
|
||||
|
||||
type PostgresDB struct {
|
||||
conn *sql.DB
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
}
|
||||
|
||||
func (p *PostgresDB) getDSN(config connection.ConnectionConfig) string {
|
||||
// postgres://user:password@host:port/dbname?sslmode=disable
|
||||
host := config.Host
|
||||
port := config.Port
|
||||
// SSH placeholder kept from original
|
||||
if config.UseSSH {
|
||||
// Logic to be implemented
|
||||
}
|
||||
|
||||
dbname := config.Database
|
||||
if dbname == "" {
|
||||
dbname = "postgres" // Default DB
|
||||
}
|
||||
|
||||
return fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=disable",
|
||||
config.User, config.Password, host, port, dbname)
|
||||
u := &url.URL{
|
||||
Scheme: "postgres",
|
||||
Host: net.JoinHostPort(config.Host, strconv.Itoa(config.Port)),
|
||||
Path: "/" + dbname,
|
||||
}
|
||||
u.User = url.UserPassword(config.User, config.Password)
|
||||
q := url.Values{}
|
||||
q.Set("sslmode", "disable")
|
||||
q.Set("connect_timeout", strconv.Itoa(getConnectTimeoutSeconds(config)))
|
||||
u.RawQuery = q.Encode()
|
||||
|
||||
return u.String()
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := p.getDSN(config)
|
||||
db, err := sql.Open("postgres", dsn)
|
||||
if err != nil {
|
||||
return err
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
p.conn = db
|
||||
|
||||
p.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
// Force verification
|
||||
return p.Ping()
|
||||
if err := p.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *PostgresDB) Close() error {
|
||||
@@ -56,7 +67,11 @@ func (p *PostgresDB) Ping() error {
|
||||
if p.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(5 * time.Second)
|
||||
timeout := p.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return p.conn.PingContext(ctx)
|
||||
}
|
||||
@@ -66,8 +81,7 @@ func (p *PostgresDB) Query(query string) ([]map[string]interface{}, []string, er
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
|
||||
rows, err := p.conn.Query(query)
|
||||
rows, err := p.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@@ -93,15 +107,7 @@ rows, err := p.conn.Query(query)
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
@@ -135,16 +141,22 @@ func (p *PostgresDB) GetDatabases() ([]string, error) {
|
||||
}
|
||||
|
||||
func (p *PostgresDB) GetTables(dbName string) ([]string, error) {
|
||||
query := "SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema'"
|
||||
query := "SELECT schemaname, tablename FROM pg_catalog.pg_tables WHERE schemaname != 'information_schema' AND schemaname NOT LIKE 'pg_%' ORDER BY schemaname, tablename"
|
||||
data, _, err := p.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["tablename"]; ok {
|
||||
tables = append(tables, fmt.Sprintf("%v", val))
|
||||
schema, okSchema := row["schemaname"]
|
||||
name, okName := row["tablename"]
|
||||
if okSchema && okName {
|
||||
tables = append(tables, fmt.Sprintf("%v.%v", schema, name))
|
||||
continue
|
||||
}
|
||||
if okName {
|
||||
tables = append(tables, fmt.Sprintf("%v", name))
|
||||
}
|
||||
}
|
||||
return tables, nil
|
||||
|
||||
58
internal/db/query_value.go
Normal file
58
internal/db/query_value.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// normalizeQueryValue normalizes driver-returned values for UI/JSON transport.
// []byte cells become a plain string when they look like readable text and a
// hex string ("0x…") otherwise, so the frontend never renders a blank cell.
func normalizeQueryValue(v interface{}) interface{} {
	b, ok := v.([]byte)
	if !ok {
		return v
	}
	return bytesToReadableString(b)
}

// bytesToReadableString maps a byte slice to a display value:
// nil -> nil, empty -> "", mostly-printable valid UTF-8 -> string,
// anything else -> "0x" + lowercase hex.
func bytesToReadableString(b []byte) interface{} {
	switch {
	case b == nil:
		return nil
	case len(b) == 0:
		return ""
	}

	if utf8.Valid(b) {
		if s := string(b); isMostlyPrintable(s) {
			return s
		}
	}

	return "0x" + hex.EncodeToString(b)
}

// isMostlyPrintable reports whether at least 90% of the runes in s are
// printable; newline, carriage return and tab count as printable. The 10%
// slack keeps ordinary text with a few invisible characters from being
// misclassified as binary.
func isMostlyPrintable(s string) bool {
	if s == "" {
		return true
	}

	total, printable := 0, 0
	for _, r := range s {
		total++
		if r == '\n' || r == '\r' || r == '\t' || unicode.IsPrint(r) {
			printable++
		}
	}

	return printable*100 >= total*90
}
|
||||
@@ -12,19 +12,24 @@ import (
|
||||
)
|
||||
|
||||
type SQLiteDB struct {
|
||||
conn *sql.DB
|
||||
conn *sql.DB
|
||||
pingTimeout time.Duration
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := config.Host
|
||||
dsn := config.Host
|
||||
db, err := sql.Open("sqlite", dsn)
|
||||
if err != nil {
|
||||
return err
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
s.conn = db
|
||||
|
||||
s.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
// Force verification
|
||||
return s.Ping()
|
||||
if err := s.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) Close() error {
|
||||
@@ -38,7 +43,11 @@ func (s *SQLiteDB) Ping() error {
|
||||
if s.conn == nil {
|
||||
return fmt.Errorf("connection not open")
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(5 * time.Second)
|
||||
timeout := s.pingTimeout
|
||||
if timeout <= 0 {
|
||||
timeout = 5 * time.Second
|
||||
}
|
||||
ctx, cancel := utils.ContextWithTimeout(timeout)
|
||||
defer cancel()
|
||||
return s.conn.PingContext(ctx)
|
||||
}
|
||||
@@ -74,15 +83,7 @@ func (s *SQLiteDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
v = string(b)
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
@@ -111,7 +112,7 @@ func (s *SQLiteDB) GetTables(dbName string) ([]string, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["name"]; ok {
|
||||
|
||||
22
internal/db/timeout.go
Normal file
22
internal/db/timeout.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
)
|
||||
|
||||
const defaultConnectTimeoutSeconds = 30
|
||||
|
||||
func getConnectTimeoutSeconds(config connection.ConnectionConfig) int {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = defaultConnectTimeoutSeconds
|
||||
}
|
||||
return timeoutSeconds
|
||||
}
|
||||
|
||||
func getConnectTimeout(config connection.ConnectionConfig) time.Duration {
|
||||
return time.Duration(getConnectTimeoutSeconds(config)) * time.Second
|
||||
}
|
||||
|
||||
197
internal/logger/logger.go
Normal file
197
internal/logger/logger.go
Normal file
@@ -0,0 +1,197 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
	// envLogDir names the environment variable that overrides the log directory.
	envLogDir = "GONAVI_LOG_DIR"
	// appDirName is the application subdirectory created under the user config dir.
	appDirName = "GoNavi"

	// logFileName is the active log file's name inside the log directory.
	logFileName = "gonavi.log"
	// logRotateMaxBytes is the file size that triggers rotation.
	logRotateMaxBytes = 10 * 1024 * 1024 // 10MB
	// logRotateMaxBackups caps how many rotated backups are kept.
	logRotateMaxBackups = 10
)

var (
	once    sync.Once   // guards one-time initialization in Init
	logMu   sync.Mutex  // protects logInst, logFile and logPath
	logInst *log.Logger // destination logger; nil until Init runs
	logFile *os.File    // open log file handle when file output is active
	logPath string      // path of the active log file
)
|
||||
|
||||
func Init() {
|
||||
once.Do(func() {
|
||||
path, out := initOutput()
|
||||
logMu.Lock()
|
||||
defer logMu.Unlock()
|
||||
logPath = path
|
||||
logInst = log.New(out, "", log.Ldate|log.Ltime|log.Lmicroseconds)
|
||||
logInst.Printf("[信息] 日志初始化完成,日志文件:%s", logPath)
|
||||
})
|
||||
}
|
||||
|
||||
func Path() string {
|
||||
Init()
|
||||
logMu.Lock()
|
||||
defer logMu.Unlock()
|
||||
return logPath
|
||||
}
|
||||
|
||||
func Close() {
|
||||
Init()
|
||||
logMu.Lock()
|
||||
defer logMu.Unlock()
|
||||
if logInst != nil {
|
||||
logInst.SetOutput(os.Stderr)
|
||||
}
|
||||
if logFile != nil {
|
||||
_ = logFile.Close()
|
||||
logFile = nil
|
||||
}
|
||||
}
|
||||
|
||||
func Infof(format string, args ...any) {
|
||||
printf("信息", format, args...)
|
||||
}
|
||||
|
||||
func Warnf(format string, args ...any) {
|
||||
printf("警告", format, args...)
|
||||
}
|
||||
|
||||
func Errorf(format string, args ...any) {
|
||||
printf("错误", format, args...)
|
||||
}
|
||||
|
||||
func Error(err error, format string, args ...any) {
|
||||
msg := fmt.Sprintf(format, args...)
|
||||
if err == nil {
|
||||
Errorf("%s", msg)
|
||||
return
|
||||
}
|
||||
Errorf("%s;错误链:%s", msg, ErrorChain(err))
|
||||
}
|
||||
|
||||
func ErrorChain(err error) string {
|
||||
if err == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
var parts []string
|
||||
seen := map[string]struct{}{}
|
||||
cur := err
|
||||
truncated := false
|
||||
for i := 0; cur != nil && i < 20; i++ {
|
||||
s := cur.Error()
|
||||
if _, ok := seen[s]; !ok {
|
||||
seen[s] = struct{}{}
|
||||
parts = append(parts, s)
|
||||
}
|
||||
cur = errors.Unwrap(cur)
|
||||
}
|
||||
if cur != nil {
|
||||
truncated = true
|
||||
}
|
||||
|
||||
if len(parts) == 0 {
|
||||
return err.Error()
|
||||
}
|
||||
if truncated {
|
||||
parts = append(parts, "(错误链过长,已截断)")
|
||||
}
|
||||
return strings.Join(parts, " -> ")
|
||||
}
|
||||
|
||||
func printf(level string, format string, args ...any) {
|
||||
Init()
|
||||
logMu.Lock()
|
||||
inst := logInst
|
||||
logMu.Unlock()
|
||||
if inst == nil {
|
||||
return
|
||||
}
|
||||
inst.Printf("[%s] %s", level, fmt.Sprintf(format, args...))
|
||||
}
|
||||
|
||||
// initOutput resolves the log directory, rotates an oversized existing log,
// and opens the active log file. It returns the log file path together with
// the writer to log to; on any filesystem failure it degrades to os.Stderr so
// logging never becomes fatal. Called exactly once, from Init.
func initOutput() (string, io.Writer) {
	// Explicit env override wins; otherwise use the user config dir, with the
	// temp dir as a last resort.
	dir := strings.TrimSpace(os.Getenv(envLogDir))
	if dir == "" {
		base, err := os.UserConfigDir()
		if err != nil || strings.TrimSpace(base) == "" {
			base = os.TempDir()
		}
		dir = filepath.Join(base, appDirName, "logs")
	}

	if err := os.MkdirAll(dir, 0o755); err != nil {
		// Directory cannot be created: fall back to stderr.
		return filepath.Join(dir, logFileName), os.Stderr
	}

	path := filepath.Join(dir, logFileName)
	// Rotate before opening so an oversized previous log is archived first.
	rotateIfNeeded(path, dir)

	f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
	if err != nil {
		return path, os.Stderr
	}
	logFile = f
	return path, f
}
|
||||
|
||||
func rotateIfNeeded(path, dir string) {
|
||||
fi, err := os.Stat(path)
|
||||
if err != nil || fi.IsDir() {
|
||||
return
|
||||
}
|
||||
if fi.Size() < logRotateMaxBytes {
|
||||
return
|
||||
}
|
||||
|
||||
ts := time.Now().Format("20060102-150405")
|
||||
rotated := filepath.Join(dir, fmt.Sprintf("gonavi-%s.log", ts))
|
||||
if err := os.Rename(path, rotated); err != nil {
|
||||
return
|
||||
}
|
||||
cleanupOldLogs(dir)
|
||||
}
|
||||
|
||||
func cleanupOldLogs(dir string) {
|
||||
entries, err := os.ReadDir(dir)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
type item struct {
|
||||
name string
|
||||
path string
|
||||
}
|
||||
var logs []item
|
||||
for _, e := range entries {
|
||||
if e.IsDir() {
|
||||
continue
|
||||
}
|
||||
name := e.Name()
|
||||
if !strings.HasPrefix(name, "gonavi-") || !strings.HasSuffix(name, ".log") {
|
||||
continue
|
||||
}
|
||||
logs = append(logs, item{name: name, path: filepath.Join(dir, name)})
|
||||
}
|
||||
|
||||
sort.Slice(logs, func(i, j int) bool { return logs[i].name > logs[j].name })
|
||||
if len(logs) <= logRotateMaxBackups {
|
||||
return
|
||||
}
|
||||
for _, it := range logs[logRotateMaxBackups:] {
|
||||
_ = os.Remove(it.path)
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
|
||||
"github.com/go-sql-driver/mysql"
|
||||
"golang.org/x/crypto/ssh"
|
||||
@@ -19,18 +20,49 @@ type ViaSSHDialer struct {
|
||||
}
|
||||
|
||||
func (d *ViaSSHDialer) Dial(ctx context.Context, addr string) (net.Conn, error) {
|
||||
return d.sshClient.Dial("tcp", addr)
|
||||
return dialContext(ctx, d.sshClient, "tcp", addr)
|
||||
}
|
||||
|
||||
func dialContext(ctx context.Context, client *ssh.Client, network, addr string) (net.Conn, error) {
|
||||
type result struct {
|
||||
conn net.Conn
|
||||
err error
|
||||
}
|
||||
|
||||
ch := make(chan result, 1)
|
||||
go func() {
|
||||
c, err := client.Dial(network, addr)
|
||||
ch <- result{conn: c, err: err}
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
go func() {
|
||||
r := <-ch
|
||||
if r.conn != nil {
|
||||
_ = r.conn.Close()
|
||||
}
|
||||
}()
|
||||
return nil, ctx.Err()
|
||||
case r := <-ch:
|
||||
return r.conn, r.err
|
||||
}
|
||||
}
|
||||
|
||||
// connectSSH establishes an SSH connection and returns a Dialer
|
||||
func connectSSH(config connection.SSHConfig) (*ssh.Client, error) {
|
||||
logger.Infof("开始建立 SSH 连接:地址=%s:%d 用户=%s", config.Host, config.Port, config.User)
|
||||
authMethods := []ssh.AuthMethod{}
|
||||
|
||||
if config.KeyPath != "" {
|
||||
key, err := os.ReadFile(config.KeyPath)
|
||||
if err == nil {
|
||||
if err != nil {
|
||||
logger.Warnf("读取 SSH 私钥失败:路径=%s,原因:%v", config.KeyPath, err)
|
||||
} else {
|
||||
signer, err := ssh.ParsePrivateKey(key)
|
||||
if err == nil {
|
||||
if err != nil {
|
||||
logger.Warnf("解析 SSH 私钥失败:路径=%s,原因:%v", config.KeyPath, err)
|
||||
} else {
|
||||
authMethods = append(authMethods, ssh.PublicKeys(signer))
|
||||
}
|
||||
}
|
||||
@@ -39,6 +71,9 @@ func connectSSH(config connection.SSHConfig) (*ssh.Client, error) {
|
||||
if config.Password != "" {
|
||||
authMethods = append(authMethods, ssh.Password(config.Password))
|
||||
}
|
||||
if len(authMethods) == 0 {
|
||||
logger.Warnf("SSH 未配置认证方式(密码或私钥)")
|
||||
}
|
||||
|
||||
sshConfig := &ssh.ClientConfig{
|
||||
User: config.User,
|
||||
@@ -48,7 +83,13 @@ func connectSSH(config connection.SSHConfig) (*ssh.Client, error) {
|
||||
}
|
||||
|
||||
addr := fmt.Sprintf("%s:%d", config.Host, config.Port)
|
||||
return ssh.Dial("tcp", addr, sshConfig)
|
||||
client, err := ssh.Dial("tcp", addr, sshConfig)
|
||||
if err != nil {
|
||||
logger.Error(err, "SSH 连接建立失败:地址=%s 用户=%s", addr, config.User)
|
||||
return nil, err
|
||||
}
|
||||
logger.Infof("SSH 连接建立成功:地址=%s 用户=%s", addr, config.User)
|
||||
return client, nil
|
||||
}
|
||||
|
||||
// RegisterSSHNetwork registers a unique network name for a specific SSH tunnel
|
||||
@@ -61,9 +102,10 @@ func RegisterSSHNetwork(sshConfig connection.SSHConfig) (string, error) {
|
||||
|
||||
// Generate unique network name
|
||||
netName := fmt.Sprintf("ssh_%s_%d", sshConfig.Host, time.Now().UnixNano())
|
||||
logger.Infof("注册 SSH 网络:%s(地址=%s:%d 用户=%s)", netName, sshConfig.Host, sshConfig.Port, sshConfig.User)
|
||||
|
||||
mysql.RegisterDialContext(netName, func(ctx context.Context, addr string) (net.Conn, error) {
|
||||
return client.Dial("tcp", addr)
|
||||
return dialContext(ctx, client, "tcp", addr)
|
||||
})
|
||||
|
||||
return netName, nil
|
||||
|
||||
198
internal/sync/analyze.go
Normal file
198
internal/sync/analyze.go
Normal file
@@ -0,0 +1,198 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TableDiffSummary describes the analysis outcome for one table: whether it
// can be synchronized and, when data was compared, how many rows would be
// inserted, updated, deleted, or left unchanged.
type TableDiffSummary struct {
	Table     string `json:"table"`
	PKColumn  string `json:"pkColumn,omitempty"` // single primary-key column used for row matching
	CanSync   bool   `json:"canSync"`            // false when the table cannot be compared (e.g. no or composite PK)
	Inserts   int    `json:"inserts"`
	Updates   int    `json:"updates"`
	Deletes   int    `json:"deletes"`
	Same      int    `json:"same"`
	Message   string `json:"message,omitempty"`   // human-readable reason when analysis was skipped or failed
	HasSchema bool   `json:"hasSchema,omitempty"` // true when schema sync was requested for this run
}

// SyncAnalyzeResult is the aggregate outcome of SyncEngine.Analyze across all
// requested tables.
type SyncAnalyzeResult struct {
	Success bool               `json:"success"`
	Message string             `json:"message"`
	Tables  []TableDiffSummary `json:"tables"`
}
|
||||
|
||||
// Analyze performs a read-only diff between the source and target databases
// for every table in config.Tables; nothing is written. Depending on
// config.Content ("data", "schema", "both"; unknown values fall back to
// data-only with a warning) it reports, per table, how many rows would be
// inserted, updated, deleted, or kept. Only tables with exactly one
// primary-key column are comparable; others are marked CanSync=false with a
// reason in Message. Connection failures abort the whole analysis.
func (s *SyncEngine) Analyze(config SyncConfig) SyncAnalyzeResult {
	result := SyncAnalyzeResult{Success: true, Tables: []TableDiffSummary{}}

	// Decode the requested sync content into the two flags used below.
	contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
	syncSchema := false
	syncData := true
	switch contentRaw {
	case "", "data":
		syncData = true
	case "schema":
		syncSchema = true
		syncData = false
	case "both":
		syncSchema = true
		syncData = true
	default:
		s.appendLog(config.JobID, nil, "warn", fmt.Sprintf("未知同步内容 %q,已自动使用仅同步数据", config.Content))
		syncData = true
	}

	totalTables := len(config.Tables)
	s.progress(config.JobID, 0, totalTables, "", "差异分析开始")

	// Instantiate drivers for both sides; failure here aborts the run.
	sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
	if err != nil {
		logger.Error(err, "初始化源数据库驱动失败:类型=%s", config.SourceConfig.Type)
		return SyncAnalyzeResult{Success: false, Message: "初始化源数据库驱动失败: " + err.Error()}
	}
	targetDB, err := db.NewDatabase(config.TargetConfig.Type)
	if err != nil {
		logger.Error(err, "初始化目标数据库驱动失败:类型=%s", config.TargetConfig.Type)
		return SyncAnalyzeResult{Success: false, Message: "初始化目标数据库驱动失败: " + err.Error()}
	}

	// Connect Source
	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		logger.Error(err, "源数据库连接失败:%s", formatConnSummaryForSync(config.SourceConfig))
		return SyncAnalyzeResult{Success: false, Message: "源数据库连接失败: " + err.Error()}
	}
	defer sourceDB.Close()

	// Connect Target
	if err := targetDB.Connect(config.TargetConfig); err != nil {
		logger.Error(err, "目标数据库连接失败:%s", formatConnSummaryForSync(config.TargetConfig))
		return SyncAnalyzeResult{Success: false, Message: "目标数据库连接失败: " + err.Error()}
	}
	defer targetDB.Close()

	for i, tableName := range config.Tables {
		// Each table is analyzed in a closure so early exits via `return`
		// skip just this table, not the whole loop.
		func() {
			s.progress(config.JobID, i, totalTables, tableName, fmt.Sprintf("分析表(%d/%d)", i+1, totalTables))

			summary := TableDiffSummary{
				Table:     tableName,
				CanSync:   false,
				Inserts:   0,
				Updates:   0,
				Deletes:   0,
				Same:      0,
				Message:   "",
				HasSchema: syncSchema,
			}

			// Resolve schema-qualified names for both sides.
			sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
			targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
			sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
			targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

			cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
			if err != nil {
				summary.Message = "获取源表字段失败: " + err.Error()
				result.Tables = append(result.Tables, summary)
				return
			}

			// Schema-only runs skip the data diff entirely.
			if !syncData {
				summary.CanSync = true
				summary.Message = "仅同步结构,未执行数据差异分析"
				result.Tables = append(result.Tables, summary)
				return
			}

			// Data diff requires exactly one primary-key column.
			pkCols := make([]string, 0, 2)
			for _, c := range cols {
				if c.Key == "PRI" || c.Key == "PK" {
					pkCols = append(pkCols, c.Name)
				}
			}
			if len(pkCols) == 0 {
				summary.Message = "无主键,不支持数据对比/同步"
				result.Tables = append(result.Tables, summary)
				return
			}
			if len(pkCols) > 1 {
				summary.Message = fmt.Sprintf("复合主键(%s),暂不支持数据对比/同步", strings.Join(pkCols, ","))
				result.Tables = append(result.Tables, summary)
				return
			}
			summary.PKColumn = pkCols[0]

			// Query data for diff
			sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
			if err != nil {
				summary.Message = "读取源表失败: " + err.Error()
				result.Tables = append(result.Tables, summary)
				return
			}
			targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
			if err != nil {
				summary.Message = "读取目标表失败: " + err.Error()
				result.Tables = append(result.Tables, summary)
				return
			}

			// Index target rows by stringified PK; rows with a missing or
			// blank PK are skipped on both sides.
			pkCol := summary.PKColumn
			targetMap := make(map[string]map[string]interface{}, len(targetRows))
			for _, row := range targetRows {
				if row[pkCol] == nil {
					continue
				}
				pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
				if pkVal == "" || pkVal == "<nil>" {
					continue
				}
				targetMap[pkVal] = row
			}

			// Walk source rows: match against target by PK, comparing every
			// column via its %v string form.
			sourcePKSet := make(map[string]struct{}, len(sourceRows))
			for _, sRow := range sourceRows {
				if sRow[pkCol] == nil {
					continue
				}
				pkVal := strings.TrimSpace(fmt.Sprintf("%v", sRow[pkCol]))
				if pkVal == "" || pkVal == "<nil>" {
					continue
				}
				sourcePKSet[pkVal] = struct{}{}

				if tRow, exists := targetMap[pkVal]; exists {
					changed := false
					for k, v := range sRow {
						if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
							changed = true
							break
						}
					}
					if changed {
						summary.Updates++
					} else {
						summary.Same++
					}
				} else {
					summary.Inserts++
				}
			}

			// Target rows whose PK never appeared in the source count as deletes.
			for pkVal := range targetMap {
				if _, ok := sourcePKSet[pkVal]; !ok {
					summary.Deletes++
				}
			}

			summary.CanSync = true
			result.Tables = append(result.Tables, summary)
		}()
	}

	s.progress(config.JobID, totalTables, totalTables, "", "差异分析完成")
	result.Message = fmt.Sprintf("已完成 %d 张表的差异分析", len(result.Tables))
	return result
}
|
||||
164
internal/sync/preview.go
Normal file
164
internal/sync/preview.go
Normal file
@@ -0,0 +1,164 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// PreviewRow is one sampled row in a diff preview, keyed by its stringified
// primary-key value.
type PreviewRow struct {
	PK  string                 `json:"pk"`
	Row map[string]interface{} `json:"row"`
}

// PreviewUpdateRow pairs the source and target versions of a row that differ,
// listing which columns changed.
type PreviewUpdateRow struct {
	PK             string                 `json:"pk"`
	ChangedColumns []string               `json:"changedColumns"`
	Source         map[string]interface{} `json:"source"`
	Target         map[string]interface{} `json:"target"`
}

// TableDiffPreview carries full change counts for a table plus capped sample
// rows for each change category.
type TableDiffPreview struct {
	Table        string             `json:"table"`
	PKColumn     string             `json:"pkColumn"`
	TotalInserts int                `json:"totalInserts"`
	TotalUpdates int                `json:"totalUpdates"`
	TotalDeletes int                `json:"totalDeletes"`
	Inserts      []PreviewRow       `json:"inserts"`
	Updates      []PreviewUpdateRow `json:"updates"`
	Deletes      []PreviewRow       `json:"deletes"`
}
|
||||
|
||||
// Preview computes a detailed row-level diff for a single table without
// writing anything. It returns full change counts plus up to limit sample
// rows per change category (inserts/updates/deletes). limit defaults to 200
// and is capped at 500. Rows are matched on a single primary-key column;
// tables with no PK or a composite PK are rejected with an error.
func (s *SyncEngine) Preview(config SyncConfig, tableName string, limit int) (TableDiffPreview, error) {
	if limit <= 0 {
		limit = 200
	}
	if limit > 500 {
		limit = 500
	}

	sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("初始化源数据库驱动失败: %w", err)
	}
	targetDB, err := db.NewDatabase(config.TargetConfig.Type)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("初始化目标数据库驱动失败: %w", err)
	}

	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		return TableDiffPreview{}, fmt.Errorf("源数据库连接失败: %w", err)
	}
	defer sourceDB.Close()

	if err := targetDB.Connect(config.TargetConfig); err != nil {
		return TableDiffPreview{}, fmt.Errorf("目标数据库连接失败: %w", err)
	}
	defer targetDB.Close()

	// Resolve schema-qualified names for both sides.
	sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
	targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
	sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
	targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

	cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("获取源表字段失败: %w", err)
	}

	// Only tables with exactly one primary-key column are supported.
	pkCols := make([]string, 0, 2)
	for _, c := range cols {
		if c.Key == "PRI" || c.Key == "PK" {
			pkCols = append(pkCols, c.Name)
		}
	}
	if len(pkCols) == 0 {
		return TableDiffPreview{}, fmt.Errorf("无主键,不支持数据预览")
	}
	if len(pkCols) > 1 {
		return TableDiffPreview{}, fmt.Errorf("复合主键(%s),暂不支持数据预览", strings.Join(pkCols, ","))
	}
	pkCol := pkCols[0]

	// Full-table reads on both sides; the diff happens in memory.
	sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("读取源表失败: %w", err)
	}
	targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("读取目标表失败: %w", err)
	}

	// Index target rows by stringified PK; rows with a missing or blank PK
	// are skipped on both sides.
	targetMap := make(map[string]map[string]interface{}, len(targetRows))
	for _, row := range targetRows {
		if row[pkCol] == nil {
			continue
		}
		pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
		if pkVal == "" || pkVal == "<nil>" {
			continue
		}
		targetMap[pkVal] = row
	}

	out := TableDiffPreview{
		Table:        tableName,
		PKColumn:     pkCol,
		TotalInserts: 0,
		TotalUpdates: 0,
		TotalDeletes: 0,
		Inserts:      make([]PreviewRow, 0),
		Updates:      make([]PreviewUpdateRow, 0),
		Deletes:      make([]PreviewRow, 0),
	}

	// Walk source rows: present in target -> compare column by column via %v
	// string form; absent -> insert. Sample slices are capped at limit while
	// the Total* counters keep exact counts.
	sourcePKSet := make(map[string]struct{}, len(sourceRows))
	for _, sRow := range sourceRows {
		if sRow[pkCol] == nil {
			continue
		}
		pkVal := strings.TrimSpace(fmt.Sprintf("%v", sRow[pkCol]))
		if pkVal == "" || pkVal == "<nil>" {
			continue
		}
		sourcePKSet[pkVal] = struct{}{}

		if tRow, exists := targetMap[pkVal]; exists {
			changedColumns := make([]string, 0)
			for k, v := range sRow {
				if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
					changedColumns = append(changedColumns, k)
				}
			}
			if len(changedColumns) > 0 {
				out.TotalUpdates++
				if len(out.Updates) < limit {
					out.Updates = append(out.Updates, PreviewUpdateRow{
						PK:             pkVal,
						ChangedColumns: changedColumns,
						Source:         sRow,
						Target:         tRow,
					})
				}
			}
			continue
		}

		out.TotalInserts++
		if len(out.Inserts) < limit {
			out.Inserts = append(out.Inserts, PreviewRow{PK: pkVal, Row: sRow})
		}
	}

	// Target rows whose PK never appeared in the source are deletes.
	for pkVal, row := range targetMap {
		if _, ok := sourcePKSet[pkVal]; ok {
			continue
		}
		out.TotalDeletes++
		if len(out.Deletes) < limit {
			out.Deletes = append(out.Deletes, PreviewRow{PK: pkVal, Row: row})
		}
	}

	return out, nil
}
|
||||
58
internal/sync/row_selection.go
Normal file
58
internal/sync/row_selection.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// filterRowsByPKSelection keeps only the rows whose primary-key value (in %v
// string form) appears in selectedPKs. With enabled=false it returns nil;
// with no rows or an empty selection it returns rows unchanged.
func filterRowsByPKSelection(pkCol string, rows []map[string]interface{}, enabled bool, selectedPKs []string) []map[string]interface{} {
	switch {
	case !enabled:
		return nil
	case len(rows) == 0 || len(selectedPKs) == 0:
		return rows
	}

	wanted := make(map[string]struct{}, len(selectedPKs))
	for _, pk := range selectedPKs {
		wanted[pk] = struct{}{}
	}

	kept := make([]map[string]interface{}, 0, len(rows))
	for _, row := range rows {
		key := fmt.Sprintf("%v", row[pkCol])
		if _, hit := wanted[key]; hit {
			kept = append(kept, row)
		}
	}
	return kept
}
|
||||
|
||||
func filterUpdatesByPKSelection(pkCol string, updates []connection.UpdateRow, enabled bool, selectedPKs []string) []connection.UpdateRow {
|
||||
if !enabled {
|
||||
return nil
|
||||
}
|
||||
if len(updates) == 0 {
|
||||
return updates
|
||||
}
|
||||
if len(selectedPKs) == 0 {
|
||||
return updates
|
||||
}
|
||||
|
||||
set := make(map[string]struct{}, len(selectedPKs))
|
||||
for _, pk := range selectedPKs {
|
||||
set[pk] = struct{}{}
|
||||
}
|
||||
|
||||
out := make([]connection.UpdateRow, 0, len(updates))
|
||||
for _, u := range updates {
|
||||
pkStr := fmt.Sprintf("%v", u.Keys[pkCol])
|
||||
if _, ok := set[pkStr]; ok {
|
||||
out = append(out, u)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
97
internal/sync/schema_align.go
Normal file
97
internal/sync/schema_align.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func collectRequiredColumns(inserts []map[string]interface{}, updates []connection.UpdateRow) map[string]string {
|
||||
// key: lower(columnName), value: original columnName
|
||||
required := make(map[string]string)
|
||||
for _, row := range inserts {
|
||||
for k := range row {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := required[key]; !exists {
|
||||
required[key] = k
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, u := range updates {
|
||||
for k := range u.Values {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := required[key]; !exists {
|
||||
required[key] = k
|
||||
}
|
||||
}
|
||||
}
|
||||
return required
|
||||
}
|
||||
|
||||
// filterInsertRows projects each insert row down to the allowed column set
// (keys compared case-insensitively after trimming). Empty rows pass through
// untouched; with no rows or no allowed set, the input is returned as-is.
func filterInsertRows(inserts []map[string]interface{}, allowedLower map[string]struct{}) []map[string]interface{} {
	if len(inserts) == 0 || len(allowedLower) == 0 {
		return inserts
	}

	filtered := make([]map[string]interface{}, 0, len(inserts))
	for _, row := range inserts {
		if len(row) == 0 {
			filtered = append(filtered, row)
			continue
		}
		projected := make(map[string]interface{}, len(row))
		for col, val := range row {
			key := strings.ToLower(strings.TrimSpace(col))
			if _, allowed := allowedLower[key]; allowed {
				projected[col] = val
			}
		}
		filtered = append(filtered, projected)
	}
	return filtered
}
|
||||
|
||||
func filterUpdateRows(updates []connection.UpdateRow, allowedLower map[string]struct{}) []connection.UpdateRow {
|
||||
if len(updates) == 0 || len(allowedLower) == 0 {
|
||||
return updates
|
||||
}
|
||||
|
||||
out := make([]connection.UpdateRow, 0, len(updates))
|
||||
for _, u := range updates {
|
||||
if len(u.Values) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
values := make(map[string]interface{}, len(u.Values))
|
||||
for k, v := range u.Values {
|
||||
if _, ok := allowedLower[strings.ToLower(strings.TrimSpace(k))]; ok {
|
||||
values[k] = v
|
||||
}
|
||||
}
|
||||
if len(values) == 0 {
|
||||
continue
|
||||
}
|
||||
out = append(out, connection.UpdateRow{
|
||||
Keys: u.Keys,
|
||||
Values: values,
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// sanitizeMySQLColumnType returns t trimmed, substituting the safe fallback
// "TEXT" when the value is empty or contains characters (backtick, semicolon,
// newlines) that could break out of a DDL statement.
func sanitizeMySQLColumnType(t string) string {
	cleaned := strings.TrimSpace(t)
	switch {
	case cleaned == "":
		return "TEXT"
	case strings.ContainsAny(cleaned, "`;\n\r"):
		// Defensive: never splice suspicious metadata into SQL.
		return "TEXT"
	default:
		return cleaned
	}
}
|
||||
101
internal/sync/schema_sync.go
Normal file
101
internal/sync/schema_sync.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// syncTableSchema aligns the target table's structure with the source table:
// it creates the target table from the source's CREATE statement when the
// target is missing (MySQL source only), then adds any source columns the
// target lacks as NULL-able columns. Only MySQL targets are supported; other
// target types are skipped with a warning. Per-column ALTER failures are
// logged and skipped rather than aborting the table.
func (s *SyncEngine) syncTableSchema(config SyncConfig, res *SyncResult, sourceDB db.Database, targetDB db.Database, tableName string) error {
	targetType := strings.ToLower(strings.TrimSpace(config.TargetConfig.Type))
	if targetType != "mysql" {
		s.appendLog(config.JobID, res, "warn", fmt.Sprintf("目标数据库类型=%s 暂不支持结构同步,已跳过表 %s", config.TargetConfig.Type, tableName))
		return nil
	}

	sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
	targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
	targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

	// 1) Fetch the source table's columns.
	sourceCols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
	if err != nil {
		return fmt.Errorf("获取源表字段失败: %w", err)
	}

	// 2) Ensure the target table exists. A GetColumns failure is treated as
	// "table missing" and triggers an auto-create from the source's CREATE
	// statement (only possible when the source is MySQL).
	targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
	if err != nil {
		sourceType := strings.ToLower(strings.TrimSpace(config.SourceConfig.Type))
		if sourceType != "mysql" {
			return fmt.Errorf("目标表不存在且源类型=%s 暂不支持自动建表: %w", config.SourceConfig.Type, err)
		}

		s.appendLog(config.JobID, res, "warn", fmt.Sprintf("目标表 %s 不存在,开始尝试创建表结构", tableName))
		createSQL, errCreate := sourceDB.GetCreateStatement(sourceSchema, sourceTable)
		if errCreate != nil || strings.TrimSpace(createSQL) == "" {
			if errCreate == nil {
				errCreate = fmt.Errorf("建表语句为空")
			}
			return fmt.Errorf("获取源表建表语句失败: %w", errCreate)
		}

		if _, errExec := targetDB.Exec(createSQL); errExec != nil {
			return fmt.Errorf("创建目标表失败: %w", errExec)
		}
		s.appendLog(config.JobID, res, "info", fmt.Sprintf("目标表创建成功:%s", tableName))

		// Re-read columns so the gap check below sees the freshly created table.
		targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
		if err != nil {
			return fmt.Errorf("创建目标表后获取字段失败: %w", err)
		}
	}

	// Index existing target columns by lower-cased name for the gap check.
	targetColSet := make(map[string]struct{}, len(targetCols))
	for _, c := range targetCols {
		name := strings.ToLower(strings.TrimSpace(c.Name))
		if name == "" {
			continue
		}
		targetColSet[name] = struct{}{}
	}

	// 3) Add columns the target lacks (safety policy: added columns allow NULL).
	missing := make([]string, 0)
	sourceType := strings.ToLower(strings.TrimSpace(config.SourceConfig.Type))
	for _, c := range sourceCols {
		colName := strings.TrimSpace(c.Name)
		if colName == "" {
			continue
		}
		lower := strings.ToLower(colName)
		if _, ok := targetColSet[lower]; ok {
			continue
		}
		missing = append(missing, colName)

		// Column types from non-MySQL sources are not mapped; fall back to TEXT.
		colType := "TEXT"
		if sourceType == "mysql" {
			colType = sanitizeMySQLColumnType(c.Type)
		}

		alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
			quoteQualifiedIdentByType("mysql", targetQueryTable),
			quoteIdentByType("mysql", colName),
			colType,
		)
		if _, err := targetDB.Exec(alterSQL); err != nil {
			s.appendLog(config.JobID, res, "error", fmt.Sprintf(" -> 补字段失败:表=%s 字段=%s 错误=%v", tableName, colName, err))
			continue
		}
		s.appendLog(config.JobID, res, "info", fmt.Sprintf(" -> 已补齐字段:表=%s 字段=%s 类型=%s", tableName, colName, colType))
	}

	if len(missing) == 0 {
		s.appendLog(config.JobID, res, "info", fmt.Sprintf("表结构一致:%s", tableName))
	} else {
		s.appendLog(config.JobID, res, "info", fmt.Sprintf("表结构同步完成:%s(新增字段 %d 个)", tableName, len(missing)))
	}

	return nil
}
|
||||
109
internal/sync/sql_helpers.go
Normal file
109
internal/sync/sql_helpers.go
Normal file
@@ -0,0 +1,109 @@
|
||||
package sync
|
||||
|
||||
import "strings"
|
||||
|
||||
// normalizeSyncMode maps a user-supplied mode string to one of the three
// canonical sync modes. Empty or unrecognized input defaults to
// "insert_update".
func normalizeSyncMode(mode string) string {
	switch strings.ToLower(strings.TrimSpace(mode)) {
	case "insert_only":
		return "insert_only"
	case "full_overwrite":
		return "full_overwrite"
	default:
		return "insert_update"
	}
}
|
||||
|
||||
// quoteIdentByType wraps a single identifier in the quote character for the
// given database type — backticks for MySQL, double quotes otherwise —
// doubling any embedded quote characters. Empty identifiers pass through.
func quoteIdentByType(dbType string, ident string) string {
	if ident == "" {
		return ident
	}

	quote, escaped := `"`, strings.ReplaceAll(ident, `"`, `""`)
	if dbType == "mysql" {
		quote, escaped = "`", strings.ReplaceAll(ident, "`", "``")
	}
	return quote + escaped + quote
}
|
||||
|
||||
func quoteQualifiedIdentByType(dbType string, ident string) string {
|
||||
raw := strings.TrimSpace(ident)
|
||||
if raw == "" {
|
||||
return raw
|
||||
}
|
||||
|
||||
parts := strings.Split(raw, ".")
|
||||
if len(parts) <= 1 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
|
||||
quotedParts := make([]string, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
part = strings.TrimSpace(part)
|
||||
if part == "" {
|
||||
continue
|
||||
}
|
||||
quotedParts = append(quotedParts, quoteIdentByType(dbType, part))
|
||||
}
|
||||
|
||||
if len(quotedParts) == 0 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
return strings.Join(quotedParts, ".")
|
||||
}
|
||||
|
||||
// normalizeSchemaAndTable resolves the (schema, table) pair for a table name
// that may already be qualified as "schema.table". Unqualified names fall
// back to "public" for PostgreSQL-family databases (postgres, kingbase) and
// to the database name otherwise.
func normalizeSchemaAndTable(dbType string, dbName string, tableName string) (string, string) {
	table := strings.TrimSpace(tableName)
	database := strings.TrimSpace(dbName)
	if table == "" {
		return database, table
	}

	// An explicit "schema.table" qualification wins when both halves are non-blank.
	if schemaPart, tablePart, found := strings.Cut(table, "."); found {
		schemaPart = strings.TrimSpace(schemaPart)
		tablePart = strings.TrimSpace(tablePart)
		if schemaPart != "" && tablePart != "" {
			return schemaPart, tablePart
		}
	}

	if kind := strings.ToLower(strings.TrimSpace(dbType)); kind == "postgres" || kind == "kingbase" {
		return "public", table
	}
	return database, table
}
|
||||
|
||||
// qualifiedNameForQuery builds the table reference to embed in SQL. An
// already-qualified (or empty) original name is used verbatim; otherwise the
// schema is prepended following the database type's conventions (postgres /
// kingbase default the schema to "public"; unknown types use the bare table).
func qualifiedNameForQuery(dbType string, schema string, table string, original string) string {
	raw := strings.TrimSpace(original)
	if raw == "" || strings.Contains(raw, ".") {
		return raw
	}

	kind := strings.ToLower(strings.TrimSpace(dbType))
	trimmedSchema := strings.TrimSpace(schema)

	if kind == "postgres" || kind == "kingbase" {
		if trimmedSchema == "" {
			trimmedSchema = "public"
		}
		if table == "" {
			return raw
		}
		return trimmedSchema + "." + table
	}
	if kind == "mysql" {
		if trimmedSchema == "" || table == "" {
			return table
		}
		return trimmedSchema + "." + table
	}
	return table
}
|
||||
556
internal/sync/sync_engine.go
Normal file
556
internal/sync/sync_engine.go
Normal file
@@ -0,0 +1,556 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// SyncConfig defines the parameters for a synchronization task between a
// source and a target database connection.
type SyncConfig struct {
	SourceConfig connection.ConnectionConfig `json:"sourceConfig"`
	TargetConfig connection.ConnectionConfig `json:"targetConfig"`
	Tables       []string                    `json:"tables"`            // Tables to sync
	Content      string                      `json:"content,omitempty"` // "data", "schema", "both"
	Mode         string                      `json:"mode"`              // "insert_update", "insert_only", "full_overwrite"
	JobID        string                      `json:"jobId,omitempty"`
	// AutoAddColumns automatically adds missing target columns
	// (currently only supported for MySQL targets).
	AutoAddColumns bool                    `json:"autoAddColumns,omitempty"`
	TableOptions   map[string]TableOptions `json:"tableOptions,omitempty"`
}
|
||||
|
||||
// SyncResult holds the result of the sync operation: overall status, the
// accumulated human-readable log, and per-operation row counters aggregated
// across all processed tables.
type SyncResult struct {
	Success      bool     `json:"success"`
	Message      string   `json:"message"`
	Logs         []string `json:"logs"`
	TablesSynced int      `json:"tablesSynced"`
	RowsInserted int      `json:"rowsInserted"`
	RowsUpdated  int      `json:"rowsUpdated"`
	RowsDeleted  int      `json:"rowsDeleted"`
}
|
||||
|
||||
// SyncEngine runs synchronization jobs and reports log/progress events
// through the configured Reporter callbacks.
type SyncEngine struct {
	reporter Reporter
}
|
||||
|
||||
func NewSyncEngine(reporter Reporter) *SyncEngine {
|
||||
return &SyncEngine{reporter: reporter}
|
||||
}
|
||||
|
||||
// RunSync performs one synchronization pass from source to target as
// described by config: it connects both databases and processes each listed
// table, syncing schema, data, or both.
//
// Fatal setup failures (driver init, connection) abort the run with
// Success=false; per-table errors are logged and that table is skipped so
// the remaining tables still get processed. Counters in the returned
// SyncResult are aggregated across all tables.
func (s *SyncEngine) RunSync(config SyncConfig) SyncResult {
	result := SyncResult{Success: true, Logs: []string{}}
	logger.Infof("开始数据同步:源=%s 目标=%s 表数量=%d", formatConnSummaryForSync(config.SourceConfig), formatConnSummaryForSync(config.TargetConfig), len(config.Tables))
	totalTables := len(config.Tables)
	s.progress(config.JobID, 0, totalTables, "", "开始同步")

	// Decide what to sync. Empty content means data-only; unknown values
	// also fall back to data-only, with a warning.
	contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
	syncSchema := false
	syncData := true
	switch contentRaw {
	case "", "data":
		syncData = true
	case "schema":
		syncSchema = true
		syncData = false
	case "both":
		syncSchema = true
		syncData = true
	default:
		s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("未知同步内容 %q,已自动使用仅同步数据", config.Content))
		syncData = true
	}

	// Warn on an unrecognized mode; the effective mode comes from
	// normalizeSyncMode (defined elsewhere — presumably defaults to
	// insert_update, per the warning text).
	modeRaw := strings.ToLower(strings.TrimSpace(config.Mode))
	if modeRaw != "" && modeRaw != "insert_update" && modeRaw != "insert_only" && modeRaw != "full_overwrite" {
		s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("未知同步模式 %q,已自动使用 insert_update", config.Mode))
	}
	defaultMode := normalizeSyncMode(config.Mode)

	contentLabel := "仅同步数据"
	if syncSchema && syncData {
		contentLabel = "同步结构+数据"
	} else if syncSchema {
		contentLabel = "仅同步结构"
	}
	s.appendLog(config.JobID, &result, "info", fmt.Sprintf("同步内容:%s;模式:%s;自动补字段:%v", contentLabel, defaultMode, config.AutoAddColumns))

	// Instantiate both drivers before opening any connection so that a bad
	// target type fails fast without touching the source.
	sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
	if err != nil {
		logger.Error(err, "初始化源数据库驱动失败:类型=%s", config.SourceConfig.Type)
		return s.fail(config.JobID, totalTables, result, "初始化源数据库驱动失败: "+err.Error())
	}
	if config.SourceConfig.Type == "custom" {
		// Custom DB setup would go here if needed
	}

	targetDB, err := db.NewDatabase(config.TargetConfig.Type)
	if err != nil {
		logger.Error(err, "初始化目标数据库驱动失败:类型=%s", config.TargetConfig.Type)
		return s.fail(config.JobID, totalTables, result, "初始化目标数据库驱动失败: "+err.Error())
	}

	// Connect Source
	s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在连接源数据库: %s...", config.SourceConfig.Host))
	s.progress(config.JobID, 0, totalTables, "", "连接源数据库")
	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		logger.Error(err, "源数据库连接失败:%s", formatConnSummaryForSync(config.SourceConfig))
		return s.fail(config.JobID, totalTables, result, "源数据库连接失败: "+err.Error())
	}
	defer sourceDB.Close()

	// Connect Target
	s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在连接目标数据库: %s...", config.TargetConfig.Host))
	s.progress(config.JobID, 0, totalTables, "", "连接目标数据库")
	if err := targetDB.Connect(config.TargetConfig); err != nil {
		logger.Error(err, "目标数据库连接失败:%s", formatConnSummaryForSync(config.TargetConfig))
		return s.fail(config.JobID, totalTables, result, "目标数据库连接失败: "+err.Error())
	}
	defer targetDB.Close()

	// Iterate Tables. Each table runs inside an anonymous closure so that a
	// bare `return` skips to the next table and the deferred progress update
	// fires at the end of each iteration (not at function exit).
	for i, tableName := range config.Tables {
		func() {
			tableMode := defaultMode
			s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在同步表: %s", tableName))
			s.progress(config.JobID, i, totalTables, tableName, fmt.Sprintf("同步表(%d/%d)", i+1, totalTables))
			defer s.progress(config.JobID, i+1, totalTables, tableName, "表处理完成")

			// Schema first: if structure sync fails, skip the table entirely.
			if syncSchema {
				s.progress(config.JobID, i, totalTables, tableName, "同步表结构")
				if err := s.syncTableSchema(config, &result, sourceDB, targetDB, tableName); err != nil {
					s.appendLog(config.JobID, &result, "error", fmt.Sprintf("表结构同步失败:表=%s 错误=%v", tableName, err))
					return
				}
			}
			if !syncData {
				// Schema-only run: the table is done.
				result.TablesSynced++
				return
			}

			// Resolve schema-qualified names independently per side — source
			// and target may be different database types.
			sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
			targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
			sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
			targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)

			// 1. Get Columns & PKs
			cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
			if err != nil {
				logger.Error(err, "获取源表列信息失败:表=%s", tableName)
				s.appendLog(config.JobID, &result, "error", fmt.Sprintf("获取表 %s 的列信息失败: %v", tableName, err))
				return
			}
			// Case-insensitive index of source columns, used later when
			// auto-adding missing target columns.
			sourceColsByLower := make(map[string]connection.ColumnDefinition, len(cols))
			for _, col := range cols {
				if strings.TrimSpace(col.Name) == "" {
					continue
				}
				sourceColsByLower[strings.ToLower(strings.TrimSpace(col.Name))] = col
			}

			// Collect primary-key columns; drivers report either "PRI"
			// (MySQL style) or "PK".
			pkCols := make([]string, 0, 2)
			for _, col := range cols {
				if col.Key == "PRI" || col.Key == "PK" {
					pkCols = append(pkCols, col.Name)
				}
			}

			// Data sync requires exactly one PK column: no PK would make
			// insert/update matching unsafe, and composite PKs are not
			// supported yet.
			if len(pkCols) == 0 {
				s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("表 %s 未找到主键,已跳过数据同步(避免产生重复数据)", tableName))
				return
			}
			if len(pkCols) > 1 {
				s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("表 %s 为复合主键(%s),当前暂不支持数据同步", tableName, strings.Join(pkCols, ",")))
				return
			}
			pkCol := pkCols[0]

			// Per-table operation flags; defaults allow insert+update only.
			opts := TableOptions{Insert: true, Update: true, Delete: false}
			if config.TableOptions != nil {
				if t, ok := config.TableOptions[tableName]; ok {
					opts = t
					// Default protection: if the user set none of the flags,
					// keep insert/update defaulting to true and delete to false.
					// NOTE(review): this branch re-assigns `opts = t`, which is
					// a no-op — the stated default protection never actually
					// applies; the all-false case is instead skipped below.
					// Confirm intent.
					if !t.Insert && !t.Update && !t.Delete {
						opts = t
					}
				}
			}
			if !opts.Insert && !opts.Update && !opts.Delete {
				s.appendLog(config.JobID, &result, "info", fmt.Sprintf("表 %s 未勾选任何操作,已跳过", tableName))
				return
			}

			// 2. Fetch Data (MEMORY INTENSIVE - PROTOTYPE ONLY)
			// TODO: Implement paging/streaming
			s.progress(config.JobID, i, totalTables, tableName, "读取源表数据")
			sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
			if err != nil {
				logger.Error(err, "读取源表失败:表=%s", tableName)
				s.appendLog(config.JobID, &result, "error", fmt.Sprintf("读取源表 %s 失败: %v", tableName, err))
				return
			}

			var inserts []map[string]interface{}
			var updates []connection.UpdateRow

			if tableMode == "insert_update" {
				// Compare path: load the whole target table and diff by PK.
				s.progress(config.JobID, i, totalTables, tableName, "读取目标表数据")
				targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
				if err != nil {
					logger.Error(err, "读取目标表失败:表=%s", tableName)
					s.appendLog(config.JobID, &result, "error", fmt.Sprintf("读取目标表 %s 失败: %v", tableName, err))
					return
				}

				// 3. Compare (In-Memory Hash Map)
				// Index target rows by the string form of their PK; rows with
				// a nil/blank PK are ignored on both sides.
				s.progress(config.JobID, i, totalTables, tableName, "对比差异")
				targetMap := make(map[string]map[string]interface{})
				for _, row := range targetRows {
					if row[pkCol] == nil {
						continue
					}
					pkVal := fmt.Sprintf("%v", row[pkCol])
					if strings.TrimSpace(pkVal) == "" || pkVal == "<nil>" {
						continue
					}
					targetMap[pkVal] = row
				}
				sourcePKSet := make(map[string]struct{}, len(sourceRows))

				for _, sRow := range sourceRows {
					if sRow[pkCol] == nil {
						continue
					}
					pkVal := fmt.Sprintf("%v", sRow[pkCol])
					if strings.TrimSpace(pkVal) == "" || pkVal == "<nil>" {
						continue
					}
					sourcePKSet[pkVal] = struct{}{}

					if tRow, exists := targetMap[pkVal]; exists {
						// Field-by-field diff using the string rendering of
						// each value (type-insensitive comparison).
						changes := make(map[string]interface{})
						for k, v := range sRow {
							if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
								changes[k] = v
							}
						}
						if len(changes) > 0 {
							updates = append(updates, connection.UpdateRow{
								Keys:   map[string]interface{}{pkCol: sRow[pkCol]},
								Values: changes,
							})
						}
					} else {
						inserts = append(inserts, sRow)
					}
				}

				// Rows present in target but absent from source are delete
				// candidates (only when deletes are enabled for this table).
				var deletes []map[string]interface{}
				if opts.Delete {
					for pkStr, row := range targetMap {
						if _, ok := sourcePKSet[pkStr]; ok {
							continue
						}
						deletes = append(deletes, map[string]interface{}{pkCol: row[pkCol]})
					}
				}

				// apply operation selection
				inserts = filterRowsByPKSelection(pkCol, inserts, opts.Insert, opts.SelectedInsertPKs)
				updates = filterUpdatesByPKSelection(pkCol, updates, opts.Update, opts.SelectedUpdatePKs)
				deletes = filterRowsByPKSelection(pkCol, deletes, opts.Delete, opts.SelectedDeletePKs)

				changeSet := connection.ChangeSet{
					Inserts: inserts,
					Updates: updates,
					Deletes: deletes,
				}

				// 4. Align schema (target missing columns)
				s.progress(config.JobID, i, totalTables, tableName, "检查字段一致性")
				requiredCols := collectRequiredColumns(changeSet.Inserts, changeSet.Updates)
				targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
				if err != nil {
					s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 获取目标表字段失败,已跳过字段一致性检查: %v", err))
				} else {
					targetColSet := make(map[string]struct{}, len(targetCols))
					for _, c := range targetCols {
						name := strings.ToLower(strings.TrimSpace(c.Name))
						if name == "" {
							continue
						}
						targetColSet[name] = struct{}{}
					}

					// Columns the change set needs but the target lacks.
					missing := make([]string, 0)
					for lower, original := range requiredCols {
						if _, ok := targetColSet[lower]; !ok {
							missing = append(missing, original)
						}
					}
					sort.Strings(missing)

					if len(missing) > 0 {
						if config.AutoAddColumns && strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
							// Auto-add is MySQL-target-only. Column type is
							// copied from the source when the source is also
							// MySQL; otherwise TEXT is used as a safe default.
							s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个,开始自动补齐: %s", len(missing), strings.Join(missing, ", ")))
							added := 0
							for _, colName := range missing {
								colLower := strings.ToLower(strings.TrimSpace(colName))
								colType := "TEXT"
								if strings.ToLower(strings.TrimSpace(config.SourceConfig.Type)) == "mysql" {
									if srcCol, ok := sourceColsByLower[colLower]; ok {
										colType = sanitizeMySQLColumnType(srcCol.Type)
									}
								}

								alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
									quoteQualifiedIdentByType("mysql", targetQueryTable),
									quoteIdentByType("mysql", colName),
									colType,
								)
								if _, err := targetDB.Exec(alterSQL); err != nil {
									s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 自动补字段失败:字段=%s 错误=%v", colName, err))
									continue
								}
								added++
							}
							s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 自动补字段完成:成功=%d 失败=%d", added, len(missing)-added))

							// refresh columns
							targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
							if err == nil {
								targetColSet = make(map[string]struct{}, len(targetCols))
								for _, c := range targetCols {
									name := strings.ToLower(strings.TrimSpace(c.Name))
									if name == "" {
										continue
									}
									targetColSet[name] = struct{}{}
								}
							}
						} else {
							s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个(未开启自动补齐),将自动忽略:%s", len(missing), strings.Join(missing, ", ")))
						}

						// filter out still-missing columns to avoid apply failure
						changeSet.Inserts = filterInsertRows(changeSet.Inserts, targetColSet)
						changeSet.Updates = filterUpdateRows(changeSet.Updates, targetColSet)
					}
				}

				// 5. Apply Changes
				s.progress(config.JobID, i, totalTables, tableName, "应用变更")

				if len(changeSet.Inserts) > 0 || len(changeSet.Updates) > 0 || len(changeSet.Deletes) > 0 {
					s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行, 需删除: %d 行", len(changeSet.Inserts), len(changeSet.Updates), len(changeSet.Deletes)))

					// Only drivers implementing BatchApplier can apply diffs.
					if applier, ok := targetDB.(db.BatchApplier); ok {
						if err := applier.ApplyChanges(targetTable, changeSet); err != nil {
							s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 应用变更失败: %v", err))
						} else {
							result.RowsInserted += len(changeSet.Inserts)
							result.RowsUpdated += len(changeSet.Updates)
							result.RowsDeleted += len(changeSet.Deletes)
						}
					} else {
						s.appendLog(config.JobID, &result, "warn", " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
					}
				} else {
					s.appendLog(config.JobID, &result, "info", " -> 数据一致,无需变更.")
				}

				result.TablesSynced++
				return
			} else {
				// insert_only / full_overwrite: do not compare target, just insert source rows
				inserts = sourceRows
			}

			// full_overwrite: clear target table first
			if tableMode == "full_overwrite" {
				s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 全量覆盖模式:即将清空目标表 %s", tableName))
				s.progress(config.JobID, i, totalTables, tableName, "清空目标表")
				// TRUNCATE for MySQL, DELETE FROM for everything else.
				clearSQL := ""
				if strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
					clearSQL = fmt.Sprintf("TRUNCATE TABLE %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable))
				} else {
					clearSQL = fmt.Sprintf("DELETE FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable))
				}
				if _, err := targetDB.Exec(clearSQL); err != nil {
					s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 清空目标表失败: %v", err))
					return
				}
			}

			// 4. Align schema (target missing columns)
			// NOTE(review): this block duplicates the alignment logic in the
			// insert_update branch above — consider extracting a helper.
			// `updates` is always nil on this path.
			s.progress(config.JobID, i, totalTables, tableName, "检查字段一致性")
			requiredCols := collectRequiredColumns(inserts, updates)
			targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
			if err != nil {
				s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 获取目标表字段失败,已跳过字段一致性检查: %v", err))
			} else {
				targetColSet := make(map[string]struct{}, len(targetCols))
				for _, c := range targetCols {
					name := strings.ToLower(strings.TrimSpace(c.Name))
					if name == "" {
						continue
					}
					targetColSet[name] = struct{}{}
				}

				missing := make([]string, 0)
				for lower, original := range requiredCols {
					if _, ok := targetColSet[lower]; !ok {
						missing = append(missing, original)
					}
				}
				sort.Strings(missing)

				if len(missing) > 0 {
					if config.AutoAddColumns && strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
						s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个,开始自动补齐: %s", len(missing), strings.Join(missing, ", ")))
						added := 0
						for _, colName := range missing {
							colLower := strings.ToLower(strings.TrimSpace(colName))
							colType := "TEXT"
							if strings.ToLower(strings.TrimSpace(config.SourceConfig.Type)) == "mysql" {
								if srcCol, ok := sourceColsByLower[colLower]; ok {
									colType = sanitizeMySQLColumnType(srcCol.Type)
								}
							}

							alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
								quoteQualifiedIdentByType("mysql", targetQueryTable),
								quoteIdentByType("mysql", colName),
								colType,
							)
							if _, err := targetDB.Exec(alterSQL); err != nil {
								s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 自动补字段失败:字段=%s 错误=%v", colName, err))
								continue
							}
							added++
						}
						s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 自动补字段完成:成功=%d 失败=%d", added, len(missing)-added))

						// refresh columns
						targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
						if err == nil {
							targetColSet = make(map[string]struct{}, len(targetCols))
							for _, c := range targetCols {
								name := strings.ToLower(strings.TrimSpace(c.Name))
								if name == "" {
									continue
								}
								targetColSet[name] = struct{}{}
							}
						}
					} else {
						s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个(未开启自动补齐),将自动忽略:%s", len(missing), strings.Join(missing, ", ")))
					}

					// filter out still-missing columns to avoid apply failure
					inserts = filterInsertRows(inserts, targetColSet)
					updates = filterUpdateRows(updates, targetColSet)
				}
			}

			// 5. Apply Changes
			s.progress(config.JobID, i, totalTables, tableName, "应用变更")
			changeSet := connection.ChangeSet{
				Inserts: inserts,
				Updates: updates,
			}

			if len(changeSet.Inserts) > 0 || len(changeSet.Updates) > 0 {
				s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行", len(changeSet.Inserts), len(changeSet.Updates)))

				if applier, ok := targetDB.(db.BatchApplier); ok {
					if err := applier.ApplyChanges(targetTable, changeSet); err != nil {
						s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 应用变更失败: %v", err))
					} else {
						result.RowsInserted += len(changeSet.Inserts)
						result.RowsUpdated += len(changeSet.Updates)
					}
				} else {
					s.appendLog(config.JobID, &result, "warn", " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
				}
			} else {
				s.appendLog(config.JobID, &result, "info", " -> 数据一致,无需变更.")
			}

			result.TablesSynced++
		}()
	}

	s.progress(config.JobID, totalTables, totalTables, "", "同步完成")
	return result
}
|
||||
|
||||
func formatConnSummaryForSync(config connection.ConnectionConfig) string {
|
||||
timeoutSeconds := config.Timeout
|
||||
if timeoutSeconds <= 0 {
|
||||
timeoutSeconds = 30
|
||||
}
|
||||
|
||||
dbName := strings.TrimSpace(config.Database)
|
||||
if dbName == "" {
|
||||
dbName = "(default)"
|
||||
}
|
||||
|
||||
return fmt.Sprintf("类型=%s 地址=%s:%d 数据库=%s 用户=%s 超时=%ds",
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds)
|
||||
}
|
||||
|
||||
func (s *SyncEngine) appendLog(jobID string, res *SyncResult, level string, msg string) {
|
||||
if res != nil {
|
||||
res.Logs = append(res.Logs, msg)
|
||||
}
|
||||
if s.reporter.OnLog != nil && strings.TrimSpace(jobID) != "" {
|
||||
s.reporter.OnLog(SyncLogEvent{
|
||||
JobID: jobID,
|
||||
Level: level,
|
||||
Message: msg,
|
||||
Ts: time.Now().UnixMilli(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (s *SyncEngine) progress(jobID string, current, total int, table string, stage string) {
|
||||
if s.reporter.OnProgress == nil || strings.TrimSpace(jobID) == "" {
|
||||
return
|
||||
}
|
||||
percent := 0
|
||||
if total <= 0 {
|
||||
if current > 0 {
|
||||
percent = 100
|
||||
}
|
||||
} else {
|
||||
if current < 0 {
|
||||
current = 0
|
||||
}
|
||||
if current > total {
|
||||
current = total
|
||||
}
|
||||
percent = (current * 100) / total
|
||||
}
|
||||
s.reporter.OnProgress(SyncProgressEvent{
|
||||
JobID: jobID,
|
||||
Percent: percent,
|
||||
Current: current,
|
||||
Total: total,
|
||||
Table: table,
|
||||
Stage: stage,
|
||||
})
|
||||
}
|
||||
|
||||
func (s *SyncEngine) fail(jobID string, totalTables int, res SyncResult, msg string) SyncResult {
|
||||
res.Success = false
|
||||
res.Message = msg
|
||||
s.appendLog(jobID, &res, "error", "致命错误: "+msg)
|
||||
s.progress(jobID, res.TablesSynced, totalTables, "", "同步失败")
|
||||
return res
|
||||
}
|
||||
30
internal/sync/sync_events.go
Normal file
30
internal/sync/sync_events.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package sync
|
||||
|
||||
// Event names emitted to the frontend over the course of a sync job.
const (
	EventSyncStart    = "sync:start"
	EventSyncProgress = "sync:progress"
	EventSyncLog      = "sync:log"
	EventSyncDone     = "sync:done"
)
|
||||
|
||||
// SyncLogEvent is a single log line produced during a sync job, delivered
// through Reporter.OnLog.
type SyncLogEvent struct {
	JobID   string `json:"jobId"`
	Level   string `json:"level"` // info/warn/error
	Message string `json:"message"`
	Ts      int64  `json:"ts"` // Unix milli
}
|
||||
|
||||
// SyncProgressEvent reports per-table progress of a sync job, delivered
// through Reporter.OnProgress.
type SyncProgressEvent struct {
	JobID   string `json:"jobId"`
	Percent int    `json:"percent"`
	Current int    `json:"current"` // number of tables completed
	Total   int    `json:"total"`   // total number of tables
	Table   string `json:"table,omitempty"`
	Stage   string `json:"stage,omitempty"`
}
|
||||
|
||||
// Reporter bundles the optional callbacks through which a SyncEngine emits
// log and progress events; either field may be nil to disable that stream.
type Reporter struct {
	OnLog      func(event SyncLogEvent)
	OnProgress func(event SyncProgressEvent)
}
|
||||
|
||||
13
internal/sync/table_options.go
Normal file
13
internal/sync/table_options.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package sync
|
||||
|
||||
// TableOptions controls which operations to apply per table, and optional row selection.
// Note: if a Selected*PKs list is not specified, all diff rows of that change
// type are synced; an explicitly empty list likewise means all.
type TableOptions struct {
	Insert bool `json:"insert,omitempty"`
	Update bool `json:"update,omitempty"`
	Delete bool `json:"delete,omitempty"`

	SelectedInsertPKs []string `json:"selectedInsertPks,omitempty"`
	SelectedUpdatePKs []string `json:"selectedUpdatePks,omitempty"`
	SelectedDeletePKs []string `json:"selectedDeletePks,omitempty"`
}
|
||||
Reference in New Issue
Block a user