mirror of
https://github.com/Syngnat/GoNavi.git
synced 2026-05-06 20:03:05 +08:00
✨ feat(sync): 数据同步支持差异对比、行级选择与实时进度日志
- 新增差异分析/预览接口与前端预览抽屉(插入/更新/删除) - 支持按表勾选插入/更新/删除(删除默认不勾选) - 支持按主键选择行级同步;无主键/复合主键表跳过并提示 - 同步过程实时输出中文日志与进度条,便于定位失败步骤
This commit is contained in:
@@ -141,8 +141,7 @@ const EditableCell: React.FC<EditableCellProps> = React.memo(({
|
||||
return <td {...restProps} onDoubleClick={editable ? toggleEdit : undefined}>{childNode}</td>;
|
||||
});
|
||||
|
||||
const ContextMenuRow = React.memo(({ children, ...props }: any) => {
|
||||
const record = props.record;
|
||||
const ContextMenuRow = React.memo(({ children, record, ...props }: any) => {
|
||||
const context = useContext(DataContext);
|
||||
|
||||
if (!record || !context) return <tr {...props}>{children}</tr>;
|
||||
|
||||
@@ -1,14 +1,36 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography } from 'antd';
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { Modal, Form, Select, Button, message, Steps, Transfer, Card, Alert, Divider, Typography, Progress, Checkbox, Table, Drawer, Tabs } from 'antd';
|
||||
import { useStore } from '../store';
|
||||
import { DBGetDatabases, DBGetTables, DataSync } from '../../wailsjs/go/app/App';
|
||||
import { DBGetDatabases, DBGetTables, DataSync, DataSyncAnalyze, DataSyncPreview } from '../../wailsjs/go/app/App';
|
||||
import { SavedConnection } from '../types';
|
||||
import { connection } from '../../wailsjs/go/models';
|
||||
import { EventsOn } from '../../wailsjs/runtime/runtime';
|
||||
|
||||
const { Title, Text } = Typography;
|
||||
const { Step } = Steps;
|
||||
const { Option } = Select;
|
||||
|
||||
type SyncLogEvent = { jobId: string; level?: string; message?: string; ts?: number };
|
||||
type SyncProgressEvent = { jobId: string; percent?: number; current?: number; total?: number; table?: string; stage?: string };
|
||||
type SyncLogItem = { level: string; message: string; ts?: number };
|
||||
type TableDiffSummary = {
|
||||
table: string;
|
||||
pkColumn?: string;
|
||||
canSync?: boolean;
|
||||
inserts?: number;
|
||||
updates?: number;
|
||||
deletes?: number;
|
||||
same?: number;
|
||||
message?: string;
|
||||
};
|
||||
type TableOps = {
|
||||
insert: boolean;
|
||||
update: boolean;
|
||||
delete: boolean;
|
||||
selectedInsertPks?: string[];
|
||||
selectedUpdatePks?: string[];
|
||||
selectedDeletePks?: string[];
|
||||
};
|
||||
|
||||
const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open, onClose }) => {
|
||||
const connections = useStore((state) => state.connections);
|
||||
const [currentStep, setCurrentStep] = useState(0);
|
||||
@@ -27,8 +49,76 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
const [allTables, setAllTables] = useState<string[]>([]);
|
||||
const [selectedTables, setSelectedTables] = useState<string[]>([]);
|
||||
|
||||
// Options
|
||||
const [syncContent, setSyncContent] = useState<'data' | 'schema' | 'both'>('data');
|
||||
const [syncMode, setSyncMode] = useState<string>('insert_update');
|
||||
const [autoAddColumns, setAutoAddColumns] = useState<boolean>(true);
|
||||
const [showSameTables, setShowSameTables] = useState<boolean>(false);
|
||||
const [analyzing, setAnalyzing] = useState<boolean>(false);
|
||||
const [diffTables, setDiffTables] = useState<TableDiffSummary[]>([]);
|
||||
const [tableOptions, setTableOptions] = useState<Record<string, TableOps>>({});
|
||||
|
||||
const [previewOpen, setPreviewOpen] = useState(false);
|
||||
const [previewTable, setPreviewTable] = useState<string>('');
|
||||
const [previewLoading, setPreviewLoading] = useState(false);
|
||||
const [previewData, setPreviewData] = useState<any>(null);
|
||||
|
||||
// Step 3: Result
|
||||
const [syncResult, setSyncResult] = useState<any>(null);
|
||||
const [syncing, setSyncing] = useState(false);
|
||||
const [syncLogs, setSyncLogs] = useState<SyncLogItem[]>([]);
|
||||
const [syncProgress, setSyncProgress] = useState<{ percent: number; current: number; total: number; table: string; stage: string }>({
|
||||
percent: 0,
|
||||
current: 0,
|
||||
total: 0,
|
||||
table: '',
|
||||
stage: ''
|
||||
});
|
||||
const jobIdRef = useRef<string>('');
|
||||
const logBoxRef = useRef<HTMLDivElement>(null);
|
||||
const autoScrollRef = useRef(true);
|
||||
|
||||
const normalizeConnConfig = (conn: SavedConnection, database?: string) => ({
|
||||
...conn.config,
|
||||
port: Number((conn.config as any).port),
|
||||
password: conn.config.password || "",
|
||||
useSSH: conn.config.useSSH || false,
|
||||
ssh: conn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: typeof database === 'string' ? database : (conn.config.database || ""),
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (!open) return;
|
||||
|
||||
const offLog = EventsOn('sync:log', (event: SyncLogEvent) => {
|
||||
if (!event || event.jobId !== jobIdRef.current) return;
|
||||
const msg = String(event.message || '').trim();
|
||||
if (!msg) return;
|
||||
setSyncLogs(prev => [...prev, { level: String(event.level || 'info'), message: msg, ts: event.ts }]);
|
||||
});
|
||||
|
||||
const offProgress = EventsOn('sync:progress', (event: SyncProgressEvent) => {
|
||||
if (!event || event.jobId !== jobIdRef.current) return;
|
||||
setSyncProgress(prev => ({
|
||||
percent: typeof event.percent === 'number' ? event.percent : prev.percent,
|
||||
current: typeof event.current === 'number' ? event.current : prev.current,
|
||||
total: typeof event.total === 'number' ? event.total : prev.total,
|
||||
table: typeof event.table === 'string' ? event.table : prev.table,
|
||||
stage: typeof event.stage === 'string' ? event.stage : prev.stage,
|
||||
}));
|
||||
});
|
||||
|
||||
return () => {
|
||||
offLog();
|
||||
offProgress();
|
||||
};
|
||||
}, [open]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!logBoxRef.current) return;
|
||||
if (!autoScrollRef.current) return;
|
||||
logBoxRef.current.scrollTop = logBoxRef.current.scrollHeight;
|
||||
}, [syncLogs]);
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
@@ -38,7 +128,23 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setSourceDb('');
|
||||
setTargetDb('');
|
||||
setSelectedTables([]);
|
||||
setSyncContent('data');
|
||||
setSyncMode('insert_update');
|
||||
setAutoAddColumns(true);
|
||||
setShowSameTables(false);
|
||||
setAnalyzing(false);
|
||||
setDiffTables([]);
|
||||
setTableOptions({});
|
||||
setPreviewOpen(false);
|
||||
setPreviewTable('');
|
||||
setPreviewLoading(false);
|
||||
setPreviewData(null);
|
||||
setSyncResult(null);
|
||||
setSyncing(false);
|
||||
setSyncLogs([]);
|
||||
setSyncProgress({ percent: 0, current: 0, total: 0, table: '', stage: '' });
|
||||
jobIdRef.current = '';
|
||||
autoScrollRef.current = true;
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
@@ -49,7 +155,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(conn.config as any);
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setSourceDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
@@ -65,7 +171,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
if (conn) {
|
||||
setLoading(true);
|
||||
try {
|
||||
const res = await DBGetDatabases(conn.config as any);
|
||||
const res = await DBGetDatabases(normalizeConnConfig(conn) as any);
|
||||
if (res.success) {
|
||||
setTargetDbs((res.data as any[]).map((r: any) => r.Database || r.database || r.username));
|
||||
}
|
||||
@@ -83,7 +189,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
try {
|
||||
const conn = connections.find(c => c.id === sourceConnId);
|
||||
if (conn) {
|
||||
const config = { ...conn.config, database: sourceDb };
|
||||
const config = normalizeConnConfig(conn, sourceDb);
|
||||
const res = await DBGetTables(config as any, sourceDb);
|
||||
if (res.success) {
|
||||
// DBGetTables returns [{Table: "name"}, ...]
|
||||
@@ -98,36 +204,221 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
const runSync = async () => {
|
||||
const updateTableOption = (table: string, key: keyof TableOps, value: any) => {
|
||||
setTableOptions(prev => ({
|
||||
...prev,
|
||||
[table]: { ...(prev[table] || { insert: true, update: true, delete: false }), [key]: value }
|
||||
}));
|
||||
};
|
||||
|
||||
const analyzeDiff = async () => {
|
||||
if (selectedTables.length === 0) return;
|
||||
if (!sourceConnId || !targetConnId) return message.error("Select connections first");
|
||||
if (!sourceDb || !targetDb) return message.error("Select databases first");
|
||||
|
||||
setLoading(true);
|
||||
setAnalyzing(true);
|
||||
setDiffTables([]);
|
||||
setTableOptions({});
|
||||
setSyncLogs([]);
|
||||
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
const jobId = `analyze-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
|
||||
jobIdRef.current = jobId;
|
||||
autoScrollRef.current = true;
|
||||
setSyncProgress({ percent: 0, current: 0, total: selectedTables.length, table: '', stage: '差异分析' });
|
||||
|
||||
const config = {
|
||||
sourceConfig: normalizeConnConfig(sConn, sourceDb),
|
||||
targetConfig: normalizeConnConfig(tConn, targetDb),
|
||||
tables: selectedTables,
|
||||
content: syncContent,
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
jobId,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSyncAnalyze(config as any);
|
||||
if (res.success) {
|
||||
const tables = ((res.data as any)?.tables || []) as TableDiffSummary[];
|
||||
setDiffTables(tables);
|
||||
const init: Record<string, TableOps> = {};
|
||||
tables.forEach(t => {
|
||||
const can = !!t.canSync;
|
||||
init[t.table] = {
|
||||
insert: can,
|
||||
update: can,
|
||||
delete: false,
|
||||
selectedInsertPks: [],
|
||||
selectedUpdatePks: [],
|
||||
selectedDeletePks: [],
|
||||
};
|
||||
});
|
||||
setTableOptions(init);
|
||||
message.success("差异分析完成");
|
||||
} else {
|
||||
message.error(res.message || "差异分析失败");
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error("差异分析失败: " + (e?.message || ""));
|
||||
}
|
||||
|
||||
setLoading(false);
|
||||
setAnalyzing(false);
|
||||
};
|
||||
|
||||
const openPreview = async (table: string) => {
|
||||
if (!table) return;
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
|
||||
setPreviewOpen(true);
|
||||
setPreviewTable(table);
|
||||
setPreviewLoading(true);
|
||||
setPreviewData(null);
|
||||
|
||||
const config = {
|
||||
sourceConfig: normalizeConnConfig(sConn, sourceDb),
|
||||
targetConfig: normalizeConnConfig(tConn, targetDb),
|
||||
tables: selectedTables,
|
||||
content: "data",
|
||||
mode: "insert_update",
|
||||
autoAddColumns,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSyncPreview(config as any, table, 200);
|
||||
if (res.success) {
|
||||
setPreviewData(res.data);
|
||||
} else {
|
||||
message.error(res.message || "加载差异预览失败");
|
||||
}
|
||||
} catch (e: any) {
|
||||
message.error("加载差异预览失败: " + (e?.message || ""));
|
||||
}
|
||||
|
||||
setPreviewLoading(false);
|
||||
};
|
||||
|
||||
const runSync = async () => {
|
||||
if (syncContent !== 'schema' && diffTables.length === 0) {
|
||||
message.error("请先对比差异,再开始同步");
|
||||
return;
|
||||
}
|
||||
if (syncContent !== 'schema' && syncMode === 'full_overwrite') {
|
||||
const ok = await new Promise<boolean>((resolve) => {
|
||||
Modal.confirm({
|
||||
title: '确认全量覆盖',
|
||||
content: '全量覆盖会清空目标表数据后再插入,请确认已备份目标库。',
|
||||
okText: '继续执行',
|
||||
cancelText: '取消',
|
||||
onOk: () => resolve(true),
|
||||
onCancel: () => resolve(false),
|
||||
});
|
||||
});
|
||||
if (!ok) return;
|
||||
}
|
||||
|
||||
setLoading(true);
|
||||
setSyncing(true);
|
||||
setCurrentStep(2);
|
||||
setSyncResult(null);
|
||||
setSyncLogs([]);
|
||||
|
||||
const sConn = connections.find(c => c.id === sourceConnId)!;
|
||||
const tConn = connections.find(c => c.id === targetConnId)!;
|
||||
|
||||
const jobId = `sync-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
|
||||
jobIdRef.current = jobId;
|
||||
autoScrollRef.current = true;
|
||||
setSyncProgress({
|
||||
percent: 0,
|
||||
current: 0,
|
||||
total: selectedTables.length,
|
||||
table: '',
|
||||
stage: '准备开始',
|
||||
});
|
||||
|
||||
const config = {
|
||||
sourceConfig: { ...sConn.config, database: sourceDb },
|
||||
targetConfig: { ...tConn.config, database: targetDb },
|
||||
sourceConfig: {
|
||||
...sConn.config,
|
||||
port: Number((sConn.config as any).port),
|
||||
password: sConn.config.password || "",
|
||||
useSSH: sConn.config.useSSH || false,
|
||||
ssh: sConn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: sourceDb,
|
||||
},
|
||||
targetConfig: {
|
||||
...tConn.config,
|
||||
port: Number((tConn.config as any).port),
|
||||
password: tConn.config.password || "",
|
||||
useSSH: tConn.config.useSSH || false,
|
||||
ssh: tConn.config.ssh || { host: "", port: 22, user: "", password: "", keyPath: "" },
|
||||
database: targetDb,
|
||||
},
|
||||
tables: selectedTables,
|
||||
mode: "insert_update"
|
||||
content: syncContent,
|
||||
mode: syncMode,
|
||||
autoAddColumns,
|
||||
tableOptions,
|
||||
jobId,
|
||||
};
|
||||
|
||||
try {
|
||||
const res = await DataSync(config as any);
|
||||
setSyncResult(res);
|
||||
setCurrentStep(2);
|
||||
if (Array.isArray(res?.logs) && res.logs.length > 0) {
|
||||
setSyncLogs(prev => {
|
||||
if (prev.length > 0) return prev;
|
||||
return (res.logs as string[]).map((log) => {
|
||||
const msg = String(log || '').trim();
|
||||
if (msg.includes('致命错误') || msg.includes('失败')) return { level: 'error', message: msg };
|
||||
if (msg.includes('跳过') || msg.includes('警告')) return { level: 'warn', message: msg };
|
||||
return { level: 'info', message: msg };
|
||||
});
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
message.error("Sync execution failed");
|
||||
setSyncResult({ success: false, message: "同步执行失败", logs: [] });
|
||||
}
|
||||
setLoading(false);
|
||||
setSyncing(false);
|
||||
};
|
||||
|
||||
const renderSyncLogItem = (item: SyncLogItem) => {
|
||||
const level = String(item.level || 'info').toLowerCase();
|
||||
const color = level === 'error' ? '#ff4d4f' : (level === 'warn' ? '#faad14' : '#595959');
|
||||
const label = level === 'error' ? '错误' : (level === 'warn' ? '警告' : '信息');
|
||||
const timeText = typeof item.ts === 'number' ? new Date(item.ts).toLocaleTimeString('zh-CN', { hour12: false }) : '';
|
||||
return (
|
||||
<div style={{ display: 'flex', gap: 8, alignItems: 'flex-start' }}>
|
||||
<span style={{ color, flex: '0 0 auto' }}>● {label}</span>
|
||||
{timeText && <span style={{ color: '#8c8c8c', flex: '0 0 auto' }}>{timeText}</span>}
|
||||
<span style={{ whiteSpace: 'pre-wrap', wordBreak: 'break-word' }}>{item.message}</span>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal
|
||||
title="数据同步"
|
||||
open={open}
|
||||
onCancel={onClose}
|
||||
width={800}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
title="数据同步"
|
||||
open={open}
|
||||
onCancel={() => {
|
||||
if (syncing) {
|
||||
message.warning("同步执行中,暂不支持关闭");
|
||||
return;
|
||||
}
|
||||
onClose();
|
||||
}}
|
||||
width={800}
|
||||
footer={null}
|
||||
destroyOnHidden
|
||||
closable={!syncing}
|
||||
maskClosable={!syncing}
|
||||
>
|
||||
<Steps current={currentStep} style={{ marginBottom: 24 }}>
|
||||
<Step title="配置源与目标" />
|
||||
@@ -137,34 +428,67 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 1: CONFIG */}
|
||||
{currentStep === 0 && (
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<div>
|
||||
<div style={{ display: 'flex', gap: 24, justifyContent: 'center' }}>
|
||||
<Card title="源数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={sourceDb} onChange={setSourceDb} showSearch>
|
||||
{sourceDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={targetDb} onChange={setTargetDb} showSearch>
|
||||
{targetDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<Card title="同步选项" style={{ marginTop: 16 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={sourceConnId} onChange={handleSourceConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
<Form.Item label="同步内容">
|
||||
<Select value={syncContent} onChange={setSyncContent}>
|
||||
<Option value="data">仅同步数据</Option>
|
||||
<Option value="schema">仅同步结构</Option>
|
||||
<Option value="both">同步结构 + 数据</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={sourceDb} onChange={setSourceDb} showSearch>
|
||||
{sourceDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
<Form.Item label="同步模式">
|
||||
<Select value={syncMode} onChange={setSyncMode} disabled={syncContent === 'schema'}>
|
||||
<Option value="insert_update">增量同步(对比差异,按插入/更新/删除勾选执行)</Option>
|
||||
<Option value="insert_only">仅插入(不对比目标;无主键表将跳过)</Option>
|
||||
<Option value="full_overwrite">全量覆盖(清空目标表后插入)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Card>
|
||||
<div style={{ display: 'flex', alignItems: 'center' }}>至</div>
|
||||
<Card title="目标数据库" style={{ width: 350 }}>
|
||||
<Form layout="vertical">
|
||||
<Form.Item label="连接">
|
||||
<Select value={targetConnId} onChange={handleTargetConnChange}>
|
||||
{connections.map(c => <Option key={c.id} value={c.id}>{c.name} ({c.config.type})</Option>)}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item label="数据库">
|
||||
<Select value={targetDb} onChange={setTargetDb} showSearch>
|
||||
{targetDbs.map(d => <Option key={d} value={d}>{d}</Option>)}
|
||||
</Select>
|
||||
<Form.Item>
|
||||
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)}>
|
||||
自动补齐目标表缺失字段(仅 MySQL 目标)
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
{syncContent !== 'schema' && syncMode === 'full_overwrite' && (
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="全量覆盖会清空目标表数据,请谨慎使用。"
|
||||
/>
|
||||
)}
|
||||
</Form>
|
||||
</Card>
|
||||
</div>
|
||||
@@ -172,32 +496,155 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
|
||||
{/* STEP 2: TABLES */}
|
||||
{currentStep === 1 && (
|
||||
<div style={{ height: 400 }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: 12 }}>
|
||||
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
|
||||
<Text type="secondary">请选择需要同步的表:</Text>
|
||||
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
|
||||
显示相同表
|
||||
</Checkbox>
|
||||
</div>
|
||||
<Transfer
|
||||
dataSource={allTables.map(t => ({ key: t, title: t }))}
|
||||
titles={['源表', '已选表']}
|
||||
targetKeys={selectedTables}
|
||||
onChange={(keys) => setSelectedTables(keys as string[])}
|
||||
render={item => item.title}
|
||||
listStyle={{ width: 350, height: 350, marginTop: 12 }}
|
||||
listStyle={{ width: 350, height: 280, marginTop: 0 }}
|
||||
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表', notFoundContent: '暂无数据' }}
|
||||
/>
|
||||
|
||||
{diffTables.length > 0 && (
|
||||
<div>
|
||||
<Divider orientation="left">对比结果</Divider>
|
||||
<Table
|
||||
size="small"
|
||||
pagination={false}
|
||||
rowKey={(r: any) => r.table}
|
||||
dataSource={diffTables.filter(t => {
|
||||
const ins = Number(t.inserts || 0);
|
||||
const upd = Number(t.updates || 0);
|
||||
const del = Number(t.deletes || 0);
|
||||
const same = Number(t.same || 0);
|
||||
const msg = String(t.message || '').trim();
|
||||
const can = !!t.canSync;
|
||||
if (showSameTables) return true;
|
||||
if (!can) return true;
|
||||
if (msg) return true;
|
||||
return ins > 0 || upd > 0 || del > 0 || same === 0;
|
||||
})}
|
||||
columns={[
|
||||
{ title: '表名', dataIndex: 'table', key: 'table', ellipsis: true },
|
||||
{
|
||||
title: '插入',
|
||||
key: 'inserts',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.inserts || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.insert}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'insert', e.target.checked)}
|
||||
>
|
||||
{Number(r.inserts || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '更新',
|
||||
key: 'updates',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.updates || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.update}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'update', e.target.checked)}
|
||||
>
|
||||
{Number(r.updates || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: '删除',
|
||||
key: 'deletes',
|
||||
width: 90,
|
||||
render: (_: any, r: any) => {
|
||||
const ops = tableOptions[r.table] || { insert: true, update: true, delete: false };
|
||||
const disabled = !r.canSync || analyzing || Number(r.deletes || 0) === 0;
|
||||
return (
|
||||
<Checkbox
|
||||
checked={!!ops.delete}
|
||||
disabled={disabled}
|
||||
onChange={(e) => updateTableOption(r.table, 'delete', e.target.checked)}
|
||||
>
|
||||
{Number(r.deletes || 0)}
|
||||
</Checkbox>
|
||||
);
|
||||
}
|
||||
},
|
||||
{ title: '相同', dataIndex: 'same', key: 'same', width: 70, render: (v: any) => Number(v || 0) },
|
||||
{ title: '消息', dataIndex: 'message', key: 'message', ellipsis: true, render: (v: any) => (v ? String(v) : '') },
|
||||
{
|
||||
title: '预览',
|
||||
key: 'preview',
|
||||
width: 80,
|
||||
render: (_: any, r: any) => {
|
||||
const can = !!r.canSync;
|
||||
const hasDiff = Number(r.inserts || 0) + Number(r.updates || 0) + Number(r.deletes || 0) > 0;
|
||||
return (
|
||||
<Button size="small" disabled={!can || !hasDiff || analyzing} onClick={() => openPreview(r.table)}>
|
||||
查看
|
||||
</Button>
|
||||
);
|
||||
}
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* STEP 3: RESULT */}
|
||||
{currentStep === 2 && syncResult && (
|
||||
{currentStep === 2 && (
|
||||
<div>
|
||||
<Alert
|
||||
message={syncResult.success ? "同步完成" : "同步失败"}
|
||||
description={syncResult.message || `成功同步 ${syncResult.tablesSynced} 张表. 插入: ${syncResult.rowsInserted}, 更新: ${syncResult.rowsUpdated}`}
|
||||
type={syncResult.success ? "success" : "error"}
|
||||
showIcon
|
||||
<Alert
|
||||
message={syncing ? "正在同步" : (syncResult?.success ? "同步完成" : "同步失败")}
|
||||
description={
|
||||
syncing
|
||||
? `当前阶段:${syncProgress.stage || '执行中'}${syncProgress.table ? `,表:${syncProgress.table}` : ''}`
|
||||
: (syncResult?.message || `成功同步 ${syncResult?.tablesSynced || 0} 张表. 插入: ${syncResult?.rowsInserted || 0}, 更新: ${syncResult?.rowsUpdated || 0}`)
|
||||
}
|
||||
type={syncing ? "info" : (syncResult?.success ? "success" : "error")}
|
||||
showIcon
|
||||
/>
|
||||
|
||||
<div style={{ marginTop: 12 }}>
|
||||
<Progress
|
||||
percent={syncProgress.percent}
|
||||
status={syncing ? "active" : (syncResult?.success ? "success" : "exception")}
|
||||
format={() => `${syncProgress.current}/${syncProgress.total}`}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Divider orientation="left">日志</Divider>
|
||||
<div style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}>
|
||||
{syncResult.logs.map((log: string, i: number) => <div key={i}>{log}</div>)}
|
||||
<div
|
||||
ref={logBoxRef}
|
||||
onScroll={() => {
|
||||
const el = logBoxRef.current;
|
||||
if (!el) return;
|
||||
const nearBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
|
||||
autoScrollRef.current = nearBottom;
|
||||
}}
|
||||
style={{ background: '#f5f5f5', padding: 12, height: 300, overflowY: 'auto', fontFamily: 'monospace' }}
|
||||
>
|
||||
{syncLogs.map((item, i: number) => <div key={i}>{renderSyncLogItem(item)}</div>)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
@@ -206,20 +653,154 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
|
||||
{currentStep === 0 && (
|
||||
<Button type="primary" onClick={nextToTables} loading={loading}>下一步</Button>
|
||||
)}
|
||||
{currentStep === 1 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(0)} style={{ marginRight: 8 }}>上一步</Button>
|
||||
<Button type="primary" onClick={runSync} loading={loading} disabled={selectedTables.length === 0}>开始同步</Button>
|
||||
{currentStep === 1 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(0)} style={{ marginRight: 8 }}>上一步</Button>
|
||||
<Button onClick={analyzeDiff} loading={loading} disabled={syncContent === 'schema' || selectedTables.length === 0 || analyzing} style={{ marginRight: 8 }}>
|
||||
对比差异
|
||||
</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={runSync}
|
||||
loading={loading}
|
||||
disabled={selectedTables.length === 0 || (syncContent !== 'schema' && diffTables.length === 0)}
|
||||
>
|
||||
开始同步
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
{currentStep === 2 && (
|
||||
<>
|
||||
<Button onClick={() => setCurrentStep(1)} style={{ marginRight: 8 }}>继续同步</Button>
|
||||
<Button type="primary" onClick={onClose}>关闭</Button>
|
||||
<Button disabled={syncing} onClick={() => setCurrentStep(1)} style={{ marginRight: 8 }}>继续同步</Button>
|
||||
<Button type="primary" disabled={syncing} onClick={onClose}>关闭</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</Modal>
|
||||
<Drawer
|
||||
title={`差异预览:${previewTable}`}
|
||||
open={previewOpen}
|
||||
onClose={() => { setPreviewOpen(false); setPreviewTable(''); setPreviewData(null); }}
|
||||
width={900}
|
||||
>
|
||||
{previewLoading && <Alert type="info" showIcon message="正在加载差异预览..." />}
|
||||
{!previewLoading && previewData && (
|
||||
<div>
|
||||
<Alert
|
||||
type="info"
|
||||
showIcon
|
||||
message={`插入 ${previewData.totalInserts || 0},更新 ${previewData.totalUpdates || 0},删除 ${previewData.totalDeletes || 0}(预览最多展示 200 条/类型)`}
|
||||
/>
|
||||
<Divider />
|
||||
<Tabs
|
||||
items={[
|
||||
{
|
||||
key: 'insert',
|
||||
label: `插入(${previewData.totalInserts || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Text type="secondary">未勾选任何行表示“同步全部插入差异”;如不想执行插入请在对比结果中取消勾选“插入”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.inserts || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedInsertPks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedInsertPks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.insert }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '数据', dataIndex: 'row', key: 'row', render: (v: any) => <pre style={{ margin: 0, maxHeight: 140, overflow: 'auto' }}>{JSON.stringify(v, null, 2)}</pre> }
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'update',
|
||||
label: `更新(${previewData.totalUpdates || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Text type="secondary">未勾选任何行表示“同步全部更新差异”;如不想执行更新请在对比结果中取消勾选“更新”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.updates || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedUpdatePks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedUpdatePks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.update }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '变更字段', dataIndex: 'changedColumns', key: 'changedColumns', render: (v: any) => Array.isArray(v) ? v.join(', ') : '' },
|
||||
{
|
||||
title: '详情',
|
||||
key: 'detail',
|
||||
width: 80,
|
||||
render: (_: any, r: any) => (
|
||||
<Button size="small" onClick={() => {
|
||||
Modal.info({
|
||||
title: `更新详情:${previewTable} / ${r.pk}`,
|
||||
width: 900,
|
||||
content: (
|
||||
<div style={{ display: 'flex', gap: 12 }}>
|
||||
<div style={{ flex: 1 }}>
|
||||
<Title level={5}>源</Title>
|
||||
<pre style={{ maxHeight: 360, overflow: 'auto', background: '#f5f5f5', padding: 8 }}>{JSON.stringify(r.source, null, 2)}</pre>
|
||||
</div>
|
||||
<div style={{ flex: 1 }}>
|
||||
<Title level={5}>目标</Title>
|
||||
<pre style={{ maxHeight: 360, overflow: 'auto', background: '#f5f5f5', padding: 8 }}>{JSON.stringify(r.target, null, 2)}</pre>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
});
|
||||
}}>查看</Button>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
{
|
||||
key: 'delete',
|
||||
label: `删除(${previewData.totalDeletes || 0})`,
|
||||
children: (
|
||||
<div>
|
||||
<Alert type="warning" showIcon message="删除默认不勾选。请确认业务允许后再开启删除操作。" />
|
||||
<Text type="secondary">未勾选任何行表示“同步全部删除差异”;如不想执行删除请在对比结果中取消勾选“删除”。</Text>
|
||||
<Table
|
||||
size="small"
|
||||
style={{ marginTop: 8 }}
|
||||
rowKey={(r: any) => r.pk}
|
||||
dataSource={(previewData.deletes || []).map((r: any) => ({ ...r, key: r.pk }))}
|
||||
pagination={false}
|
||||
rowSelection={{
|
||||
selectedRowKeys: (tableOptions[previewTable]?.selectedDeletePks || []) as any,
|
||||
onChange: (keys) => updateTableOption(previewTable, 'selectedDeletePks', keys as string[]),
|
||||
getCheckboxProps: () => ({ disabled: !tableOptions[previewTable]?.delete }),
|
||||
}}
|
||||
columns={[
|
||||
{ title: previewData.pkColumn || '主键', dataIndex: 'pk', key: 'pk', width: 200, ellipsis: true },
|
||||
{ title: '数据', dataIndex: 'row', key: 'row', render: (v: any) => <pre style={{ margin: 0, maxHeight: 140, overflow: 'auto' }}>{JSON.stringify(v, null, 2)}</pre> }
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
]}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</Drawer>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useEffect, useState, useCallback } from 'react';
|
||||
import React, { useEffect, useState, useCallback, useRef } from 'react';
|
||||
import { message } from 'antd';
|
||||
import { TabData, ColumnDefinition } from '../types';
|
||||
import { useStore } from '../store';
|
||||
@@ -11,6 +11,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [pkColumns, setPkColumns] = useState<string[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const { connections, addSqlLog } = useStore();
|
||||
const fetchSeqRef = useRef(0);
|
||||
|
||||
const [pagination, setPagination] = useState({
|
||||
current: 1,
|
||||
@@ -24,11 +25,12 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
const [filterConditions, setFilterConditions] = useState<any[]>([]);
|
||||
|
||||
const fetchData = useCallback(async (page = pagination.current, size = pagination.pageSize) => {
|
||||
const seq = ++fetchSeqRef.current;
|
||||
setLoading(true);
|
||||
const conn = connections.find(c => c.id === tab.connectionId);
|
||||
if (!conn) {
|
||||
message.error("Connection not found");
|
||||
setLoading(false);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -135,15 +137,15 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
if (fieldNames.length === 0 && resultData.length > 0) {
|
||||
fieldNames = Object.keys(resultData[0]);
|
||||
}
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
setColumnNames(fieldNames);
|
||||
|
||||
setData(resultData.map((row: any, i: number) => ({ ...row, key: `row-${i}` })));
|
||||
|
||||
setData(resultData.map((row: any, i: number) => ({ ...row, key: `row-${i}` })));
|
||||
setPagination(prev => ({ ...prev, current: page, pageSize: size, total: totalRecords }));
|
||||
} else {
|
||||
message.error(resData.message);
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (fetchSeqRef.current !== seq) return;
|
||||
message.error("Error fetching data: " + e.message);
|
||||
addSqlLog({
|
||||
id: `log-${Date.now()}-error`,
|
||||
@@ -155,7 +157,7 @@ const DataViewer: React.FC<{ tab: TabData }> = ({ tab }) => {
|
||||
dbName
|
||||
});
|
||||
}
|
||||
setLoading(false);
|
||||
if (fetchSeqRef.current === seq) setLoading(false);
|
||||
}, [connections, tab, sortInfo, filterConditions, pkColumns.length]);
|
||||
// Depend on pkColumns.length to avoid loop? No, pkColumns is updated inside.
|
||||
// Actually, 'pkColumns' state shouldn't trigger re-fetch.
|
||||
|
||||
4
frontend/wailsjs/go/app/App.d.ts
vendored
4
frontend/wailsjs/go/app/App.d.ts
vendored
@@ -29,6 +29,10 @@ export function DBShowCreateTable(arg1:connection.ConnectionConfig,arg2:string,a
|
||||
|
||||
export function DataSync(arg1:sync.SyncConfig):Promise<sync.SyncResult>;
|
||||
|
||||
export function DataSyncAnalyze(arg1:sync.SyncConfig):Promise<connection.QueryResult>;
|
||||
|
||||
export function DataSyncPreview(arg1:sync.SyncConfig,arg2:string,arg3:number):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportData(arg1:Array<Record<string, any>>,arg2:Array<string>,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
export function ExportTable(arg1:connection.ConnectionConfig,arg2:string,arg3:string,arg4:string):Promise<connection.QueryResult>;
|
||||
|
||||
@@ -54,6 +54,14 @@ export function DataSync(arg1) {
|
||||
return window['go']['app']['App']['DataSync'](arg1);
|
||||
}
|
||||
|
||||
export function DataSyncAnalyze(arg1) {
|
||||
return window['go']['app']['App']['DataSyncAnalyze'](arg1);
|
||||
}
|
||||
|
||||
export function DataSyncPreview(arg1, arg2, arg3) {
|
||||
return window['go']['app']['App']['DataSyncPreview'](arg1, arg2, arg3);
|
||||
}
|
||||
|
||||
export function ExportData(arg1, arg2, arg3, arg4) {
|
||||
return window['go']['app']['App']['ExportData'](arg1, arg2, arg3, arg4);
|
||||
}
|
||||
|
||||
@@ -142,11 +142,37 @@ export namespace connection {
|
||||
|
||||
export namespace sync {
|
||||
|
||||
export class TableOptions {
|
||||
insert?: boolean;
|
||||
update?: boolean;
|
||||
delete?: boolean;
|
||||
selectedInsertPks?: string[];
|
||||
selectedUpdatePks?: string[];
|
||||
selectedDeletePks?: string[];
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new TableOptions(source);
|
||||
}
|
||||
|
||||
constructor(source: any = {}) {
|
||||
if ('string' === typeof source) source = JSON.parse(source);
|
||||
this.insert = source["insert"];
|
||||
this.update = source["update"];
|
||||
this.delete = source["delete"];
|
||||
this.selectedInsertPks = source["selectedInsertPks"];
|
||||
this.selectedUpdatePks = source["selectedUpdatePks"];
|
||||
this.selectedDeletePks = source["selectedDeletePks"];
|
||||
}
|
||||
}
|
||||
export class SyncConfig {
|
||||
sourceConfig: connection.ConnectionConfig;
|
||||
targetConfig: connection.ConnectionConfig;
|
||||
tables: string[];
|
||||
content?: string;
|
||||
mode: string;
|
||||
jobId?: string;
|
||||
autoAddColumns?: boolean;
|
||||
tableOptions?: Record<string, TableOptions>;
|
||||
|
||||
static createFrom(source: any = {}) {
|
||||
return new SyncConfig(source);
|
||||
@@ -157,7 +183,11 @@ export namespace sync {
|
||||
this.sourceConfig = this.convertValues(source["sourceConfig"], connection.ConnectionConfig);
|
||||
this.targetConfig = this.convertValues(source["targetConfig"], connection.ConnectionConfig);
|
||||
this.tables = source["tables"];
|
||||
this.content = source["content"];
|
||||
this.mode = source["mode"];
|
||||
this.jobId = source["jobId"];
|
||||
this.autoAddColumns = source["autoAddColumns"];
|
||||
this.tableOptions = this.convertValues(source["tableOptions"], TableOptions, true);
|
||||
}
|
||||
|
||||
convertValues(a: any, classs: any, asMap: boolean = false): any {
|
||||
|
||||
@@ -1,11 +1,99 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"GoNavi-Wails/internal/sync"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
)
|
||||
|
||||
// DataSync executes a data synchronization task
|
||||
func (a *App) DataSync(config sync.SyncConfig) sync.SyncResult {
|
||||
engine := sync.NewSyncEngine()
|
||||
return engine.RunSync(config)
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("sync-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
reporter := sync.Reporter{
|
||||
OnLog: func(event sync.SyncLogEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncLog, event)
|
||||
},
|
||||
OnProgress: func(event sync.SyncProgressEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncProgress, event)
|
||||
},
|
||||
}
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncStart, map[string]any{
|
||||
"jobId": jobID,
|
||||
"total": len(config.Tables),
|
||||
})
|
||||
|
||||
engine := sync.NewSyncEngine(reporter)
|
||||
res := engine.RunSync(config)
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncDone, map[string]any{
|
||||
"jobId": jobID,
|
||||
"result": res,
|
||||
})
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// DataSyncAnalyze analyzes differences between source and target for the given tables (dry-run).
|
||||
func (a *App) DataSyncAnalyze(config sync.SyncConfig) connection.QueryResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("analyze-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
reporter := sync.Reporter{
|
||||
OnLog: func(event sync.SyncLogEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncLog, event)
|
||||
},
|
||||
OnProgress: func(event sync.SyncProgressEvent) {
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncProgress, event)
|
||||
},
|
||||
}
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncStart, map[string]any{
|
||||
"jobId": jobID,
|
||||
"total": len(config.Tables),
|
||||
"type": "analyze",
|
||||
})
|
||||
|
||||
engine := sync.NewSyncEngine(reporter)
|
||||
res := engine.Analyze(config)
|
||||
|
||||
runtime.EventsEmit(a.ctx, sync.EventSyncDone, map[string]any{
|
||||
"jobId": jobID,
|
||||
"result": res,
|
||||
"type": "analyze",
|
||||
})
|
||||
|
||||
if !res.Success {
|
||||
return connection.QueryResult{Success: false, Message: res.Message, Data: res}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: res.Message, Data: res}
|
||||
}
|
||||
|
||||
// DataSyncPreview returns a limited preview of diff rows for one table.
|
||||
func (a *App) DataSyncPreview(config sync.SyncConfig, tableName string, limit int) connection.QueryResult {
|
||||
jobID := strings.TrimSpace(config.JobID)
|
||||
if jobID == "" {
|
||||
jobID = fmt.Sprintf("preview-%d", time.Now().UnixNano())
|
||||
config.JobID = jobID
|
||||
}
|
||||
|
||||
engine := sync.NewSyncEngine(sync.Reporter{})
|
||||
preview, err := engine.Preview(config, tableName, limit)
|
||||
if err != nil {
|
||||
return connection.QueryResult{Success: false, Message: err.Error()}
|
||||
}
|
||||
return connection.QueryResult{Success: true, Message: "OK", Data: preview}
|
||||
}
|
||||
|
||||
@@ -88,19 +88,7 @@ func (c *CustomDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
@@ -119,19 +119,7 @@ func (d *DamengDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
@@ -150,19 +150,7 @@ func (k *KingbaseDB) Query(query string) ([]map[string]interface{}, []string, er
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ func (m *MySQLDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
m.conn = db
|
||||
m.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
|
||||
// Force verification
|
||||
if err := m.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
@@ -107,19 +107,7 @@ func (m *MySQLDB) Query(query string) ([]map[string]interface{}, []string, error
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
@@ -159,12 +147,12 @@ func (m *MySQLDB) GetTables(dbName string) ([]string, error) {
|
||||
if dbName != "" {
|
||||
query = fmt.Sprintf("SHOW TABLES FROM `%s`", dbName)
|
||||
}
|
||||
|
||||
|
||||
data, _, err := m.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
for _, v := range row {
|
||||
@@ -185,7 +173,7 @@ func (m *MySQLDB) GetCreateStatement(dbName, tableName string) (string, error) {
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
|
||||
if len(data) > 0 {
|
||||
if val, ok := data[0]["Create Table"]; ok {
|
||||
return fmt.Sprintf("%v", val), nil
|
||||
@@ -215,12 +203,12 @@ func (m *MySQLDB) GetColumns(dbName, tableName string) ([]connection.ColumnDefin
|
||||
Extra: fmt.Sprintf("%v", row["Extra"]),
|
||||
Comment: fmt.Sprintf("%v", row["Comment"]),
|
||||
}
|
||||
|
||||
|
||||
if row["Default"] != nil {
|
||||
d := fmt.Sprintf("%v", row["Default"])
|
||||
col.Default = &d
|
||||
}
|
||||
|
||||
|
||||
columns = append(columns, col)
|
||||
}
|
||||
return columns, nil
|
||||
@@ -248,14 +236,14 @@ func (m *MySQLDB) GetIndexes(dbName, tableName string) ([]connection.IndexDefini
|
||||
}
|
||||
}
|
||||
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
seq := 0
|
||||
if val, ok := row["Seq_in_index"]; ok {
|
||||
if f, ok := val.(float64); ok {
|
||||
seq = int(f)
|
||||
} else if i, ok := val.(int64); ok {
|
||||
seq = int(i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
idx := connection.IndexDefinition{
|
||||
Name: fmt.Sprintf("%v", row["Key_name"]),
|
||||
@@ -345,12 +333,12 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
for _, update := range changes.Updates {
|
||||
var sets []string
|
||||
var args []interface{}
|
||||
|
||||
|
||||
for k, v := range update.Values {
|
||||
sets = append(sets, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
|
||||
if len(sets) == 0 {
|
||||
continue
|
||||
}
|
||||
@@ -360,7 +348,7 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
wheres = append(wheres, fmt.Sprintf("`%s` = ?", k))
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
|
||||
if len(wheres) == 0 {
|
||||
return fmt.Errorf("update requires keys")
|
||||
}
|
||||
@@ -376,13 +364,13 @@ func (m *MySQLDB) ApplyChanges(tableName string, changes connection.ChangeSet) e
|
||||
var cols []string
|
||||
var placeholders []string
|
||||
var args []interface{}
|
||||
|
||||
|
||||
for k, v := range row {
|
||||
cols = append(cols, fmt.Sprintf("`%s`", k))
|
||||
placeholders = append(placeholders, "?")
|
||||
args = append(args, v)
|
||||
}
|
||||
|
||||
|
||||
if len(cols) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -125,19 +125,7 @@ func (o *OracleDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ func (p *PostgresDB) Connect(config connection.ConnectionConfig) error {
|
||||
}
|
||||
p.conn = db
|
||||
p.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
|
||||
// Force verification
|
||||
if err := p.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
@@ -81,8 +81,7 @@ func (p *PostgresDB) Query(query string) ([]map[string]interface{}, []string, er
|
||||
return nil, nil, fmt.Errorf("connection not open")
|
||||
}
|
||||
|
||||
|
||||
rows, err := p.conn.Query(query)
|
||||
rows, err := p.conn.Query(query)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
@@ -108,19 +107,7 @@ rows, err := p.conn.Query(query)
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
@@ -159,7 +146,7 @@ func (p *PostgresDB) GetTables(dbName string) ([]string, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
schema, okSchema := row["schemaname"]
|
||||
|
||||
58
internal/db/query_value.go
Normal file
58
internal/db/query_value.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// normalizeQueryValue normalizes driver-returned values for UI/JSON transport.
|
||||
// 当前主要处理 []byte:如果是可读文本则转为 string,否则转为十六进制字符串,避免前端出现“空白值”。
|
||||
func normalizeQueryValue(v interface{}) interface{} {
|
||||
if b, ok := v.([]byte); ok {
|
||||
return bytesToReadableString(b)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func bytesToReadableString(b []byte) interface{} {
|
||||
if b == nil {
|
||||
return nil
|
||||
}
|
||||
if len(b) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
if utf8.Valid(b) {
|
||||
s := string(b)
|
||||
if isMostlyPrintable(s) {
|
||||
return s
|
||||
}
|
||||
}
|
||||
|
||||
return "0x" + hex.EncodeToString(b)
|
||||
}
|
||||
|
||||
func isMostlyPrintable(s string) bool {
|
||||
if s == "" {
|
||||
return true
|
||||
}
|
||||
|
||||
total := 0
|
||||
printable := 0
|
||||
for _, r := range s {
|
||||
total++
|
||||
switch r {
|
||||
case '\n', '\r', '\t':
|
||||
printable++
|
||||
continue
|
||||
default:
|
||||
}
|
||||
if unicode.IsPrint(r) {
|
||||
printable++
|
||||
}
|
||||
}
|
||||
|
||||
// 允许少量不可见字符,避免把正常文本误判为二进制。
|
||||
return printable*100 >= total*90
|
||||
}
|
||||
@@ -17,14 +17,14 @@ type SQLiteDB struct {
|
||||
}
|
||||
|
||||
func (s *SQLiteDB) Connect(config connection.ConnectionConfig) error {
|
||||
dsn := config.Host
|
||||
dsn := config.Host
|
||||
db, err := sql.Open("sqlite", dsn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("打开数据库连接失败:%w", err)
|
||||
}
|
||||
s.conn = db
|
||||
s.pingTimeout = getConnectTimeout(config)
|
||||
|
||||
|
||||
// Force verification
|
||||
if err := s.Ping(); err != nil {
|
||||
return fmt.Errorf("连接建立后验证失败:%w", err)
|
||||
@@ -83,19 +83,7 @@ func (s *SQLiteDB) Query(query string) ([]map[string]interface{}, []string, erro
|
||||
|
||||
entry := make(map[string]interface{})
|
||||
for i, col := range columns {
|
||||
var v interface{}
|
||||
val := values[i]
|
||||
b, ok := val.([]byte)
|
||||
if ok {
|
||||
if b == nil {
|
||||
v = nil
|
||||
} else {
|
||||
v = string(b)
|
||||
}
|
||||
} else {
|
||||
v = val
|
||||
}
|
||||
entry[col] = v
|
||||
entry[col] = normalizeQueryValue(values[i])
|
||||
}
|
||||
resultData = append(resultData, entry)
|
||||
}
|
||||
@@ -124,7 +112,7 @@ func (s *SQLiteDB) GetTables(dbName string) ([]string, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
var tables []string
|
||||
for _, row := range data {
|
||||
if val, ok := row["name"]; ok {
|
||||
|
||||
198
internal/sync/analyze.go
Normal file
198
internal/sync/analyze.go
Normal file
@@ -0,0 +1,198 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type TableDiffSummary struct {
|
||||
Table string `json:"table"`
|
||||
PKColumn string `json:"pkColumn,omitempty"`
|
||||
CanSync bool `json:"canSync"`
|
||||
Inserts int `json:"inserts"`
|
||||
Updates int `json:"updates"`
|
||||
Deletes int `json:"deletes"`
|
||||
Same int `json:"same"`
|
||||
Message string `json:"message,omitempty"`
|
||||
HasSchema bool `json:"hasSchema,omitempty"`
|
||||
}
|
||||
|
||||
type SyncAnalyzeResult struct {
|
||||
Success bool `json:"success"`
|
||||
Message string `json:"message"`
|
||||
Tables []TableDiffSummary `json:"tables"`
|
||||
}
|
||||
|
||||
func (s *SyncEngine) Analyze(config SyncConfig) SyncAnalyzeResult {
|
||||
result := SyncAnalyzeResult{Success: true, Tables: []TableDiffSummary{}}
|
||||
|
||||
contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
|
||||
syncSchema := false
|
||||
syncData := true
|
||||
switch contentRaw {
|
||||
case "", "data":
|
||||
syncData = true
|
||||
case "schema":
|
||||
syncSchema = true
|
||||
syncData = false
|
||||
case "both":
|
||||
syncSchema = true
|
||||
syncData = true
|
||||
default:
|
||||
s.appendLog(config.JobID, nil, "warn", fmt.Sprintf("未知同步内容 %q,已自动使用仅同步数据", config.Content))
|
||||
syncData = true
|
||||
}
|
||||
|
||||
totalTables := len(config.Tables)
|
||||
s.progress(config.JobID, 0, totalTables, "", "差异分析开始")
|
||||
|
||||
sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "初始化源数据库驱动失败:类型=%s", config.SourceConfig.Type)
|
||||
return SyncAnalyzeResult{Success: false, Message: "初始化源数据库驱动失败: " + err.Error()}
|
||||
}
|
||||
targetDB, err := db.NewDatabase(config.TargetConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "初始化目标数据库驱动失败:类型=%s", config.TargetConfig.Type)
|
||||
return SyncAnalyzeResult{Success: false, Message: "初始化目标数据库驱动失败: " + err.Error()}
|
||||
}
|
||||
|
||||
// Connect Source
|
||||
if err := sourceDB.Connect(config.SourceConfig); err != nil {
|
||||
logger.Error(err, "源数据库连接失败:%s", formatConnSummaryForSync(config.SourceConfig))
|
||||
return SyncAnalyzeResult{Success: false, Message: "源数据库连接失败: " + err.Error()}
|
||||
}
|
||||
defer sourceDB.Close()
|
||||
|
||||
// Connect Target
|
||||
if err := targetDB.Connect(config.TargetConfig); err != nil {
|
||||
logger.Error(err, "目标数据库连接失败:%s", formatConnSummaryForSync(config.TargetConfig))
|
||||
return SyncAnalyzeResult{Success: false, Message: "目标数据库连接失败: " + err.Error()}
|
||||
}
|
||||
defer targetDB.Close()
|
||||
|
||||
for i, tableName := range config.Tables {
|
||||
func() {
|
||||
s.progress(config.JobID, i, totalTables, tableName, fmt.Sprintf("分析表(%d/%d)", i+1, totalTables))
|
||||
|
||||
summary := TableDiffSummary{
|
||||
Table: tableName,
|
||||
CanSync: false,
|
||||
Inserts: 0,
|
||||
Updates: 0,
|
||||
Deletes: 0,
|
||||
Same: 0,
|
||||
Message: "",
|
||||
HasSchema: syncSchema,
|
||||
}
|
||||
|
||||
sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
|
||||
targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
|
||||
sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
|
||||
targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)
|
||||
|
||||
cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
|
||||
if err != nil {
|
||||
summary.Message = "获取源表字段失败: " + err.Error()
|
||||
result.Tables = append(result.Tables, summary)
|
||||
return
|
||||
}
|
||||
|
||||
if !syncData {
|
||||
summary.CanSync = true
|
||||
summary.Message = "仅同步结构,未执行数据差异分析"
|
||||
result.Tables = append(result.Tables, summary)
|
||||
return
|
||||
}
|
||||
|
||||
pkCols := make([]string, 0, 2)
|
||||
for _, c := range cols {
|
||||
if c.Key == "PRI" || c.Key == "PK" {
|
||||
pkCols = append(pkCols, c.Name)
|
||||
}
|
||||
}
|
||||
if len(pkCols) == 0 {
|
||||
summary.Message = "无主键,不支持数据对比/同步"
|
||||
result.Tables = append(result.Tables, summary)
|
||||
return
|
||||
}
|
||||
if len(pkCols) > 1 {
|
||||
summary.Message = fmt.Sprintf("复合主键(%s),暂不支持数据对比/同步", strings.Join(pkCols, ","))
|
||||
result.Tables = append(result.Tables, summary)
|
||||
return
|
||||
}
|
||||
summary.PKColumn = pkCols[0]
|
||||
|
||||
// Query data for diff
|
||||
sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
|
||||
if err != nil {
|
||||
summary.Message = "读取源表失败: " + err.Error()
|
||||
result.Tables = append(result.Tables, summary)
|
||||
return
|
||||
}
|
||||
targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
|
||||
if err != nil {
|
||||
summary.Message = "读取目标表失败: " + err.Error()
|
||||
result.Tables = append(result.Tables, summary)
|
||||
return
|
||||
}
|
||||
|
||||
pkCol := summary.PKColumn
|
||||
targetMap := make(map[string]map[string]interface{}, len(targetRows))
|
||||
for _, row := range targetRows {
|
||||
if row[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
|
||||
if pkVal == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
targetMap[pkVal] = row
|
||||
}
|
||||
|
||||
sourcePKSet := make(map[string]struct{}, len(sourceRows))
|
||||
for _, sRow := range sourceRows {
|
||||
if sRow[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := strings.TrimSpace(fmt.Sprintf("%v", sRow[pkCol]))
|
||||
if pkVal == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
sourcePKSet[pkVal] = struct{}{}
|
||||
|
||||
if tRow, exists := targetMap[pkVal]; exists {
|
||||
changed := false
|
||||
for k, v := range sRow {
|
||||
if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
|
||||
changed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if changed {
|
||||
summary.Updates++
|
||||
} else {
|
||||
summary.Same++
|
||||
}
|
||||
} else {
|
||||
summary.Inserts++
|
||||
}
|
||||
}
|
||||
|
||||
for pkVal := range targetMap {
|
||||
if _, ok := sourcePKSet[pkVal]; !ok {
|
||||
summary.Deletes++
|
||||
}
|
||||
}
|
||||
|
||||
summary.CanSync = true
|
||||
result.Tables = append(result.Tables, summary)
|
||||
}()
|
||||
}
|
||||
|
||||
s.progress(config.JobID, totalTables, totalTables, "", "差异分析完成")
|
||||
result.Message = fmt.Sprintf("已完成 %d 张表的差异分析", len(result.Tables))
|
||||
return result
|
||||
}
|
||||
164
internal/sync/preview.go
Normal file
164
internal/sync/preview.go
Normal file
@@ -0,0 +1,164 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type PreviewRow struct {
|
||||
PK string `json:"pk"`
|
||||
Row map[string]interface{} `json:"row"`
|
||||
}
|
||||
|
||||
type PreviewUpdateRow struct {
|
||||
PK string `json:"pk"`
|
||||
ChangedColumns []string `json:"changedColumns"`
|
||||
Source map[string]interface{} `json:"source"`
|
||||
Target map[string]interface{} `json:"target"`
|
||||
}
|
||||
|
||||
type TableDiffPreview struct {
|
||||
Table string `json:"table"`
|
||||
PKColumn string `json:"pkColumn"`
|
||||
TotalInserts int `json:"totalInserts"`
|
||||
TotalUpdates int `json:"totalUpdates"`
|
||||
TotalDeletes int `json:"totalDeletes"`
|
||||
Inserts []PreviewRow `json:"inserts"`
|
||||
Updates []PreviewUpdateRow `json:"updates"`
|
||||
Deletes []PreviewRow `json:"deletes"`
|
||||
}
|
||||
|
||||
func (s *SyncEngine) Preview(config SyncConfig, tableName string, limit int) (TableDiffPreview, error) {
|
||||
if limit <= 0 {
|
||||
limit = 200
|
||||
}
|
||||
if limit > 500 {
|
||||
limit = 500
|
||||
}
|
||||
|
||||
sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
|
||||
if err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("初始化源数据库驱动失败: %w", err)
|
||||
}
|
||||
targetDB, err := db.NewDatabase(config.TargetConfig.Type)
|
||||
if err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("初始化目标数据库驱动失败: %w", err)
|
||||
}
|
||||
|
||||
if err := sourceDB.Connect(config.SourceConfig); err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("源数据库连接失败: %w", err)
|
||||
}
|
||||
defer sourceDB.Close()
|
||||
|
||||
if err := targetDB.Connect(config.TargetConfig); err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("目标数据库连接失败: %w", err)
|
||||
}
|
||||
defer targetDB.Close()
|
||||
|
||||
sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
|
||||
targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
|
||||
sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
|
||||
targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)
|
||||
|
||||
cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
|
||||
if err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("获取源表字段失败: %w", err)
|
||||
}
|
||||
|
||||
pkCols := make([]string, 0, 2)
|
||||
for _, c := range cols {
|
||||
if c.Key == "PRI" || c.Key == "PK" {
|
||||
pkCols = append(pkCols, c.Name)
|
||||
}
|
||||
}
|
||||
if len(pkCols) == 0 {
|
||||
return TableDiffPreview{}, fmt.Errorf("无主键,不支持数据预览")
|
||||
}
|
||||
if len(pkCols) > 1 {
|
||||
return TableDiffPreview{}, fmt.Errorf("复合主键(%s),暂不支持数据预览", strings.Join(pkCols, ","))
|
||||
}
|
||||
pkCol := pkCols[0]
|
||||
|
||||
sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
|
||||
if err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("读取源表失败: %w", err)
|
||||
}
|
||||
targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
|
||||
if err != nil {
|
||||
return TableDiffPreview{}, fmt.Errorf("读取目标表失败: %w", err)
|
||||
}
|
||||
|
||||
targetMap := make(map[string]map[string]interface{}, len(targetRows))
|
||||
for _, row := range targetRows {
|
||||
if row[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
|
||||
if pkVal == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
targetMap[pkVal] = row
|
||||
}
|
||||
|
||||
out := TableDiffPreview{
|
||||
Table: tableName,
|
||||
PKColumn: pkCol,
|
||||
TotalInserts: 0,
|
||||
TotalUpdates: 0,
|
||||
TotalDeletes: 0,
|
||||
Inserts: make([]PreviewRow, 0),
|
||||
Updates: make([]PreviewUpdateRow, 0),
|
||||
Deletes: make([]PreviewRow, 0),
|
||||
}
|
||||
|
||||
sourcePKSet := make(map[string]struct{}, len(sourceRows))
|
||||
for _, sRow := range sourceRows {
|
||||
if sRow[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := strings.TrimSpace(fmt.Sprintf("%v", sRow[pkCol]))
|
||||
if pkVal == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
sourcePKSet[pkVal] = struct{}{}
|
||||
|
||||
if tRow, exists := targetMap[pkVal]; exists {
|
||||
changedColumns := make([]string, 0)
|
||||
for k, v := range sRow {
|
||||
if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
|
||||
changedColumns = append(changedColumns, k)
|
||||
}
|
||||
}
|
||||
if len(changedColumns) > 0 {
|
||||
out.TotalUpdates++
|
||||
if len(out.Updates) < limit {
|
||||
out.Updates = append(out.Updates, PreviewUpdateRow{
|
||||
PK: pkVal,
|
||||
ChangedColumns: changedColumns,
|
||||
Source: sRow,
|
||||
Target: tRow,
|
||||
})
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
out.TotalInserts++
|
||||
if len(out.Inserts) < limit {
|
||||
out.Inserts = append(out.Inserts, PreviewRow{PK: pkVal, Row: sRow})
|
||||
}
|
||||
}
|
||||
|
||||
for pkVal, row := range targetMap {
|
||||
if _, ok := sourcePKSet[pkVal]; ok {
|
||||
continue
|
||||
}
|
||||
out.TotalDeletes++
|
||||
if len(out.Deletes) < limit {
|
||||
out.Deletes = append(out.Deletes, PreviewRow{PK: pkVal, Row: row})
|
||||
}
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
58
internal/sync/row_selection.go
Normal file
58
internal/sync/row_selection.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func filterRowsByPKSelection(pkCol string, rows []map[string]interface{}, enabled bool, selectedPKs []string) []map[string]interface{} {
|
||||
if !enabled {
|
||||
return nil
|
||||
}
|
||||
if len(rows) == 0 {
|
||||
return rows
|
||||
}
|
||||
if len(selectedPKs) == 0 {
|
||||
return rows
|
||||
}
|
||||
|
||||
set := make(map[string]struct{}, len(selectedPKs))
|
||||
for _, pk := range selectedPKs {
|
||||
set[pk] = struct{}{}
|
||||
}
|
||||
|
||||
out := make([]map[string]interface{}, 0, len(rows))
|
||||
for _, row := range rows {
|
||||
pkStr := fmt.Sprintf("%v", row[pkCol])
|
||||
if _, ok := set[pkStr]; ok {
|
||||
out = append(out, row)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func filterUpdatesByPKSelection(pkCol string, updates []connection.UpdateRow, enabled bool, selectedPKs []string) []connection.UpdateRow {
|
||||
if !enabled {
|
||||
return nil
|
||||
}
|
||||
if len(updates) == 0 {
|
||||
return updates
|
||||
}
|
||||
if len(selectedPKs) == 0 {
|
||||
return updates
|
||||
}
|
||||
|
||||
set := make(map[string]struct{}, len(selectedPKs))
|
||||
for _, pk := range selectedPKs {
|
||||
set[pk] = struct{}{}
|
||||
}
|
||||
|
||||
out := make([]connection.UpdateRow, 0, len(updates))
|
||||
for _, u := range updates {
|
||||
pkStr := fmt.Sprintf("%v", u.Keys[pkCol])
|
||||
if _, ok := set[pkStr]; ok {
|
||||
out = append(out, u)
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
97
internal/sync/schema_align.go
Normal file
97
internal/sync/schema_align.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/connection"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func collectRequiredColumns(inserts []map[string]interface{}, updates []connection.UpdateRow) map[string]string {
|
||||
// key: lower(columnName), value: original columnName
|
||||
required := make(map[string]string)
|
||||
for _, row := range inserts {
|
||||
for k := range row {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := required[key]; !exists {
|
||||
required[key] = k
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, u := range updates {
|
||||
for k := range u.Values {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := required[key]; !exists {
|
||||
required[key] = k
|
||||
}
|
||||
}
|
||||
}
|
||||
return required
|
||||
}
|
||||
|
||||
func filterInsertRows(inserts []map[string]interface{}, allowedLower map[string]struct{}) []map[string]interface{} {
|
||||
if len(inserts) == 0 || len(allowedLower) == 0 {
|
||||
return inserts
|
||||
}
|
||||
|
||||
out := make([]map[string]interface{}, 0, len(inserts))
|
||||
for _, row := range inserts {
|
||||
if len(row) == 0 {
|
||||
out = append(out, row)
|
||||
continue
|
||||
}
|
||||
n := make(map[string]interface{}, len(row))
|
||||
for k, v := range row {
|
||||
if _, ok := allowedLower[strings.ToLower(strings.TrimSpace(k))]; ok {
|
||||
n[k] = v
|
||||
}
|
||||
}
|
||||
out = append(out, n)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func filterUpdateRows(updates []connection.UpdateRow, allowedLower map[string]struct{}) []connection.UpdateRow {
|
||||
if len(updates) == 0 || len(allowedLower) == 0 {
|
||||
return updates
|
||||
}
|
||||
|
||||
out := make([]connection.UpdateRow, 0, len(updates))
|
||||
for _, u := range updates {
|
||||
if len(u.Values) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
values := make(map[string]interface{}, len(u.Values))
|
||||
for k, v := range u.Values {
|
||||
if _, ok := allowedLower[strings.ToLower(strings.TrimSpace(k))]; ok {
|
||||
values[k] = v
|
||||
}
|
||||
}
|
||||
if len(values) == 0 {
|
||||
continue
|
||||
}
|
||||
out = append(out, connection.UpdateRow{
|
||||
Keys: u.Keys,
|
||||
Values: values,
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func sanitizeMySQLColumnType(t string) string {
|
||||
tt := strings.TrimSpace(t)
|
||||
if tt == "" {
|
||||
return "TEXT"
|
||||
}
|
||||
|
||||
// 基础防护:避免把元数据中异常内容拼进 SQL。
|
||||
if strings.ContainsAny(tt, "`;\n\r") {
|
||||
return "TEXT"
|
||||
}
|
||||
return tt
|
||||
}
|
||||
101
internal/sync/schema_sync.go
Normal file
101
internal/sync/schema_sync.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package sync
|
||||
|
||||
import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func (s *SyncEngine) syncTableSchema(config SyncConfig, res *SyncResult, sourceDB db.Database, targetDB db.Database, tableName string) error {
|
||||
targetType := strings.ToLower(strings.TrimSpace(config.TargetConfig.Type))
|
||||
if targetType != "mysql" {
|
||||
s.appendLog(config.JobID, res, "warn", fmt.Sprintf("目标数据库类型=%s 暂不支持结构同步,已跳过表 %s", config.TargetConfig.Type, tableName))
|
||||
return nil
|
||||
}
|
||||
|
||||
sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
|
||||
targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
|
||||
targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)
|
||||
|
||||
// 1) 获取源表字段
|
||||
sourceCols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
|
||||
if err != nil {
|
||||
return fmt.Errorf("获取源表字段失败: %w", err)
|
||||
}
|
||||
|
||||
// 2) 确保目标表存在
|
||||
targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err != nil {
|
||||
sourceType := strings.ToLower(strings.TrimSpace(config.SourceConfig.Type))
|
||||
if sourceType != "mysql" {
|
||||
return fmt.Errorf("目标表不存在且源类型=%s 暂不支持自动建表: %w", config.SourceConfig.Type, err)
|
||||
}
|
||||
|
||||
s.appendLog(config.JobID, res, "warn", fmt.Sprintf("目标表 %s 不存在,开始尝试创建表结构", tableName))
|
||||
createSQL, errCreate := sourceDB.GetCreateStatement(sourceSchema, sourceTable)
|
||||
if errCreate != nil || strings.TrimSpace(createSQL) == "" {
|
||||
if errCreate == nil {
|
||||
errCreate = fmt.Errorf("建表语句为空")
|
||||
}
|
||||
return fmt.Errorf("获取源表建表语句失败: %w", errCreate)
|
||||
}
|
||||
|
||||
if _, errExec := targetDB.Exec(createSQL); errExec != nil {
|
||||
return fmt.Errorf("创建目标表失败: %w", errExec)
|
||||
}
|
||||
s.appendLog(config.JobID, res, "info", fmt.Sprintf("目标表创建成功:%s", tableName))
|
||||
|
||||
targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err != nil {
|
||||
return fmt.Errorf("创建目标表后获取字段失败: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
targetColSet := make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
|
||||
// 3) 补齐目标缺失字段(安全策略:新增字段统一允许 NULL)
|
||||
missing := make([]string, 0)
|
||||
sourceType := strings.ToLower(strings.TrimSpace(config.SourceConfig.Type))
|
||||
for _, c := range sourceCols {
|
||||
colName := strings.TrimSpace(c.Name)
|
||||
if colName == "" {
|
||||
continue
|
||||
}
|
||||
lower := strings.ToLower(colName)
|
||||
if _, ok := targetColSet[lower]; ok {
|
||||
continue
|
||||
}
|
||||
missing = append(missing, colName)
|
||||
|
||||
colType := "TEXT"
|
||||
if sourceType == "mysql" {
|
||||
colType = sanitizeMySQLColumnType(c.Type)
|
||||
}
|
||||
|
||||
alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
|
||||
quoteQualifiedIdentByType("mysql", targetQueryTable),
|
||||
quoteIdentByType("mysql", colName),
|
||||
colType,
|
||||
)
|
||||
if _, err := targetDB.Exec(alterSQL); err != nil {
|
||||
s.appendLog(config.JobID, res, "error", fmt.Sprintf(" -> 补字段失败:表=%s 字段=%s 错误=%v", tableName, colName, err))
|
||||
continue
|
||||
}
|
||||
s.appendLog(config.JobID, res, "info", fmt.Sprintf(" -> 已补齐字段:表=%s 字段=%s 类型=%s", tableName, colName, colType))
|
||||
}
|
||||
|
||||
if len(missing) == 0 {
|
||||
s.appendLog(config.JobID, res, "info", fmt.Sprintf("表结构一致:%s", tableName))
|
||||
} else {
|
||||
s.appendLog(config.JobID, res, "info", fmt.Sprintf("表结构同步完成:%s(新增字段 %d 个)", tableName, len(missing)))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
109
internal/sync/sql_helpers.go
Normal file
109
internal/sync/sql_helpers.go
Normal file
@@ -0,0 +1,109 @@
|
||||
package sync
|
||||
|
||||
import "strings"
|
||||
|
||||
func normalizeSyncMode(mode string) string {
|
||||
m := strings.ToLower(strings.TrimSpace(mode))
|
||||
switch m {
|
||||
case "", "insert_update":
|
||||
return "insert_update"
|
||||
case "insert_only":
|
||||
return "insert_only"
|
||||
case "full_overwrite":
|
||||
return "full_overwrite"
|
||||
default:
|
||||
return "insert_update"
|
||||
}
|
||||
}
|
||||
|
||||
func quoteIdentByType(dbType string, ident string) string {
|
||||
if ident == "" {
|
||||
return ident
|
||||
}
|
||||
|
||||
switch dbType {
|
||||
case "mysql":
|
||||
return "`" + strings.ReplaceAll(ident, "`", "``") + "`"
|
||||
default:
|
||||
return `"` + strings.ReplaceAll(ident, `"`, `""`) + `"`
|
||||
}
|
||||
}
|
||||
|
||||
func quoteQualifiedIdentByType(dbType string, ident string) string {
|
||||
raw := strings.TrimSpace(ident)
|
||||
if raw == "" {
|
||||
return raw
|
||||
}
|
||||
|
||||
parts := strings.Split(raw, ".")
|
||||
if len(parts) <= 1 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
|
||||
quotedParts := make([]string, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
part = strings.TrimSpace(part)
|
||||
if part == "" {
|
||||
continue
|
||||
}
|
||||
quotedParts = append(quotedParts, quoteIdentByType(dbType, part))
|
||||
}
|
||||
|
||||
if len(quotedParts) == 0 {
|
||||
return quoteIdentByType(dbType, raw)
|
||||
}
|
||||
return strings.Join(quotedParts, ".")
|
||||
}
|
||||
|
||||
func normalizeSchemaAndTable(dbType string, dbName string, tableName string) (string, string) {
|
||||
rawTable := strings.TrimSpace(tableName)
|
||||
rawDB := strings.TrimSpace(dbName)
|
||||
if rawTable == "" {
|
||||
return rawDB, rawTable
|
||||
}
|
||||
|
||||
if parts := strings.SplitN(rawTable, ".", 2); len(parts) == 2 {
|
||||
schema := strings.TrimSpace(parts[0])
|
||||
table := strings.TrimSpace(parts[1])
|
||||
if schema != "" && table != "" {
|
||||
return schema, table
|
||||
}
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(dbType)) {
|
||||
case "postgres", "kingbase":
|
||||
return "public", rawTable
|
||||
default:
|
||||
return rawDB, rawTable
|
||||
}
|
||||
}
|
||||
|
||||
func qualifiedNameForQuery(dbType string, schema string, table string, original string) string {
|
||||
raw := strings.TrimSpace(original)
|
||||
if raw == "" {
|
||||
return raw
|
||||
}
|
||||
if strings.Contains(raw, ".") {
|
||||
return raw
|
||||
}
|
||||
|
||||
switch strings.ToLower(strings.TrimSpace(dbType)) {
|
||||
case "postgres", "kingbase":
|
||||
s := strings.TrimSpace(schema)
|
||||
if s == "" {
|
||||
s = "public"
|
||||
}
|
||||
if table == "" {
|
||||
return raw
|
||||
}
|
||||
return s + "." + table
|
||||
case "mysql":
|
||||
s := strings.TrimSpace(schema)
|
||||
if s == "" || table == "" {
|
||||
return table
|
||||
}
|
||||
return s + "." + table
|
||||
default:
|
||||
return table
|
||||
}
|
||||
}
|
||||
@@ -5,15 +5,21 @@ import (
|
||||
"GoNavi-Wails/internal/db"
|
||||
"GoNavi-Wails/internal/logger"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// SyncConfig defines the parameters for a synchronization task
|
||||
type SyncConfig struct {
|
||||
SourceConfig connection.ConnectionConfig `json:"sourceConfig"`
|
||||
TargetConfig connection.ConnectionConfig `json:"targetConfig"`
|
||||
Tables []string `json:"tables"` // Tables to sync
|
||||
Mode string `json:"mode"` // "insert_update", "full_overwrite"
|
||||
SourceConfig connection.ConnectionConfig `json:"sourceConfig"`
|
||||
TargetConfig connection.ConnectionConfig `json:"targetConfig"`
|
||||
Tables []string `json:"tables"` // Tables to sync
|
||||
Content string `json:"content,omitempty"` // "data", "schema", "both"
|
||||
Mode string `json:"mode"` // "insert_update", "insert_only", "full_overwrite"
|
||||
JobID string `json:"jobId,omitempty"`
|
||||
AutoAddColumns bool `json:"autoAddColumns,omitempty"` // 自动补齐缺失字段(当前仅 MySQL 目标支持)
|
||||
TableOptions map[string]TableOptions `json:"tableOptions,omitempty"`
|
||||
}
|
||||
|
||||
// SyncResult holds the result of the sync operation
|
||||
@@ -28,21 +34,55 @@ type SyncResult struct {
|
||||
}
|
||||
|
||||
type SyncEngine struct {
|
||||
reporter Reporter
|
||||
}
|
||||
|
||||
func NewSyncEngine() *SyncEngine {
|
||||
return &SyncEngine{}
|
||||
func NewSyncEngine(reporter Reporter) *SyncEngine {
|
||||
return &SyncEngine{reporter: reporter}
|
||||
}
|
||||
|
||||
// CompareAndSync performs the synchronization
|
||||
func (s *SyncEngine) RunSync(config SyncConfig) SyncResult {
|
||||
result := SyncResult{Success: true, Logs: []string{}}
|
||||
logger.Infof("开始数据同步:源=%s 目标=%s 表数量=%d", formatConnSummaryForSync(config.SourceConfig), formatConnSummaryForSync(config.TargetConfig), len(config.Tables))
|
||||
totalTables := len(config.Tables)
|
||||
s.progress(config.JobID, 0, totalTables, "", "开始同步")
|
||||
|
||||
contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
|
||||
syncSchema := false
|
||||
syncData := true
|
||||
switch contentRaw {
|
||||
case "", "data":
|
||||
syncData = true
|
||||
case "schema":
|
||||
syncSchema = true
|
||||
syncData = false
|
||||
case "both":
|
||||
syncSchema = true
|
||||
syncData = true
|
||||
default:
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("未知同步内容 %q,已自动使用仅同步数据", config.Content))
|
||||
syncData = true
|
||||
}
|
||||
|
||||
modeRaw := strings.ToLower(strings.TrimSpace(config.Mode))
|
||||
if modeRaw != "" && modeRaw != "insert_update" && modeRaw != "insert_only" && modeRaw != "full_overwrite" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("未知同步模式 %q,已自动使用 insert_update", config.Mode))
|
||||
}
|
||||
defaultMode := normalizeSyncMode(config.Mode)
|
||||
|
||||
contentLabel := "仅同步数据"
|
||||
if syncSchema && syncData {
|
||||
contentLabel = "同步结构+数据"
|
||||
} else if syncSchema {
|
||||
contentLabel = "仅同步结构"
|
||||
}
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("同步内容:%s;模式:%s;自动补字段:%v", contentLabel, defaultMode, config.AutoAddColumns))
|
||||
|
||||
sourceDB, err := db.NewDatabase(config.SourceConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "初始化源数据库驱动失败:类型=%s", config.SourceConfig.Type)
|
||||
return s.fail(result, "初始化源数据库驱动失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "初始化源数据库驱动失败: "+err.Error())
|
||||
}
|
||||
if config.SourceConfig.Type == "custom" {
|
||||
// Custom DB setup would go here if needed
|
||||
@@ -51,133 +91,402 @@ func (s *SyncEngine) RunSync(config SyncConfig) SyncResult {
|
||||
targetDB, err := db.NewDatabase(config.TargetConfig.Type)
|
||||
if err != nil {
|
||||
logger.Error(err, "初始化目标数据库驱动失败:类型=%s", config.TargetConfig.Type)
|
||||
return s.fail(result, "初始化目标数据库驱动失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "初始化目标数据库驱动失败: "+err.Error())
|
||||
}
|
||||
|
||||
// Connect Source
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("正在连接源数据库: %s...", config.SourceConfig.Host))
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在连接源数据库: %s...", config.SourceConfig.Host))
|
||||
s.progress(config.JobID, 0, totalTables, "", "连接源数据库")
|
||||
if err := sourceDB.Connect(config.SourceConfig); err != nil {
|
||||
logger.Error(err, "源数据库连接失败:%s", formatConnSummaryForSync(config.SourceConfig))
|
||||
return s.fail(result, "源数据库连接失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "源数据库连接失败: "+err.Error())
|
||||
}
|
||||
defer sourceDB.Close()
|
||||
|
||||
// Connect Target
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("正在连接目标数据库: %s...", config.TargetConfig.Host))
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在连接目标数据库: %s...", config.TargetConfig.Host))
|
||||
s.progress(config.JobID, 0, totalTables, "", "连接目标数据库")
|
||||
if err := targetDB.Connect(config.TargetConfig); err != nil {
|
||||
logger.Error(err, "目标数据库连接失败:%s", formatConnSummaryForSync(config.TargetConfig))
|
||||
return s.fail(result, "目标数据库连接失败: "+err.Error())
|
||||
return s.fail(config.JobID, totalTables, result, "目标数据库连接失败: "+err.Error())
|
||||
}
|
||||
defer targetDB.Close()
|
||||
|
||||
// Iterate Tables
|
||||
for _, tableName := range config.Tables {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("正在同步表: %s", tableName))
|
||||
for i, tableName := range config.Tables {
|
||||
func() {
|
||||
tableMode := defaultMode
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("正在同步表: %s", tableName))
|
||||
s.progress(config.JobID, i, totalTables, tableName, fmt.Sprintf("同步表(%d/%d)", i+1, totalTables))
|
||||
defer s.progress(config.JobID, i+1, totalTables, tableName, "表处理完成")
|
||||
|
||||
// 1. Get Columns & PKs (Naive approach: assume same schema)
|
||||
cols, err := sourceDB.GetColumns(config.SourceConfig.Database, tableName)
|
||||
if err != nil {
|
||||
logger.Error(err, "获取源表列信息失败:表=%s", tableName)
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("获取表 %s 的列信息失败: %v", tableName, err))
|
||||
continue
|
||||
}
|
||||
|
||||
pkCol := ""
|
||||
for _, col := range cols {
|
||||
if col.Key == "PRI" || col.Key == "PK" {
|
||||
pkCol = col.Name
|
||||
break
|
||||
if syncSchema {
|
||||
s.progress(config.JobID, i, totalTables, tableName, "同步表结构")
|
||||
if err := s.syncTableSchema(config, &result, sourceDB, targetDB, tableName); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("表结构同步失败:表=%s 错误=%v", tableName, err))
|
||||
return
|
||||
}
|
||||
}
|
||||
if !syncData {
|
||||
result.TablesSynced++
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if pkCol == "" {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("跳过表 %s: 未找到主键 (同步需要主键)", tableName))
|
||||
continue
|
||||
}
|
||||
sourceSchema, sourceTable := normalizeSchemaAndTable(config.SourceConfig.Type, config.SourceConfig.Database, tableName)
|
||||
targetSchema, targetTable := normalizeSchemaAndTable(config.TargetConfig.Type, config.TargetConfig.Database, tableName)
|
||||
sourceQueryTable := qualifiedNameForQuery(config.SourceConfig.Type, sourceSchema, sourceTable, tableName)
|
||||
targetQueryTable := qualifiedNameForQuery(config.TargetConfig.Type, targetSchema, targetTable, tableName)
|
||||
|
||||
// 2. Fetch Data (MEMORY INTENSIVE - PROTOTYPE ONLY)
|
||||
// TODO: Implement paging/streaming
|
||||
sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", tableName))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取源表失败:表=%s", tableName)
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("读取源表 %s 失败: %v", tableName, err))
|
||||
continue
|
||||
}
|
||||
// 1. Get Columns & PKs
|
||||
cols, err := sourceDB.GetColumns(sourceSchema, sourceTable)
|
||||
if err != nil {
|
||||
logger.Error(err, "获取源表列信息失败:表=%s", tableName)
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("获取表 %s 的列信息失败: %v", tableName, err))
|
||||
return
|
||||
}
|
||||
sourceColsByLower := make(map[string]connection.ColumnDefinition, len(cols))
|
||||
for _, col := range cols {
|
||||
if strings.TrimSpace(col.Name) == "" {
|
||||
continue
|
||||
}
|
||||
sourceColsByLower[strings.ToLower(strings.TrimSpace(col.Name))] = col
|
||||
}
|
||||
|
||||
targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", tableName))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取目标表失败:表=%s", tableName)
|
||||
// Table might not exist in target?
|
||||
// Check if error is "table not found" -> Try to Create?
|
||||
// For now, assume table exists.
|
||||
result.Logs = append(result.Logs, fmt.Sprintf("读取目标表 %s 失败: %v", tableName, err))
|
||||
continue
|
||||
}
|
||||
pkCols := make([]string, 0, 2)
|
||||
for _, col := range cols {
|
||||
if col.Key == "PRI" || col.Key == "PK" {
|
||||
pkCols = append(pkCols, col.Name)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Compare (In-Memory Hash Map)
|
||||
targetMap := make(map[string]map[string]interface{})
|
||||
for _, row := range targetRows {
|
||||
pkVal := fmt.Sprintf("%v", row[pkCol])
|
||||
targetMap[pkVal] = row
|
||||
}
|
||||
if len(pkCols) == 0 {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("表 %s 未找到主键,已跳过数据同步(避免产生重复数据)", tableName))
|
||||
return
|
||||
}
|
||||
if len(pkCols) > 1 {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf("表 %s 为复合主键(%s),当前暂不支持数据同步", tableName, strings.Join(pkCols, ",")))
|
||||
return
|
||||
}
|
||||
pkCol := pkCols[0]
|
||||
|
||||
var inserts []map[string]interface{}
|
||||
var updates []connection.UpdateRow
|
||||
// var deletes []map[string]interface{} // Not implemented in "insert_update" mode usually
|
||||
|
||||
for _, sRow := range sourceRows {
|
||||
pkVal := fmt.Sprintf("%v", sRow[pkCol])
|
||||
|
||||
if tRow, exists := targetMap[pkVal]; exists {
|
||||
// Update? Compare values
|
||||
// Simplified: Compare string representations or iterate keys
|
||||
// For prototype: assume update if exists
|
||||
// Optimization: Check diff
|
||||
changes := make(map[string]interface{})
|
||||
for k, v := range sRow {
|
||||
if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
|
||||
changes[k] = v
|
||||
opts := TableOptions{Insert: true, Update: true, Delete: false}
|
||||
if config.TableOptions != nil {
|
||||
if t, ok := config.TableOptions[tableName]; ok {
|
||||
opts = t
|
||||
// 默认防护:如用户未设置任意一个字段,保持 insert/update 默认 true、delete 默认 false
|
||||
if !t.Insert && !t.Update && !t.Delete {
|
||||
opts = t
|
||||
}
|
||||
}
|
||||
if len(changes) > 0 {
|
||||
updates = append(updates, connection.UpdateRow{
|
||||
Keys: map[string]interface{}{pkCol: pkVal},
|
||||
Values: changes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// Insert
|
||||
inserts = append(inserts, sRow)
|
||||
}
|
||||
}
|
||||
if !opts.Insert && !opts.Update && !opts.Delete {
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf("表 %s 未勾选任何操作,已跳过", tableName))
|
||||
return
|
||||
}
|
||||
|
||||
// 4. Apply Changes
|
||||
changeSet := connection.ChangeSet{
|
||||
Inserts: inserts,
|
||||
Updates: updates,
|
||||
}
|
||||
// 2. Fetch Data (MEMORY INTENSIVE - PROTOTYPE ONLY)
|
||||
// TODO: Implement paging/streaming
|
||||
s.progress(config.JobID, i, totalTables, tableName, "读取源表数据")
|
||||
sourceRows, _, err := sourceDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.SourceConfig.Type, sourceQueryTable)))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取源表失败:表=%s", tableName)
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("读取源表 %s 失败: %v", tableName, err))
|
||||
return
|
||||
}
|
||||
|
||||
if len(inserts) > 0 || len(updates) > 0 {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行", len(inserts), len(updates)))
|
||||
var inserts []map[string]interface{}
|
||||
var updates []connection.UpdateRow
|
||||
|
||||
// We need a BatchApplier interface or assume Database implements ApplyChanges
|
||||
if applier, ok := targetDB.(db.BatchApplier); ok {
|
||||
if err := applier.ApplyChanges(tableName, changeSet); err != nil {
|
||||
result.Logs = append(result.Logs, fmt.Sprintf(" -> 应用变更失败: %v", err))
|
||||
if tableMode == "insert_update" {
|
||||
s.progress(config.JobID, i, totalTables, tableName, "读取目标表数据")
|
||||
targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable)))
|
||||
if err != nil {
|
||||
logger.Error(err, "读取目标表失败:表=%s", tableName)
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf("读取目标表 %s 失败: %v", tableName, err))
|
||||
return
|
||||
}
|
||||
|
||||
// 3. Compare (In-Memory Hash Map)
|
||||
s.progress(config.JobID, i, totalTables, tableName, "对比差异")
|
||||
targetMap := make(map[string]map[string]interface{})
|
||||
for _, row := range targetRows {
|
||||
if row[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := fmt.Sprintf("%v", row[pkCol])
|
||||
if strings.TrimSpace(pkVal) == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
targetMap[pkVal] = row
|
||||
}
|
||||
sourcePKSet := make(map[string]struct{}, len(sourceRows))
|
||||
|
||||
for _, sRow := range sourceRows {
|
||||
if sRow[pkCol] == nil {
|
||||
continue
|
||||
}
|
||||
pkVal := fmt.Sprintf("%v", sRow[pkCol])
|
||||
if strings.TrimSpace(pkVal) == "" || pkVal == "<nil>" {
|
||||
continue
|
||||
}
|
||||
sourcePKSet[pkVal] = struct{}{}
|
||||
|
||||
if tRow, exists := targetMap[pkVal]; exists {
|
||||
changes := make(map[string]interface{})
|
||||
for k, v := range sRow {
|
||||
if fmt.Sprintf("%v", v) != fmt.Sprintf("%v", tRow[k]) {
|
||||
changes[k] = v
|
||||
}
|
||||
}
|
||||
if len(changes) > 0 {
|
||||
updates = append(updates, connection.UpdateRow{
|
||||
Keys: map[string]interface{}{pkCol: sRow[pkCol]},
|
||||
Values: changes,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
inserts = append(inserts, sRow)
|
||||
}
|
||||
}
|
||||
|
||||
var deletes []map[string]interface{}
|
||||
if opts.Delete {
|
||||
for pkStr, row := range targetMap {
|
||||
if _, ok := sourcePKSet[pkStr]; ok {
|
||||
continue
|
||||
}
|
||||
deletes = append(deletes, map[string]interface{}{pkCol: row[pkCol]})
|
||||
}
|
||||
}
|
||||
|
||||
// apply operation selection
|
||||
inserts = filterRowsByPKSelection(pkCol, inserts, opts.Insert, opts.SelectedInsertPKs)
|
||||
updates = filterUpdatesByPKSelection(pkCol, updates, opts.Update, opts.SelectedUpdatePKs)
|
||||
deletes = filterRowsByPKSelection(pkCol, deletes, opts.Delete, opts.SelectedDeletePKs)
|
||||
|
||||
changeSet := connection.ChangeSet{
|
||||
Inserts: inserts,
|
||||
Updates: updates,
|
||||
Deletes: deletes,
|
||||
}
|
||||
|
||||
// 4. Align schema (target missing columns)
|
||||
s.progress(config.JobID, i, totalTables, tableName, "检查字段一致性")
|
||||
requiredCols := collectRequiredColumns(changeSet.Inserts, changeSet.Updates)
|
||||
targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err != nil {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 获取目标表字段失败,已跳过字段一致性检查: %v", err))
|
||||
} else {
|
||||
result.RowsInserted += len(inserts)
|
||||
result.RowsUpdated += len(updates)
|
||||
targetColSet := make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
|
||||
missing := make([]string, 0)
|
||||
for lower, original := range requiredCols {
|
||||
if _, ok := targetColSet[lower]; !ok {
|
||||
missing = append(missing, original)
|
||||
}
|
||||
}
|
||||
sort.Strings(missing)
|
||||
|
||||
if len(missing) > 0 {
|
||||
if config.AutoAddColumns && strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个,开始自动补齐: %s", len(missing), strings.Join(missing, ", ")))
|
||||
added := 0
|
||||
for _, colName := range missing {
|
||||
colLower := strings.ToLower(strings.TrimSpace(colName))
|
||||
colType := "TEXT"
|
||||
if strings.ToLower(strings.TrimSpace(config.SourceConfig.Type)) == "mysql" {
|
||||
if srcCol, ok := sourceColsByLower[colLower]; ok {
|
||||
colType = sanitizeMySQLColumnType(srcCol.Type)
|
||||
}
|
||||
}
|
||||
|
||||
alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
|
||||
quoteQualifiedIdentByType("mysql", targetQueryTable),
|
||||
quoteIdentByType("mysql", colName),
|
||||
colType,
|
||||
)
|
||||
if _, err := targetDB.Exec(alterSQL); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 自动补字段失败:字段=%s 错误=%v", colName, err))
|
||||
continue
|
||||
}
|
||||
added++
|
||||
}
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 自动补字段完成:成功=%d 失败=%d", added, len(missing)-added))
|
||||
|
||||
// refresh columns
|
||||
targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err == nil {
|
||||
targetColSet = make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个(未开启自动补齐),将自动忽略:%s", len(missing), strings.Join(missing, ", ")))
|
||||
}
|
||||
|
||||
// filter out still-missing columns to avoid apply failure
|
||||
changeSet.Inserts = filterInsertRows(changeSet.Inserts, targetColSet)
|
||||
changeSet.Updates = filterUpdateRows(changeSet.Updates, targetColSet)
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Apply Changes
|
||||
s.progress(config.JobID, i, totalTables, tableName, "应用变更")
|
||||
|
||||
if len(changeSet.Inserts) > 0 || len(changeSet.Updates) > 0 || len(changeSet.Deletes) > 0 {
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行, 需删除: %d 行", len(changeSet.Inserts), len(changeSet.Updates), len(changeSet.Deletes)))
|
||||
|
||||
if applier, ok := targetDB.(db.BatchApplier); ok {
|
||||
if err := applier.ApplyChanges(targetTable, changeSet); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 应用变更失败: %v", err))
|
||||
} else {
|
||||
result.RowsInserted += len(changeSet.Inserts)
|
||||
result.RowsUpdated += len(changeSet.Updates)
|
||||
result.RowsDeleted += len(changeSet.Deletes)
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "info", " -> 数据一致,无需变更.")
|
||||
}
|
||||
|
||||
result.TablesSynced++
|
||||
return
|
||||
} else {
|
||||
// insert_only / full_overwrite: do not compare target, just insert source rows
|
||||
inserts = sourceRows
|
||||
}
|
||||
|
||||
// full_overwrite: clear target table first
|
||||
if tableMode == "full_overwrite" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 全量覆盖模式:即将清空目标表 %s", tableName))
|
||||
s.progress(config.JobID, i, totalTables, tableName, "清空目标表")
|
||||
clearSQL := ""
|
||||
if strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
|
||||
clearSQL = fmt.Sprintf("TRUNCATE TABLE %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable))
|
||||
} else {
|
||||
clearSQL = fmt.Sprintf("DELETE FROM %s", quoteQualifiedIdentByType(config.TargetConfig.Type, targetQueryTable))
|
||||
}
|
||||
if _, err := targetDB.Exec(clearSQL); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 清空目标表失败: %v", err))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Align schema (target missing columns)
|
||||
s.progress(config.JobID, i, totalTables, tableName, "检查字段一致性")
|
||||
requiredCols := collectRequiredColumns(inserts, updates)
|
||||
targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err != nil {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 获取目标表字段失败,已跳过字段一致性检查: %v", err))
|
||||
} else {
|
||||
targetColSet := make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
|
||||
missing := make([]string, 0)
|
||||
for lower, original := range requiredCols {
|
||||
if _, ok := targetColSet[lower]; !ok {
|
||||
missing = append(missing, original)
|
||||
}
|
||||
}
|
||||
sort.Strings(missing)
|
||||
|
||||
if len(missing) > 0 {
|
||||
if config.AutoAddColumns && strings.ToLower(strings.TrimSpace(config.TargetConfig.Type)) == "mysql" {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个,开始自动补齐: %s", len(missing), strings.Join(missing, ", ")))
|
||||
added := 0
|
||||
for _, colName := range missing {
|
||||
colLower := strings.ToLower(strings.TrimSpace(colName))
|
||||
colType := "TEXT"
|
||||
if strings.ToLower(strings.TrimSpace(config.SourceConfig.Type)) == "mysql" {
|
||||
if srcCol, ok := sourceColsByLower[colLower]; ok {
|
||||
colType = sanitizeMySQLColumnType(srcCol.Type)
|
||||
}
|
||||
}
|
||||
|
||||
alterSQL := fmt.Sprintf("ALTER TABLE %s ADD COLUMN %s %s NULL",
|
||||
quoteQualifiedIdentByType("mysql", targetQueryTable),
|
||||
quoteIdentByType("mysql", colName),
|
||||
colType,
|
||||
)
|
||||
if _, err := targetDB.Exec(alterSQL); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 自动补字段失败:字段=%s 错误=%v", colName, err))
|
||||
continue
|
||||
}
|
||||
added++
|
||||
}
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 自动补字段完成:成功=%d 失败=%d", added, len(missing)-added))
|
||||
|
||||
// refresh columns
|
||||
targetCols, err = targetDB.GetColumns(targetSchema, targetTable)
|
||||
if err == nil {
|
||||
targetColSet = make(map[string]struct{}, len(targetCols))
|
||||
for _, c := range targetCols {
|
||||
name := strings.ToLower(strings.TrimSpace(c.Name))
|
||||
if name == "" {
|
||||
continue
|
||||
}
|
||||
targetColSet[name] = struct{}{}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", fmt.Sprintf(" -> 目标表缺少字段 %d 个(未开启自动补齐),将自动忽略:%s", len(missing), strings.Join(missing, ", ")))
|
||||
}
|
||||
|
||||
// filter out still-missing columns to avoid apply failure
|
||||
inserts = filterInsertRows(inserts, targetColSet)
|
||||
updates = filterUpdateRows(updates, targetColSet)
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Apply Changes
|
||||
s.progress(config.JobID, i, totalTables, tableName, "应用变更")
|
||||
changeSet := connection.ChangeSet{
|
||||
Inserts: inserts,
|
||||
Updates: updates,
|
||||
}
|
||||
|
||||
if len(changeSet.Inserts) > 0 || len(changeSet.Updates) > 0 {
|
||||
s.appendLog(config.JobID, &result, "info", fmt.Sprintf(" -> 需插入: %d 行, 需更新: %d 行", len(changeSet.Inserts), len(changeSet.Updates)))
|
||||
|
||||
if applier, ok := targetDB.(db.BatchApplier); ok {
|
||||
if err := applier.ApplyChanges(targetTable, changeSet); err != nil {
|
||||
s.appendLog(config.JobID, &result, "error", fmt.Sprintf(" -> 应用变更失败: %v", err))
|
||||
} else {
|
||||
result.RowsInserted += len(changeSet.Inserts)
|
||||
result.RowsUpdated += len(changeSet.Updates)
|
||||
}
|
||||
} else {
|
||||
s.appendLog(config.JobID, &result, "warn", " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
|
||||
}
|
||||
} else {
|
||||
result.Logs = append(result.Logs, " -> 目标驱动不支持应用数据变更 (ApplyChanges).")
|
||||
s.appendLog(config.JobID, &result, "info", " -> 数据一致,无需变更.")
|
||||
}
|
||||
} else {
|
||||
result.Logs = append(result.Logs, " -> 数据一致,无需变更.")
|
||||
}
|
||||
|
||||
result.TablesSynced++
|
||||
result.TablesSynced++
|
||||
}()
|
||||
}
|
||||
|
||||
s.progress(config.JobID, totalTables, totalTables, "", "同步完成")
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -196,9 +505,52 @@ func formatConnSummaryForSync(config connection.ConnectionConfig) string {
|
||||
config.Type, config.Host, config.Port, dbName, config.User, timeoutSeconds)
|
||||
}
|
||||
|
||||
func (s *SyncEngine) fail(res SyncResult, msg string) SyncResult {
|
||||
func (s *SyncEngine) appendLog(jobID string, res *SyncResult, level string, msg string) {
|
||||
if res != nil {
|
||||
res.Logs = append(res.Logs, msg)
|
||||
}
|
||||
if s.reporter.OnLog != nil && strings.TrimSpace(jobID) != "" {
|
||||
s.reporter.OnLog(SyncLogEvent{
|
||||
JobID: jobID,
|
||||
Level: level,
|
||||
Message: msg,
|
||||
Ts: time.Now().UnixMilli(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (s *SyncEngine) progress(jobID string, current, total int, table string, stage string) {
|
||||
if s.reporter.OnProgress == nil || strings.TrimSpace(jobID) == "" {
|
||||
return
|
||||
}
|
||||
percent := 0
|
||||
if total <= 0 {
|
||||
if current > 0 {
|
||||
percent = 100
|
||||
}
|
||||
} else {
|
||||
if current < 0 {
|
||||
current = 0
|
||||
}
|
||||
if current > total {
|
||||
current = total
|
||||
}
|
||||
percent = (current * 100) / total
|
||||
}
|
||||
s.reporter.OnProgress(SyncProgressEvent{
|
||||
JobID: jobID,
|
||||
Percent: percent,
|
||||
Current: current,
|
||||
Total: total,
|
||||
Table: table,
|
||||
Stage: stage,
|
||||
})
|
||||
}
|
||||
|
||||
func (s *SyncEngine) fail(jobID string, totalTables int, res SyncResult, msg string) SyncResult {
|
||||
res.Success = false
|
||||
res.Message = msg
|
||||
res.Logs = append(res.Logs, "致命错误: "+msg)
|
||||
s.appendLog(jobID, &res, "error", "致命错误: "+msg)
|
||||
s.progress(jobID, res.TablesSynced, totalTables, "", "同步失败")
|
||||
return res
|
||||
}
|
||||
|
||||
30
internal/sync/sync_events.go
Normal file
30
internal/sync/sync_events.go
Normal file
@@ -0,0 +1,30 @@
|
||||
// Package sync implements the data-synchronization engine and the events it
// streams to the frontend while a job is running.
package sync

// Event channel names used to stream sync lifecycle updates to the UI.
const (
	EventSyncStart    = "sync:start"
	EventSyncProgress = "sync:progress"
	EventSyncLog      = "sync:log"
	EventSyncDone     = "sync:done"
)

// SyncLogEvent is a single log line produced while a sync job runs.
type SyncLogEvent struct {
	JobID   string `json:"jobId"`
	Level   string `json:"level"` // info/warn/error
	Message string `json:"message"`
	Ts      int64  `json:"ts"` // Unix timestamp in milliseconds
}

// SyncProgressEvent reports how far a sync job has advanced.
type SyncProgressEvent struct {
	JobID   string `json:"jobId"`
	Percent int    `json:"percent"`
	Current int    `json:"current"` // number of tables already synced
	Total   int    `json:"total"`   // total number of tables in the job
	Table   string `json:"table,omitempty"` // table currently being processed, if any
	Stage   string `json:"stage,omitempty"` // human-readable stage label, if any
}

// Reporter carries the optional callbacks the engine invokes to surface logs
// and progress; a nil callback is simply skipped.
type Reporter struct {
	OnLog      func(event SyncLogEvent)
	OnProgress func(event SyncProgressEvent)
}
|
||||
|
||||
13
internal/sync/table_options.go
Normal file
13
internal/sync/table_options.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package sync

// TableOptions controls which operations to apply per table, and optional row selection.
// Note: when a Selected*PKs field is unset (nil), all differing rows of that
// kind are synced; an explicitly empty slice likewise means "all rows".
type TableOptions struct {
	Insert bool `json:"insert,omitempty"` // apply rows missing from the target
	Update bool `json:"update,omitempty"` // apply rows that differ between source and target
	Delete bool `json:"delete,omitempty"` // remove target-only rows

	// Primary-key values selecting the subset of rows to sync per operation.
	SelectedInsertPKs []string `json:"selectedInsertPks,omitempty"`
	SelectedUpdatePKs []string `json:"selectedUpdatePks,omitempty"`
	SelectedDeletePKs []string `json:"selectedDeletePks,omitempty"`
}
|
||||
Reference in New Issue
Block a user