feat(sync): 新增 SQL 结果集数据同步能力

- 同步引擎新增查询结果集同步分支,支持单目标表差异分析、预览与执行
- 数据同步工作台增加 SQL 结果集模式,并补充目标表与查询校验
- 补充后端同步链路与前端请求构造回归测试,并更新 backlog 记录

Fixes #321
This commit is contained in:
Syngnat
2026-04-17 16:31:55 +08:00
parent 9dc58acb39
commit 651eec1617
9 changed files with 957 additions and 46 deletions

View File

@@ -172,6 +172,17 @@
- 处理:将 MySQL 分支拆分为 rename 与 redefine 两条路径。列名发生变化时使用 `CHANGE COLUMN 原列名 新列定义`,其余类型/默认值/注释/自增等普通变更继续走 `MODIFY COLUMN`,保留原有位置子句(`FIRST` / `AFTER`)。
- 验证:补充 `frontend/src/components/tableDesignerSchemaSql.test.ts` 回归测试,覆盖 MySQL 重命名列时必须生成 `CHANGE COLUMN` 而不是 `MODIFY COLUMN`,并在 `frontend` 目录执行 `npm exec vitest run src/components/tableDesignerSchemaSql.test.ts` 与 `npm run build`。
### #375
- 复核结论:该问题已在 `origin/dev` 落地,不应继续作为待修复 backlog 处理。
- 已有关联提交:`7378966 fix(mysql): 表列表排除视图 refs bug#375` 与 `c631fee fix(ui): 表概览排除视图 refs bug#375`
- 后续动作:本地重复修复提交不计入有效成果,整理分支时剔除;后续 issue 一律先核对 `gh` timeline 与 `origin/dev` 关联提交,再决定是否动手。
### #321
- 根因:现有数据同步链路只支持“按源表列表”推进,前端无法录入源 SQL,后端 `Analyze / Preview / RunSync` 也默认从源表 `SELECT *` 读取数据,不能把查询结果集当作同步源。
- 处理:新增 `sourceQuery` 同步分支。前端 `DataSyncModal` 增加“按 SQL 结果集同步”模式,限定为“源 SQL -> 单个已存在目标表”;后端在 `Analyze / Preview / RunSync` 中直接执行源 SQL,并按目标表主键复用现有差异计算、预览与应用逻辑。
- 验证:新增 `internal/sync/source_query_sync_test.go` 与 `frontend/src/components/dataSyncRequest.test.ts`,并执行 `go test ./internal/sync -count=1`;在 `frontend` 目录执行 `npm exec vitest run src/components/dataSyncRequest.test.ts` 与 `npm run build`。
### #330
- 根因:查询结果表格已经支持拖拽调整列宽,但 resize handle 没有提供双击自适应逻辑,导致用户只能靠手工拖拽慢慢试宽度。

View File

@@ -8,10 +8,12 @@ import { EventsOn } from '../../wailsjs/runtime/runtime';
import { normalizeOpacityForPlatform, resolveAppearanceValues } from '../utils/appearance';
import { buildRpcConnectionConfig } from '../utils/connectionRpcConfig';
import { formatLocalDateTimeLiteral, normalizeTemporalLiteralText } from './dataGridCopyInsert';
import { buildDataSyncRequest, type SourceDatasetMode, validateDataSyncSelection } from './dataSyncRequest';
const { Title, Text } = Typography;
const { Step } = Steps;
const { Option } = Select;
const { TextArea } = Input;
type SyncLogEvent = { jobId: string; level?: string; message?: string; ts?: number };
type SyncProgressEvent = { jobId: string; percent?: number; current?: number; total?: number; table?: string; stage?: string };
@@ -213,6 +215,8 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
// Step 2: Tables
const [allTables, setAllTables] = useState<string[]>([]);
const [selectedTables, setSelectedTables] = useState<string[]>([]);
const [sourceDatasetMode, setSourceDatasetMode] = useState<SourceDatasetMode>('table');
const [sourceQuery, setSourceQuery] = useState<string>('');
// Options
const [workflowType, setWorkflowType] = useState<WorkflowType>('sync');
@@ -293,7 +297,10 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
setTargetConnId('');
setSourceDb('');
setTargetDb('');
setAllTables([]);
setSelectedTables([]);
setSourceDatasetMode('table');
setSourceQuery('');
setWorkflowType('sync');
setSyncContent('data');
setSyncMode('insert_update');
@@ -341,6 +348,28 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
}
}, [workflowType]);
useEffect(() => {
if (sourceDatasetMode !== 'query') return;
if (workflowType !== 'sync') {
setWorkflowType('sync');
}
if (syncContent !== 'data') {
setSyncContent('data');
}
if (targetTableStrategy !== 'existing_only') {
setTargetTableStrategy('existing_only');
}
if (createIndexes) {
setCreateIndexes(false);
}
if (autoAddColumns) {
setAutoAddColumns(false);
}
if (selectedTables.length > 1) {
setSelectedTables(selectedTables.slice(0, 1));
}
}, [sourceDatasetMode, workflowType, syncContent, targetTableStrategy, createIndexes, autoAddColumns, selectedTables]);
const handleSourceConnChange = async (connId: string) => {
setSourceConnId(connId);
setSourceDb('');
@@ -386,10 +415,12 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
setLoading(true);
try {
const conn = connections.find(c => c.id === sourceConnId);
const connId = isSourceQueryMode ? targetConnId : sourceConnId;
const dbName = isSourceQueryMode ? targetDb : sourceDb;
const conn = connections.find(c => c.id === connId);
if (conn) {
const config = normalizeConnConfig(conn, sourceDb);
const res = await DBGetTables(config as any, sourceDb);
const config = normalizeConnConfig(conn, dbName);
const res = await DBGetTables(config as any, dbName);
if (res.success) {
// DBGetTables returns [{Table: "name"}, ...]
const tableRows = Array.isArray(res.data) ? res.data : [];
@@ -397,6 +428,13 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
.map((row: any) => row?.Table || row?.table || row?.TABLE_NAME || Object.values(row || {})[0])
.filter((name: any) => typeof name === 'string' && name.trim() !== '');
setAllTables(tables as string[]);
setSelectedTables(prev => {
const existing = prev.filter((name) => tables.includes(name));
if (isSourceQueryMode) {
return existing.slice(0, 1);
}
return existing;
});
setCurrentStep(1);
} else {
message.error(res.message);
@@ -414,7 +452,8 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
};
const analyzeDiff = async () => {
if (selectedTables.length === 0) return;
const selectionError = validateDataSyncSelection({ sourceDatasetMode, selectedTables, sourceQuery, syncContent });
if (selectionError) return message.error(selectionError);
if (!sourceConnId || !targetConnId) return message.error("Select connections first");
if (!sourceDb || !targetDb) return message.error("Select databases first");
@@ -431,18 +470,20 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
autoScrollRef.current = true;
setSyncProgress({ percent: 0, current: 0, total: selectedTables.length, table: '', stage: '差异分析' });
const config = {
const config = buildDataSyncRequest({
sourceConfig: normalizeConnConfig(sConn, sourceDb),
targetConfig: normalizeConnConfig(tConn, targetDb),
tables: selectedTables,
content: syncContent,
mode: "insert_update",
selectedTables,
sourceDatasetMode,
sourceQuery,
syncContent,
syncMode: "insert_update",
autoAddColumns,
targetTableStrategy,
createIndexes,
mongoCollectionName: mongoCollectionName.trim(),
mongoCollectionName,
jobId,
};
});
try {
const res = await DataSyncAnalyze(config as any);
@@ -484,17 +525,19 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
setPreviewLoading(true);
setPreviewData(null);
const config = {
const config = buildDataSyncRequest({
sourceConfig: normalizeConnConfig(sConn, sourceDb),
targetConfig: normalizeConnConfig(tConn, targetDb),
tables: selectedTables,
content: syncContent,
mode: "insert_update",
selectedTables,
sourceDatasetMode,
sourceQuery,
syncContent,
syncMode: "insert_update",
autoAddColumns,
targetTableStrategy,
createIndexes,
mongoCollectionName: mongoCollectionName.trim(),
};
mongoCollectionName,
});
try {
const res = await DataSyncPreview(config as any, table, 200);
@@ -511,6 +554,11 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
};
const runSync = async () => {
const selectionError = validateDataSyncSelection({ sourceDatasetMode, selectedTables, sourceQuery, syncContent });
if (selectionError) {
message.error(selectionError);
return;
}
if (syncContent !== 'schema' && diffTables.length === 0) {
message.error("请先对比差异,再开始同步");
return;
@@ -549,19 +597,21 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
stage: '准备开始',
});
const config = {
const config = buildDataSyncRequest({
sourceConfig: normalizeConnConfig(sConn, sourceDb),
targetConfig: normalizeConnConfig(tConn, targetDb),
tables: selectedTables,
content: syncContent,
mode: syncMode,
selectedTables,
sourceDatasetMode,
sourceQuery,
syncContent,
syncMode,
autoAddColumns,
targetTableStrategy,
createIndexes,
mongoCollectionName: mongoCollectionName.trim(),
mongoCollectionName,
tableOptions,
jobId,
};
});
try {
const res = await DataSync(config as any);
@@ -627,6 +677,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
return Array.from(new Set(items));
}, [diffTables]);
const isSourceQueryMode = sourceDatasetMode === 'query';
const isMigrationWorkflow = workflowType === 'migration';
const sourceConn = useMemo(() => connections.find(c => c.id === sourceConnId), [connections, sourceConnId]);
const targetConn = useMemo(() => connections.find(c => c.id === targetConnId), [connections, targetConnId]);
@@ -859,7 +910,13 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
<Form.Item label="功能类型">
<Select value={workflowType} onChange={setWorkflowType}>
<Option value="sync"></Option>
<Option value="migration"></Option>
<Option value="migration" disabled={isSourceQueryMode}></Option>
</Select>
</Form.Item>
<Form.Item label="源数据方式">
<Select value={sourceDatasetMode} onChange={setSourceDatasetMode}>
<Option value="table"></Option>
<Option value="query"> SQL </Option>
</Select>
</Form.Item>
<Alert
@@ -870,11 +927,19 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
? '当前为“跨库迁移”模式:适合将表迁移到另一数据源,可自动建表并导入数据。'
: '当前为“数据同步”模式:适合目标表已存在时做增量同步或覆盖导入。'}
/>
{isSourceQueryMode && (
<Alert
type="info"
showIcon
style={{ marginBottom: 12 }}
message="SQL 结果集同步当前只支持:源端自定义 SQL -> 单个已存在目标表;查询结果需包含目标表主键列。"
/>
)}
<Form.Item label={isMigrationWorkflow ? '迁移内容' : '同步内容'}>
<Select value={syncContent} onChange={setSyncContent}>
<Option value="data"></Option>
<Option value="schema"></Option>
<Option value="both"> + </Option>
<Option value="schema" disabled={isSourceQueryMode}></Option>
<Option value="both" disabled={isSourceQueryMode}> + </Option>
</Select>
</Form.Item>
<Form.Item label={isMigrationWorkflow ? '迁移模式' : '同步模式'}>
@@ -885,7 +950,7 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
</Select>
</Form.Item>
<Form.Item label={isMigrationWorkflow ? '目标表处理策略' : '目标表要求'}>
<Select value={targetTableStrategy} onChange={setTargetTableStrategy} disabled={!isMigrationWorkflow}>
<Select value={targetTableStrategy} onChange={setTargetTableStrategy} disabled={!isMigrationWorkflow || isSourceQueryMode}>
<Option value="existing_only">使</Option>
<Option value="auto_create_if_missing"></Option>
<Option value="smart"></Option>
@@ -908,12 +973,12 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
</Form.Item>
)}
<Form.Item>
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)}>
MySQL MySQL Kingbase
<Checkbox checked={autoAddColumns} onChange={(e) => setAutoAddColumns(e.target.checked)} disabled={isSourceQueryMode}>
MySQL MySQL KingbaseSQL
</Checkbox>
</Form.Item>
<Form.Item>
<Checkbox checked={createIndexes} onChange={(e) => setCreateIndexes(e.target.checked)} disabled={!isMigrationWorkflow || targetTableStrategy === 'existing_only'}>
<Checkbox checked={createIndexes} onChange={(e) => setCreateIndexes(e.target.checked)} disabled={!isMigrationWorkflow || targetTableStrategy === 'existing_only' || isSourceQueryMode}>
/
</Checkbox>
</Form.Item>
@@ -949,21 +1014,56 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
{currentStep === 1 && (
<div style={{ display: 'flex', flexDirection: 'column', gap: 14 }}>
<div style={quietPanelStyle}>
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: 10 }}>
<Text type="secondary"></Text>
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
</Checkbox>
</div>
<Transfer
dataSource={allTables.map(t => ({ key: t, title: t }))}
titles={['源表', '已选表']}
targetKeys={selectedTables}
onChange={(keys) => setSelectedTables(keys as string[])}
render={item => item.title}
listStyle={{ width: 390, height: 320, marginTop: 0, borderRadius: 14, overflow: 'hidden' }}
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表…', notFoundContent: '暂无数据' }}
/>
{!isSourceQueryMode && (
<>
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', marginBottom: 10 }}>
<Text type="secondary"></Text>
<Checkbox checked={showSameTables} onChange={(e) => setShowSameTables(e.target.checked)}>
</Checkbox>
</div>
<Transfer
dataSource={allTables.map(t => ({ key: t, title: t }))}
titles={['源表', '已选表']}
targetKeys={selectedTables}
onChange={(keys) => setSelectedTables(keys as string[])}
render={item => item.title}
listStyle={{ width: 390, height: 320, marginTop: 0, borderRadius: 14, overflow: 'hidden' }}
locale={{ itemUnit: '项', itemsUnit: '项', searchPlaceholder: '搜索表…', notFoundContent: '暂无数据' }}
/>
</>
)}
{isSourceQueryMode && (
<Form layout="vertical">
<Alert
type="info"
showIcon
style={{ marginBottom: 12 }}
message="请输入源查询 SQL并选择一个目标表。差异分析会直接基于该结果集与目标表对比。"
/>
<Form.Item label="源查询 SQL">
<TextArea
value={sourceQuery}
onChange={(e) => setSourceQuery(e.target.value)}
rows={8}
placeholder="例如SELECT id, name, email FROM users WHERE status = 'active'"
spellCheck={false}
/>
</Form.Item>
<Form.Item label="目标表">
<Select
value={selectedTables[0]}
onChange={(value) => setSelectedTables(value ? [value] : [])}
showSearch
allowClear
placeholder="请选择一个目标表"
optionFilterProp="children"
>
{allTables.map((table) => <Option key={table} value={table}>{table}</Option>)}
</Select>
</Form.Item>
</Form>
)}
</div>
{diffTables.length > 0 && (
@@ -1156,14 +1256,14 @@ const DataSyncModal: React.FC<{ open: boolean; onClose: () => void }> = ({ open,
{currentStep === 1 && (
<>
<Button onClick={() => setCurrentStep(0)} style={{ marginRight: 8 }}></Button>
<Button onClick={analyzeDiff} loading={loading} disabled={syncContent === 'schema' || selectedTables.length === 0 || analyzing} style={{ marginRight: 8 }}>
<Button onClick={analyzeDiff} loading={loading} disabled={syncContent === 'schema' || selectedTables.length === 0 || analyzing || (isSourceQueryMode && !sourceQuery.trim())} style={{ marginRight: 8 }}>
</Button>
<Button
type="primary"
onClick={runSync}
loading={loading}
disabled={selectedTables.length === 0 || (syncContent !== 'schema' && diffTables.length === 0)}
disabled={selectedTables.length === 0 || (isSourceQueryMode && !sourceQuery.trim()) || (syncContent !== 'schema' && diffTables.length === 0)}
>
</Button>

View File

@@ -0,0 +1,67 @@
import { describe, expect, it } from 'vitest';
import { buildDataSyncRequest, validateDataSyncSelection } from './dataSyncRequest';

// Regression tests for the SQL-result-set sync helpers (issue #321). The
// validator's user-facing messages and the builder's query-mode clamping are
// both contracts shared with the backend, so they are pinned exactly here.
describe('validateDataSyncSelection', () => {
  it('requires source query and single target table in query mode', () => {
    // A blank query is rejected before the table-count check.
    expect(validateDataSyncSelection({
      sourceDatasetMode: 'query',
      selectedTables: [],
      sourceQuery: '',
      syncContent: 'data',
    })).toBe('请输入源查询 SQL');
    // No target table selected.
    expect(validateDataSyncSelection({
      sourceDatasetMode: 'query',
      selectedTables: [],
      sourceQuery: 'select 1',
      syncContent: 'data',
    })).toBe('SQL 结果集同步需要选择一个目标表');
    // More than one target table is equally invalid — exactly one is required.
    expect(validateDataSyncSelection({
      sourceDatasetMode: 'query',
      selectedTables: ['users', 'orders'],
      sourceQuery: 'select 1',
      syncContent: 'data',
    })).toBe('SQL 结果集同步需要选择一个目标表');
  });

  it('forces data-only in query mode', () => {
    // Schema sync is meaningless for a query result set.
    expect(validateDataSyncSelection({
      sourceDatasetMode: 'query',
      selectedTables: ['users'],
      sourceQuery: 'select 1',
      syncContent: 'both',
    })).toBe('SQL 结果集同步仅支持仅同步数据');
  });
});

describe('buildDataSyncRequest', () => {
  it('normalizes query mode payload for backend', () => {
    const payload = buildDataSyncRequest({
      sourceConfig: { type: 'mysql' },
      targetConfig: { type: 'mysql' },
      selectedTables: ['users'],
      sourceDatasetMode: 'query',
      sourceQuery: ' SELECT id, name FROM active_users ',
      syncContent: 'both',
      syncMode: 'insert_update',
      autoAddColumns: true,
      targetTableStrategy: 'smart',
      createIndexes: true,
      mongoCollectionName: ' ',
      jobId: 'job-1',
      tableOptions: { users: { insert: true, update: true, delete: false } },
    });
    // Query mode must trim the SQL and clamp content/strategy/flags to the
    // data-only, existing-table configuration regardless of caller input.
    expect(payload).toMatchObject({
      tables: ['users'],
      sourceQuery: 'SELECT id, name FROM active_users',
      content: 'data',
      mode: 'insert_update',
      autoAddColumns: false,
      targetTableStrategy: 'existing_only',
      createIndexes: false,
      jobId: 'job-1',
    });
  });
});

View File

@@ -0,0 +1,85 @@
// Source dataset selection for the data-sync workflow:
// 'table' — sync a list of source tables (original behavior);
// 'query' — sync the result set of a custom source SQL into one existing target table.
export type SourceDatasetMode = 'table' | 'query';

// What gets synchronized. Query mode only supports 'data' (enforced below).
type SyncContent = 'data' | 'schema' | 'both';

// How the target table is treated; query mode is clamped to 'existing_only'.
type TargetTableStrategy = 'existing_only' | 'auto_create_if_missing' | 'smart';

// Input for buildDataSyncRequest. sourceConfig/targetConfig are normalized
// connection configs passed straight through to the backend (shape owned by
// the connection layer, hence `any`).
type BuildDataSyncRequestParams = {
  sourceConfig: any;
  targetConfig: any;
  selectedTables: string[];
  sourceDatasetMode: SourceDatasetMode;
  sourceQuery: string;
  syncContent: SyncContent;
  syncMode: string;
  autoAddColumns: boolean;
  targetTableStrategy: TargetTableStrategy;
  createIndexes: boolean;
  mongoCollectionName: string;
  jobId?: string;
  tableOptions?: Record<string, any>;
};

// Input for validateDataSyncSelection (subset of the builder params).
type ValidateDataSyncSelectionParams = {
  sourceDatasetMode: SourceDatasetMode;
  selectedTables: string[];
  sourceQuery: string;
  syncContent: SyncContent;
};
/**
 * Validates the user's dataset selection before an analyze/sync request is
 * built. Returns a user-facing error message (Chinese, shown via antd
 * message.error) or null when the selection is acceptable.
 *
 * Query mode requires: a non-blank source SQL, exactly one target table,
 * and data-only sync content. Table mode only requires at least one table.
 */
export const validateDataSyncSelection = ({
  sourceDatasetMode,
  selectedTables,
  sourceQuery,
  syncContent,
}: ValidateDataSyncSelectionParams): string | null => {
  if (sourceDatasetMode !== 'query') {
    // Table mode: the only constraint is a non-empty selection.
    return selectedTables.length === 0 ? '请选择至少一张表' : null;
  }
  const trimmedQuery = String(sourceQuery || '').trim();
  if (trimmedQuery === '') {
    return '请输入源查询 SQL';
  }
  if (selectedTables.length !== 1) {
    return 'SQL 结果集同步需要选择一个目标表';
  }
  return syncContent === 'data' ? null : 'SQL 结果集同步仅支持仅同步数据';
};
/**
 * Builds the backend sync request payload from the modal's state.
 *
 * In query mode the payload is clamped to the only supported configuration:
 * data-only content, existing-target-only strategy, no auto-added columns and
 * no index creation, with the source SQL trimmed. In table mode `sourceQuery`
 * is explicitly `undefined` (the key is present but dropped by JSON
 * serialization on the way to the backend). `jobId` and `tableOptions` are
 * included only when truthy.
 */
export const buildDataSyncRequest = (params: BuildDataSyncRequestParams) => {
  const queryMode = params.sourceDatasetMode === 'query';
  const trimmedQuery = String(params.sourceQuery || '').trim();
  // Optional keys are omitted entirely (not set to undefined) when absent.
  const optionalFields = {
    ...(params.jobId ? { jobId: params.jobId } : {}),
    ...(params.tableOptions ? { tableOptions: params.tableOptions } : {}),
  };
  return {
    sourceConfig: params.sourceConfig,
    targetConfig: params.targetConfig,
    tables: params.selectedTables,
    sourceQuery: queryMode ? trimmedQuery : undefined,
    content: queryMode ? 'data' : params.syncContent,
    mode: params.syncMode,
    autoAddColumns: queryMode ? false : params.autoAddColumns,
    targetTableStrategy: queryMode ? 'existing_only' : params.targetTableStrategy,
    createIndexes: queryMode ? false : params.createIndexes,
    mongoCollectionName: String(params.mongoCollectionName || '').trim(),
    ...optionalFields,
  };
};

View File

@@ -39,6 +39,9 @@ func (s *SyncEngine) Analyze(config SyncConfig) SyncAnalyzeResult {
if isMongoToRedisKeyspacePair(config) {
return s.analyzeMongoToRedis(config)
}
if hasSourceQuery(config) {
return s.analyzeSourceQuery(config)
}
contentRaw := strings.ToLower(strings.TrimSpace(config.Content))
syncSchema := false

View File

@@ -46,6 +46,9 @@ func (s *SyncEngine) Preview(config SyncConfig, tableName string, limit int) (Ta
if isMongoToRedisKeyspacePair(config) {
return s.previewMongoToRedis(config, tableName, limit)
}
if hasSourceQuery(config) {
return s.previewSourceQuery(config, limit)
}
sourceDB, err := newSyncDatabase(config.SourceConfig.Type)
if err != nil {

View File

@@ -0,0 +1,461 @@
package sync
import (
"GoNavi-Wails/internal/connection"
"GoNavi-Wails/internal/db"
"fmt"
"strings"
)
// sourceQuerySyncContext bundles everything the query-result sync branch
// needs about one target table: resolved naming, column metadata, the
// optional single PK column, and row snapshots from both sides.
type sourceQuerySyncContext struct {
	TableName        string                        // user-selected target table name (trimmed)
	TargetSchema     string                        // normalized schema on the target side
	TargetTable      string                        // normalized bare table name
	TargetQueryTable string                        // qualified name used when composing SQL
	TargetType       string                        // resolved target driver type (e.g. "mysql")
	TargetCols       []connection.ColumnDefinition // target table column definitions
	PKColumn         string                        // set only when the caller requires a PK
	SourceRows       []map[string]interface{}      // result set of the source SQL
	TargetRows       []map[string]interface{}      // target snapshot; empty unless requested
}
// hasSourceQuery reports whether config carries a non-blank custom source
// SQL; a true result routes Analyze/Preview/RunSync into the query-result
// sync branch.
func hasSourceQuery(config SyncConfig) bool {
	return len(strings.TrimSpace(config.SourceQuery)) > 0
}
// validateSourceQuerySyncConfig validates a query-sourced sync request and
// returns the trimmed name of the single target table. Rules enforced
// (mirrored by the frontend validator in dataSyncRequest.ts):
//   - SourceQuery must be non-blank;
//   - Content must be empty or "data" (query mode is data-only);
//   - exactly one non-blank target table must be selected.
//
// Error messages are user-facing and surface directly in the UI.
func validateSourceQuerySyncConfig(config SyncConfig) (string, error) {
	sourceQuery := strings.TrimSpace(config.SourceQuery)
	if sourceQuery == "" {
		return "", fmt.Errorf("源查询 SQL 不能为空")
	}
	content := strings.ToLower(strings.TrimSpace(config.Content))
	if content != "" && content != "data" {
		return "", fmt.Errorf("SQL 结果集同步当前仅支持“仅同步数据”")
	}
	if len(config.Tables) != 1 {
		return "", fmt.Errorf("SQL 结果集同步要求且仅允许选择一个目标表")
	}
	tableName := strings.TrimSpace(config.Tables[0])
	if tableName == "" {
		return "", fmt.Errorf("目标表不能为空")
	}
	return tableName, nil
}
// resolveTargetQueryTable resolves how the target table is addressed.
// Returns, in order: the resolved target driver type, the normalized schema,
// the normalized bare table name, and the qualified name used when composing
// SQL against the target. Delegates to package helpers shared with the
// table-list migration path.
func resolveTargetQueryTable(config SyncConfig, tableName string) (string, string, string, string) {
	targetType := resolveMigrationDBType(config.TargetConfig)
	targetSchema, targetTable := normalizeSchemaAndTable(targetType, config.TargetConfig.Database, tableName)
	targetQueryTable := qualifiedNameForQuery(targetType, targetSchema, targetTable, tableName)
	return targetType, targetSchema, targetTable, targetQueryTable
}
// resolveSinglePKColumn returns the name of the single primary-key column in
// cols. Query-result sync diffs rows by one PK value, so a table with no PK
// or a composite PK is rejected with a user-facing error.
// Key markers: "PRI" (MySQL-style) and "PK" — presumably normalized by the
// drivers' column loaders; TODO confirm other drivers emit one of these two.
// NOTE(review): the composite-PK error text appears to be missing a closing
// “)” after %s — confirm against the original source file.
func resolveSinglePKColumn(cols []connection.ColumnDefinition) (string, error) {
	pkCols := make([]string, 0, 2)
	for _, col := range cols {
		if col.Key == "PRI" || col.Key == "PK" {
			pkCols = append(pkCols, col.Name)
		}
	}
	if len(pkCols) == 0 {
		return "", fmt.Errorf("目标表无主键,不支持基于 SQL 结果集的差异分析")
	}
	if len(pkCols) > 1 {
		return "", fmt.Errorf("目标表为复合主键(%s暂不支持基于 SQL 结果集的差异分析", strings.Join(pkCols, ","))
	}
	return pkCols[0], nil
}
// loadSourceQuerySyncContext validates the config, loads the target table's
// column definitions, executes the source SQL, and (optionally) snapshots
// the target rows and resolves the single PK column. It is shared by the
// analyze, preview and run branches so all three operate on identical data.
//
// needTargetRows — also SELECT * the target table (needed for diffing).
// requirePK      — resolve exactly one primary-key column (needed for diffing).
// Both connections must already be established by the caller.
func loadSourceQuerySyncContext(config SyncConfig, sourceDB db.Database, targetDB db.Database, needTargetRows bool, requirePK bool) (sourceQuerySyncContext, error) {
	tableName, err := validateSourceQuerySyncConfig(config)
	if err != nil {
		return sourceQuerySyncContext{}, err
	}
	targetType, targetSchema, targetTable, targetQueryTable := resolveTargetQueryTable(config, tableName)
	targetCols, err := targetDB.GetColumns(targetSchema, targetTable)
	if err != nil {
		return sourceQuerySyncContext{}, fmt.Errorf("获取目标表字段失败: %w", err)
	}
	// An existing table must expose at least one column; treat an empty
	// result as "table missing" rather than silently syncing nothing.
	if len(targetCols) == 0 {
		return sourceQuerySyncContext{}, fmt.Errorf("目标表 %s 不存在或未读取到字段定义", tableName)
	}
	// Run the user's SQL verbatim (trimmed) against the source connection;
	// the full result set is held in memory.
	sourceRows, _, err := sourceDB.Query(strings.TrimSpace(config.SourceQuery))
	if err != nil {
		return sourceQuerySyncContext{}, fmt.Errorf("执行源查询失败: %w", err)
	}
	ctx := sourceQuerySyncContext{
		TableName:        tableName,
		TargetSchema:     targetSchema,
		TargetTable:      targetTable,
		TargetQueryTable: targetQueryTable,
		TargetType:       targetType,
		TargetCols:       targetCols,
		SourceRows:       sourceRows,
		TargetRows:       make([]map[string]interface{}, 0),
	}
	if requirePK {
		pkColumn, err := resolveSinglePKColumn(targetCols)
		if err != nil {
			return sourceQuerySyncContext{}, err
		}
		ctx.PKColumn = pkColumn
	}
	if needTargetRows {
		// Full-table snapshot of the target for diffing.
		// NOTE(review): unbounded SELECT * — memory use grows with target
		// table size; confirm acceptable for the intended table sizes.
		targetRows, _, err := targetDB.Query(fmt.Sprintf("SELECT * FROM %s", quoteQualifiedIdentByType(targetType, targetQueryTable)))
		if err != nil {
			return sourceQuerySyncContext{}, fmt.Errorf("读取目标表失败: %w", err)
		}
		ctx.TargetRows = targetRows
	}
	return ctx, nil
}
// diffRowsByPK compares source and target row sets keyed by the single PK
// column pkCol and classifies them into:
//   - inserts: PK present only in the source rows;
//   - updates: PK present in both, with at least one differing column value;
//   - deletes: PK present only in the target rows (PK-only stub rows);
//   - same:    count of rows identical on both sides.
//
// Rows whose PK is nil, or renders as "" or "<nil>", are skipped on both
// sides. Update detection only inspects keys present in the source row, so
// target-only columns never register as changes. Values are compared via
// fmt.Sprintf("%v") string rendering.
// NOTE(review): string-rendered comparison means int64(1) and "1" compare
// equal — confirm this matches the value types the drivers return.
func diffRowsByPK(pkCol string, sourceRows, targetRows []map[string]interface{}) ([]map[string]interface{}, []connection.UpdateRow, []map[string]interface{}, int) {
	// Index target rows by their rendered PK value.
	targetMap := make(map[string]map[string]interface{}, len(targetRows))
	for _, row := range targetRows {
		if row[pkCol] == nil {
			continue
		}
		pkVal := strings.TrimSpace(fmt.Sprintf("%v", row[pkCol]))
		if pkVal == "" || pkVal == "<nil>" {
			continue
		}
		targetMap[pkVal] = row
	}
	sourcePKSet := make(map[string]struct{}, len(sourceRows))
	inserts := make([]map[string]interface{}, 0)
	updates := make([]connection.UpdateRow, 0)
	same := 0
	for _, sourceRow := range sourceRows {
		if sourceRow[pkCol] == nil {
			continue
		}
		pkVal := strings.TrimSpace(fmt.Sprintf("%v", sourceRow[pkCol]))
		if pkVal == "" || pkVal == "<nil>" {
			continue
		}
		sourcePKSet[pkVal] = struct{}{}
		if targetRow, exists := targetMap[pkVal]; exists {
			// PK matched: collect only columns whose rendered values differ.
			changes := make(map[string]interface{})
			for key, value := range sourceRow {
				if fmt.Sprintf("%v", value) != fmt.Sprintf("%v", targetRow[key]) {
					changes[key] = value
				}
			}
			if len(changes) == 0 {
				same++
				continue
			}
			updates = append(updates, connection.UpdateRow{
				Keys:   map[string]interface{}{pkCol: sourceRow[pkCol]},
				Values: changes,
			})
			continue
		}
		inserts = append(inserts, sourceRow)
	}
	// Target rows whose PK never appeared in the source become deletes,
	// carried as PK-only rows.
	deletes := make([]map[string]interface{}, 0)
	for pkVal, row := range targetMap {
		if _, exists := sourcePKSet[pkVal]; exists {
			continue
		}
		deletes = append(deletes, map[string]interface{}{pkCol: row[pkCol]})
	}
	return inserts, updates, deletes, same
}
// buildTargetColumnSet collects the target table's column names into a
// lower-cased lookup set; blank names are skipped.
func buildTargetColumnSet(cols []connection.ColumnDefinition) map[string]struct{} {
	names := make(map[string]struct{}, len(cols))
	for _, definition := range cols {
		name := strings.ToLower(strings.TrimSpace(definition.Name))
		if name != "" {
			names[name] = struct{}{}
		}
	}
	return names
}
// applyQuerySourceColumnFilter drops change-set columns that do not exist on
// the target table, so a wider source result set cannot produce invalid
// INSERT/UPDATE statements. filterInsertRows / filterUpdateRows are package
// siblings (not shown here) — presumably they strip keys absent from the
// lower-cased target column set; verify. Deletes are left untouched since
// they are PK-only rows.
func applyQuerySourceColumnFilter(changeSet connection.ChangeSet, targetCols []connection.ColumnDefinition) connection.ChangeSet {
	targetColSet := buildTargetColumnSet(targetCols)
	changeSet.Inserts = filterInsertRows(changeSet.Inserts, targetColSet)
	changeSet.Updates = filterUpdateRows(changeSet.Updates, targetColSet)
	return changeSet
}
// analyzeSourceQuery is the Analyze branch for query-sourced syncs: it runs
// the source SQL, snapshots the single target table, and produces a one-entry
// diff summary (inserts/updates/deletes/same keyed by the target PK).
// Validation failures before any connection is made fail the whole result;
// context/load failures after that are reported per-table (CanSync=false,
// Message set) while the overall result stays Success=true, matching the
// per-table reporting style of the table-list analyze path.
func (s *SyncEngine) analyzeSourceQuery(config SyncConfig) SyncAnalyzeResult {
	result := SyncAnalyzeResult{Success: true, Tables: []TableDiffSummary{}}
	tableName, err := validateSourceQuerySyncConfig(config)
	if err != nil {
		return SyncAnalyzeResult{Success: false, Message: err.Error()}
	}
	totalTables := 1
	s.progress(config.JobID, 0, totalTables, tableName, "差异分析开始")
	sourceDB, err := newSyncDatabase(config.SourceConfig.Type)
	if err != nil {
		return SyncAnalyzeResult{Success: false, Message: "初始化源数据库驱动失败: " + err.Error()}
	}
	targetDB, err := newSyncDatabase(config.TargetConfig.Type)
	if err != nil {
		return SyncAnalyzeResult{Success: false, Message: "初始化目标数据库驱动失败: " + err.Error()}
	}
	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		return SyncAnalyzeResult{Success: false, Message: "源数据库连接失败: " + err.Error()}
	}
	defer sourceDB.Close()
	if err := targetDB.Connect(config.TargetConfig); err != nil {
		return SyncAnalyzeResult{Success: false, Message: "目标数据库连接失败: " + err.Error()}
	}
	defer targetDB.Close()
	summary := TableDiffSummary{
		Table:   tableName,
		CanSync: false,
	}
	// Load target columns + PK, run the source SQL, and snapshot target rows.
	ctx, err := loadSourceQuerySyncContext(config, sourceDB, targetDB, true, true)
	if err != nil {
		// Per-table failure: surfaced in the summary, result stays successful.
		summary.Message = err.Error()
		result.Tables = append(result.Tables, summary)
		result.Message = "已完成 1 个目标表的差异分析"
		s.progress(config.JobID, totalTables, totalTables, tableName, "差异分析完成")
		return result
	}
	inserts, updates, deletes, same := diffRowsByPK(ctx.PKColumn, ctx.SourceRows, ctx.TargetRows)
	summary.CanSync = true
	summary.PKColumn = ctx.PKColumn
	summary.Inserts = len(inserts)
	summary.Updates = len(updates)
	summary.Deletes = len(deletes)
	summary.Same = same
	summary.TargetTableExists = true
	summary.Message = "SQL 结果集差异分析完成"
	result.Tables = append(result.Tables, summary)
	result.Message = "已完成 1 个目标表的差异分析"
	s.progress(config.JobID, totalTables, totalTables, tableName, "差异分析完成")
	return result
}
// previewSourceQuery is the Preview branch for query-sourced syncs. It
// recomputes the full diff between the source SQL result set and the target
// table, then returns at most `limit` rows per category (inserts, updates
// with changed-column detail, deletes) alongside total counts and the
// target's lower-cased column-name -> type map.
func (s *SyncEngine) previewSourceQuery(config SyncConfig, limit int) (TableDiffPreview, error) {
	sourceDB, err := newSyncDatabase(config.SourceConfig.Type)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("初始化源数据库驱动失败: %w", err)
	}
	targetDB, err := newSyncDatabase(config.TargetConfig.Type)
	if err != nil {
		return TableDiffPreview{}, fmt.Errorf("初始化目标数据库驱动失败: %w", err)
	}
	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		return TableDiffPreview{}, fmt.Errorf("源数据库连接失败: %w", err)
	}
	defer sourceDB.Close()
	if err := targetDB.Connect(config.TargetConfig); err != nil {
		return TableDiffPreview{}, fmt.Errorf("目标数据库连接失败: %w", err)
	}
	defer targetDB.Close()
	ctx, err := loadSourceQuerySyncContext(config, sourceDB, targetDB, true, true)
	if err != nil {
		return TableDiffPreview{}, err
	}
	inserts, updates, deletes, _ := diffRowsByPK(ctx.PKColumn, ctx.SourceRows, ctx.TargetRows)
	out := TableDiffPreview{
		Table:         ctx.TableName,
		PKColumn:      ctx.PKColumn,
		ColumnTypes:   make(map[string]string, len(ctx.TargetCols)),
		SchemaSummary: "SQL 结果集同步预览",
		TotalInserts:  len(inserts),
		TotalUpdates:  len(updates),
		TotalDeletes:  len(deletes),
		Inserts:       make([]PreviewRow, 0, minInt(limit, len(inserts))),
		Updates:       make([]PreviewUpdateRow, 0, minInt(limit, len(updates))),
		Deletes:       make([]PreviewRow, 0, minInt(limit, len(deletes))),
	}
	// Expose target column types keyed by lower-cased column name.
	for _, col := range ctx.TargetCols {
		name := strings.ToLower(strings.TrimSpace(col.Name))
		typ := strings.TrimSpace(col.Type)
		if name == "" || typ == "" {
			continue
		}
		out.ColumnTypes[name] = typ
	}
	for idx, row := range inserts {
		if idx >= limit {
			break
		}
		pk := strings.TrimSpace(fmt.Sprintf("%v", row[ctx.PKColumn]))
		out.Inserts = append(out.Inserts, PreviewRow{PK: pk, Row: row})
	}
	for idx, update := range updates {
		if idx >= limit {
			break
		}
		pk := strings.TrimSpace(fmt.Sprintf("%v", update.Keys[ctx.PKColumn]))
		// Re-locate the full source and target rows for this PK so the
		// preview can show a complete before/after pair.
		// NOTE(review): linear scans per previewed update — O(limit × rows);
		// fine for the default limit (~200) but confirm for large tables.
		targetRow := map[string]interface{}{}
		for _, row := range ctx.TargetRows {
			if fmt.Sprintf("%v", row[ctx.PKColumn]) == fmt.Sprintf("%v", update.Keys[ctx.PKColumn]) {
				targetRow = row
				break
			}
		}
		sourceRow := map[string]interface{}{}
		for _, row := range ctx.SourceRows {
			if fmt.Sprintf("%v", row[ctx.PKColumn]) == fmt.Sprintf("%v", update.Keys[ctx.PKColumn]) {
				sourceRow = row
				break
			}
		}
		changedColumns := make([]string, 0, len(update.Values))
		for column := range update.Values {
			changedColumns = append(changedColumns, column)
		}
		out.Updates = append(out.Updates, PreviewUpdateRow{
			PK:             pk,
			ChangedColumns: changedColumns,
			Source:         sourceRow,
			Target:         targetRow,
		})
	}
	for idx, row := range deletes {
		if idx >= limit {
			break
		}
		pk := strings.TrimSpace(fmt.Sprintf("%v", row[ctx.PKColumn]))
		out.Deletes = append(out.Deletes, PreviewRow{PK: pk, Row: row})
	}
	return out, nil
}
// runSourceQuerySync executes a query-sourced sync into a single existing
// target table. Two paths depending on the normalized sync mode:
//   - "insert_update": PK-based diff against a target snapshot, then
//     insert/update/delete filtered by the table's per-operation options;
//   - otherwise (e.g. "full_overwrite"): the whole result set becomes
//     inserts (when opts.Insert), with the target cleared first for
//     full_overwrite (TRUNCATE on MySQL, DELETE elsewhere). Deletes are
//     never applied on this path.
//
// Columns absent from the target are filtered out before applying, and the
// change set is applied via the target driver's BatchApplier.
func (s *SyncEngine) runSourceQuerySync(config SyncConfig) SyncResult {
	result := SyncResult{Success: true, Logs: []string{}}
	tableName, err := validateSourceQuerySyncConfig(config)
	if err != nil {
		return s.fail(config.JobID, 1, result, err.Error())
	}
	totalTables := 1
	tableMode := normalizeSyncMode(config.Mode)
	s.progress(config.JobID, 0, totalTables, tableName, "开始同步")
	s.appendLog(config.JobID, &result, "info", fmt.Sprintf("同步来源SQL 结果集 -> 目标表 %s模式%s", tableName, tableMode))
	sourceDB, err := newSyncDatabase(config.SourceConfig.Type)
	if err != nil {
		return s.fail(config.JobID, totalTables, result, "初始化源数据库驱动失败: "+err.Error())
	}
	targetDB, err := newSyncDatabase(config.TargetConfig.Type)
	if err != nil {
		return s.fail(config.JobID, totalTables, result, "初始化目标数据库驱动失败: "+err.Error())
	}
	if err := sourceDB.Connect(config.SourceConfig); err != nil {
		return s.fail(config.JobID, totalTables, result, "源数据库连接失败: "+err.Error())
	}
	defer sourceDB.Close()
	if err := targetDB.Connect(config.TargetConfig); err != nil {
		return s.fail(config.JobID, totalTables, result, "目标数据库连接失败: "+err.Error())
	}
	defer targetDB.Close()
	// Per-table operation toggles; default allows insert+update, no delete.
	opts := TableOptions{Insert: true, Update: true, Delete: false}
	if config.TableOptions != nil {
		if configured, ok := config.TableOptions[tableName]; ok {
			opts = configured
		}
	}
	if !opts.Insert && !opts.Update && !opts.Delete {
		s.appendLog(config.JobID, &result, "info", fmt.Sprintf("目标表 %s 未勾选任何操作,已跳过", tableName))
		s.progress(config.JobID, totalTables, totalTables, tableName, "同步完成")
		return result
	}
	// Only the diff-based mode needs a target snapshot and a resolved PK.
	needTargetRows := tableMode == "insert_update"
	requirePK := tableMode == "insert_update"
	ctx, err := loadSourceQuerySyncContext(config, sourceDB, targetDB, needTargetRows, requirePK)
	if err != nil {
		return s.fail(config.JobID, totalTables, result, err.Error())
	}
	inserts := make([]map[string]interface{}, 0)
	updates := make([]connection.UpdateRow, 0)
	deletes := make([]map[string]interface{}, 0)
	if tableMode == "insert_update" {
		inserts, updates, deletes, _ = diffRowsByPK(ctx.PKColumn, ctx.SourceRows, ctx.TargetRows)
		// Honor the user's per-operation toggles and any PK-level selections.
		inserts = filterRowsByPKSelection(ctx.PKColumn, inserts, opts.Insert, opts.SelectedInsertPKs)
		updates = filterUpdatesByPKSelection(ctx.PKColumn, updates, opts.Update, opts.SelectedUpdatePKs)
		deletes = filterRowsByPKSelection(ctx.PKColumn, deletes, opts.Delete, opts.SelectedDeletePKs)
	} else {
		inserts = ctx.SourceRows
		if !opts.Insert {
			inserts = nil
		}
		if tableMode == "full_overwrite" {
			s.progress(config.JobID, 0, totalTables, tableName, "清空目标表")
			clearSQL := fmt.Sprintf("DELETE FROM %s", quoteQualifiedIdentByType(ctx.TargetType, ctx.TargetQueryTable))
			if ctx.TargetType == "mysql" {
				clearSQL = fmt.Sprintf("TRUNCATE TABLE %s", quoteQualifiedIdentByType(ctx.TargetType, ctx.TargetQueryTable))
			}
			if _, err := targetDB.Exec(clearSQL); err != nil {
				return s.fail(config.JobID, totalTables, result, "清空目标表失败: "+err.Error())
			}
		}
	}
	// Strip columns the target table does not have before applying.
	changeSet := applyQuerySourceColumnFilter(connection.ChangeSet{
		Inserts: inserts,
		Updates: updates,
		Deletes: deletes,
	}, ctx.TargetCols)
	if len(changeSet.Inserts) == 0 && len(changeSet.Updates) == 0 && len(changeSet.Deletes) == 0 {
		s.appendLog(config.JobID, &result, "info", "SQL 结果集与目标表一致,无需应用变更")
		result.TablesSynced++
		s.progress(config.JobID, totalTables, totalTables, tableName, "同步完成")
		return result
	}
	// These driver families expect the schema-qualified name in ApplyChanges.
	applyTableName := ctx.TargetTable
	switch ctx.TargetType {
	case "postgres", "kingbase", "highgo", "vastbase", "sqlserver":
		applyTableName = ctx.TargetQueryTable
	}
	applier, ok := targetDB.(db.BatchApplier)
	if !ok {
		return s.fail(config.JobID, totalTables, result, "目标驱动不支持应用数据变更 (ApplyChanges)")
	}
	if err := applier.ApplyChanges(applyTableName, changeSet); err != nil {
		return s.fail(config.JobID, totalTables, result, "应用 SQL 结果集变更失败: "+err.Error())
	}
	result.TablesSynced++
	result.RowsInserted += len(changeSet.Inserts)
	result.RowsUpdated += len(changeSet.Updates)
	result.RowsDeleted += len(changeSet.Deletes)
	s.appendLog(config.JobID, &result, "info", fmt.Sprintf("SQL 结果集同步完成:插入=%d 更新=%d 删除=%d", len(changeSet.Inserts), len(changeSet.Updates), len(changeSet.Deletes)))
	s.progress(config.JobID, totalTables, totalTables, tableName, "同步完成")
	return result
}
// minInt reports the smaller of the two integer arguments.
func minInt(a, b int) int {
	if b < a {
		return b
	}
	return a
}

View File

@@ -0,0 +1,177 @@
package sync
import (
"GoNavi-Wails/internal/connection"
"GoNavi-Wails/internal/db"
"reflect"
"testing"
)
// fakeQuerySyncTargetDB is a fake target database for the source-query sync
// tests. It embeds fakeMigrationDB for column/query stubbing and records the
// arguments of the last ApplyChanges call so tests can assert on the diff the
// engine attempted to apply.
type fakeQuerySyncTargetDB struct {
	fakeMigrationDB
	appliedTable   string               // table name passed to the last ApplyChanges call
	appliedChanges connection.ChangeSet // change set passed to the last ApplyChanges call
}
// ApplyChanges records the table name and change set handed to the fake so
// assertions can inspect what the sync engine tried to apply. It always
// reports success; failure paths are not exercised by these tests.
func (f *fakeQuerySyncTargetDB) ApplyChanges(tableName string, changes connection.ChangeSet) error {
	f.appliedChanges = changes
	f.appliedTable = tableName
	return nil
}
// Compile-time check: the fake must satisfy db.BatchApplier, since RunSync
// type-asserts the target driver to that interface before applying changes.
var _ db.BatchApplier = (*fakeQuerySyncTargetDB)(nil)
func TestAnalyze_SourceQueryUsesQueryResultAsSourceDataset(t *testing.T) {
sourceDB := &fakeMigrationDB{
columns: map[string][]connection.ColumnDefinition{
"app.users": {
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
{Name: "name", Type: "varchar(64)", Nullable: "YES"},
},
},
queryData: map[string][]map[string]interface{}{
"SELECT id, name FROM active_users": {
{"id": 1, "name": "Alice New"},
{"id": 2, "name": "Bob"},
},
},
}
targetDB := &fakeQuerySyncTargetDB{
fakeMigrationDB: fakeMigrationDB{
columns: map[string][]connection.ColumnDefinition{
"app.users": {
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
{Name: "name", Type: "varchar(64)", Nullable: "YES"},
},
},
queryData: map[string][]map[string]interface{}{
"SELECT * FROM `app`.`users`": {
{"id": 1, "name": "Alice Old"},
{"id": 3, "name": "Carol"},
},
},
},
}
oldFactory := newSyncDatabase
defer func() { newSyncDatabase = oldFactory }()
callCount := 0
newSyncDatabase = func(dbType string) (db.Database, error) {
callCount++
if callCount == 1 {
return sourceDB, nil
}
return targetDB, nil
}
engine := NewSyncEngine(Reporter{})
result := engine.Analyze(SyncConfig{
SourceConfig: connection.ConnectionConfig{Type: "mysql", Database: "app"},
TargetConfig: connection.ConnectionConfig{Type: "mysql", Database: "app"},
Tables: []string{"users"},
Mode: "insert_update",
SourceQuery: "SELECT id, name FROM active_users",
})
if !result.Success {
t.Fatalf("Analyze 返回失败: %+v", result)
}
if len(result.Tables) != 1 {
t.Fatalf("expected one table summary, got %d", len(result.Tables))
}
summary := result.Tables[0]
if summary.PKColumn != "id" {
t.Fatalf("expected PKColumn=id, got %q", summary.PKColumn)
}
if !summary.CanSync {
t.Fatalf("expected summary can sync, got %+v", summary)
}
if summary.Inserts != 1 || summary.Updates != 1 || summary.Deletes != 1 {
t.Fatalf("unexpected diff summary: %+v", summary)
}
}
func TestRunSync_SourceQueryAppliesDiffAgainstTargetTable(t *testing.T) {
sourceDB := &fakeMigrationDB{
columns: map[string][]connection.ColumnDefinition{
"app.users": {
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
{Name: "name", Type: "varchar(64)", Nullable: "YES"},
},
},
queryData: map[string][]map[string]interface{}{
"SELECT id, name FROM active_users": {
{"id": 1, "name": "Alice New"},
{"id": 2, "name": "Bob"},
},
},
}
targetDB := &fakeQuerySyncTargetDB{
fakeMigrationDB: fakeMigrationDB{
columns: map[string][]connection.ColumnDefinition{
"app.users": {
{Name: "id", Type: "bigint", Nullable: "NO", Key: "PRI"},
{Name: "name", Type: "varchar(64)", Nullable: "YES"},
},
},
queryData: map[string][]map[string]interface{}{
"SELECT * FROM `app`.`users`": {
{"id": 1, "name": "Alice Old"},
{"id": 3, "name": "Carol"},
},
},
},
}
oldFactory := newSyncDatabase
defer func() { newSyncDatabase = oldFactory }()
callCount := 0
newSyncDatabase = func(dbType string) (db.Database, error) {
callCount++
if callCount == 1 {
return sourceDB, nil
}
return targetDB, nil
}
engine := NewSyncEngine(Reporter{})
result := engine.RunSync(SyncConfig{
SourceConfig: connection.ConnectionConfig{Type: "mysql", Database: "app"},
TargetConfig: connection.ConnectionConfig{Type: "mysql", Database: "app"},
Tables: []string{"users"},
Mode: "insert_update",
SourceQuery: "SELECT id, name FROM active_users",
TableOptions: map[string]TableOptions{
"users": {Insert: true, Update: true, Delete: true},
},
})
if !result.Success {
t.Fatalf("RunSync 返回失败: %+v", result)
}
if result.TablesSynced != 1 || result.RowsInserted != 1 || result.RowsUpdated != 1 || result.RowsDeleted != 1 {
t.Fatalf("unexpected sync result: %+v", result)
}
if targetDB.appliedTable != "users" {
t.Fatalf("expected applied table users, got %q", targetDB.appliedTable)
}
wantInserts := []map[string]interface{}{{"id": 2, "name": "Bob"}}
if !reflect.DeepEqual(targetDB.appliedChanges.Inserts, wantInserts) {
t.Fatalf("unexpected inserts: got=%v want=%v", targetDB.appliedChanges.Inserts, wantInserts)
}
wantUpdates := []connection.UpdateRow{{
Keys: map[string]interface{}{"id": 1},
Values: map[string]interface{}{"name": "Alice New"},
}}
if !reflect.DeepEqual(targetDB.appliedChanges.Updates, wantUpdates) {
t.Fatalf("unexpected updates: got=%v want=%v", targetDB.appliedChanges.Updates, wantUpdates)
}
wantDeletes := []map[string]interface{}{{"id": 3}}
if !reflect.DeepEqual(targetDB.appliedChanges.Deletes, wantDeletes) {
t.Fatalf("unexpected deletes: got=%v want=%v", targetDB.appliedChanges.Deletes, wantDeletes)
}
}

View File

@@ -15,6 +15,7 @@ type SyncConfig struct {
SourceConfig connection.ConnectionConfig `json:"sourceConfig"`
TargetConfig connection.ConnectionConfig `json:"targetConfig"`
Tables []string `json:"tables"`
SourceQuery string `json:"sourceQuery,omitempty"`
Content string `json:"content,omitempty"` // "data", "schema", "both"
Mode string `json:"mode"` // "insert_update", "insert_only", "full_overwrite"
JobID string `json:"jobId,omitempty"`
@@ -54,6 +55,9 @@ func (s *SyncEngine) RunSync(config SyncConfig) SyncResult {
if isMongoToRedisKeyspacePair(config) {
return s.runMongoToRedisSync(config, result)
}
if hasSourceQuery(config) {
return s.runSourceQuerySync(config)
}
totalTables := len(config.Tables)
s.progress(config.JobID, 0, totalTables, "", "开始同步")