Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(dlm): incremental table structure synchronization #2189

Merged
merged 5 commits into from
May 15, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,8 @@ public Job createJob(DlmTask parameters) {
logicTableConfig.setReaderTaskCount((int) (singleTaskThreadPoolSize * readWriteRatio / (1 + readWriteRatio)));
logicTableConfig.setWriterTaskCount(singleTaskThreadPoolSize - logicTableConfig.getReaderTaskCount());
logicTableConfig.setGeneratorBatchSize(defaultScanBatchSize);
DataSourceInfo sourceInfo = DataSourceInfoBuilder.build(parameters.getSourceDs());
DataSourceInfo targetInfo = DataSourceInfoBuilder.build(parameters.getTargetDs());
DataSourceInfo sourceInfo = DataSourceInfoMapper.toDataSourceInfo(parameters.getSourceDs());
DataSourceInfo targetInfo = DataSourceInfoMapper.toDataSourceInfo(parameters.getTargetDs());
sourceInfo.setConnectionCount(2 * (logicTableConfig.getReaderTaskCount()
+ parameters.getLogicTableConfig().getWriterTaskCount()));
targetInfo.setConnectionCount(2 * (logicTableConfig.getReaderTaskCount()
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
/*
* Copyright (c) 2023 OceanBase.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.oceanbase.odc.service.dlm;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import javax.sql.DataSource;

import com.oceanbase.odc.common.util.StringUtils;
import com.oceanbase.odc.service.connection.model.ConnectionConfig;
import com.oceanbase.odc.service.session.factory.DruidDataSourceFactory;
import com.oceanbase.odc.service.structurecompare.DefaultDBStructureComparator;
import com.oceanbase.odc.service.structurecompare.model.DBObjectComparisonResult;
import com.oceanbase.odc.service.structurecompare.model.DBStructureComparisonConfig;
import com.oceanbase.tools.dbbrowser.model.DBObjectType;

import lombok.extern.slf4j.Slf4j;

/**
* @Author:tinker
* @Date: 2024/4/9 16:39
 * @Description:
*/
@Slf4j
public class DLMTableStructureSynchronizer {

    /**
     * Synchronizes the structure of a single table from the source connection to the target
     * connection.
     *
     * @param sourceConnectionConfig connection whose table structure is the reference
     * @param targetConnectionConfig connection whose table structure will be altered
     * @param tableName name of the table to synchronize
     * @param targetType object types (see {@link DBObjectType}) whose differences should be applied
     * @throws SQLException if the structure comparison fails
     */
    public static void sync(ConnectionConfig sourceConnectionConfig, ConnectionConfig targetConnectionConfig,
            String tableName, Set<DBObjectType> targetType) throws SQLException {
        // initDBStructureComparisonConfig copies the set, so an immutable singleton is safe here.
        sync(sourceConnectionConfig, targetConnectionConfig, Collections.singleton(tableName), targetType);
    }

    /**
     * Compares the structure of the given tables between source and target and applies the
     * generated change script on the target. Applying the script is best-effort: failures are
     * logged and swallowed so the surrounding DLM task can continue.
     *
     * @param sourceConnectionConfig connection whose table structure is the reference
     * @param targetConnectionConfig connection whose table structure will be altered
     * @param tableNames names of the tables to synchronize
     * @param targetType object types whose differences should be applied
     * @throws SQLException if the structure comparison itself fails
     */
    public static void sync(ConnectionConfig sourceConnectionConfig, ConnectionConfig targetConnectionConfig,
            Set<String> tableNames, Set<DBObjectType> targetType) throws SQLException {
        DBStructureComparisonConfig sourceConfig = initDBStructureComparisonConfig(
                sourceConnectionConfig, tableNames);
        DBStructureComparisonConfig targetConfig = initDBStructureComparisonConfig(
                targetConnectionConfig, tableNames);
        try {
            DefaultDBStructureComparator comparator = new DefaultDBStructureComparator();
            // Keep only the sub-results whose object type the caller asked to synchronize.
            List<DBObjectComparisonResult> results = comparator.compare(sourceConfig, targetConfig).stream()
                    .peek(o -> o.setSubDBObjectComparisonResult(o.getSubDBObjectComparisonResult().stream()
                            .filter(sb -> targetType.contains(sb.getDbObjectType()))
                            .collect(Collectors.toList())))
                    .collect(Collectors.toList());
            try (Connection conn = targetConfig.getDataSource().getConnection()) {
                for (DBObjectComparisonResult result : results) {
                    String changeSqlScript =
                            result.toEntity(1L, targetConnectionConfig.getDialectType()).getChangeSqlScript();
                    if (StringUtils.isNotEmpty(changeSqlScript)) {
                        log.info("Start to sync target table structure,sqls={}", changeSqlScript);
                        // Close each statement eagerly instead of leaking one per table until
                        // the connection closes.
                        try (PreparedStatement ps = conn.prepareStatement(changeSqlScript)) {
                            ps.execute();
                        }
                        log.info("Sync table structure success.");
                    } else {
                        log.info("Table structure comparison has finished,no action is necessary.");
                    }
                }
            } catch (Exception e) {
                // Best-effort: a failure to apply the change script must not abort the DLM task.
                log.warn("Sync table structure failed!", e);
            }
        } finally {
            // Always release the pooled data sources, even when comparator.compare(...) throws;
            // previously a comparison failure leaked both data sources.
            closeDataSource(sourceConfig.getDataSource());
            closeDataSource(targetConfig.getDataSource());
        }
    }

    /**
     * Builds a comparison config for one side of the sync: schema, connect type, a Druid data
     * source, and the tables to compare.
     */
    private static DBStructureComparisonConfig initDBStructureComparisonConfig(ConnectionConfig config,
            Set<String> tableNames) {
        DBStructureComparisonConfig returnValue = new DBStructureComparisonConfig();
        returnValue.setSchemaName(config.getDefaultSchema());
        returnValue.setConnectType(config.getType());
        returnValue.setDataSource(new DruidDataSourceFactory(config).getDataSource());
        returnValue.setToComparedObjectTypes(Collections.singleton(DBObjectType.TABLE));
        // NOTE(review): despite the name, this map appears to carry the tables selected for
        // comparison — confirm the intended semantics with DBStructureComparisonConfig.
        Map<DBObjectType, Set<String>> blackListMap = new HashMap<>();
        blackListMap.put(DBObjectType.TABLE, new HashSet<>(tableNames));
        returnValue.setBlackListMap(blackListMap);
        return returnValue;
    }

    /**
     * Closes the data source if it supports closing; Druid data sources do. Failures are logged,
     * not rethrown, because closing happens on the cleanup path.
     */
    private static void closeDataSource(DataSource dataSource) {
        if (dataSource instanceof AutoCloseable) {
            try {
                ((AutoCloseable) dataSource).close();
            } catch (Exception e) {
                log.warn("Structure comparison failed to close dataSource!", e);
            }
        }
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package com.oceanbase.odc.service.dlm;

import com.oceanbase.odc.common.util.StringUtils;
import com.oceanbase.odc.core.shared.constant.ConnectType;
import com.oceanbase.odc.core.shared.exception.UnsupportedException;
import com.oceanbase.odc.service.connection.model.ConnectionConfig;
import com.oceanbase.odc.service.session.factory.OBConsoleDataSourceFactory;
Expand All @@ -32,9 +33,20 @@
 * @Description:
*/
@Slf4j
public class DataSourceInfoBuilder {
public class DataSourceInfoMapper {

public static DataSourceInfo build(ConnectionConfig connectionConfig) {
/**
 * Maps a task-framework {@link DataSourceInfo} back to an ODC {@link ConnectionConfig}.
 * Copies host/port, credentials, default schema, and derives the connect type from the
 * database type's enum name.
 */
public static ConnectionConfig toConnectionConfig(DataSourceInfo dataSourceInfo) {
    ConnectionConfig config = new ConnectionConfig();
    config.setHost(dataSourceInfo.getIp());
    config.setPort(dataSourceInfo.getPort());
    config.setUsername(dataSourceInfo.getFullUserName());
    config.setPassword(dataSourceInfo.getPassword());
    config.setDefaultSchema(dataSourceInfo.getDatabaseName());
    // DatabaseType and ConnectType share constant names, so mapping by name is valid.
    config.setType(ConnectType.valueOf(dataSourceInfo.getDatabaseType().name()));
    return config;
}

public static DataSourceInfo toDataSourceInfo(ConnectionConfig connectionConfig) {
DataSourceInfo dataSourceInfo = new DataSourceInfo();
dataSourceInfo.setDatabaseName(connectionConfig.getDefaultSchema());
dataSourceInfo.setQueryTimeout(connectionConfig.queryTimeoutSeconds());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,10 @@
package com.oceanbase.odc.service.dlm.model;

import java.util.List;
import java.util.Set;

import com.oceanbase.odc.core.flow.model.TaskParameters;
import com.oceanbase.tools.dbbrowser.model.DBObjectType;
import com.oceanbase.tools.migrator.common.enums.MigrationInsertAction;
import com.oceanbase.tools.migrator.common.enums.ShardingStrategy;

Expand Down Expand Up @@ -61,6 +63,8 @@ public class DataArchiveParameters implements TaskParameters {

private int scanBatchSize;

private Set<DBObjectType> syncTableStructure;

private MigrationInsertAction migrationInsertAction = MigrationInsertAction.INSERT_NORMAL;

private ShardingStrategy shardingStrategy;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,12 @@
package com.oceanbase.odc.service.dlm.model;

import java.util.Date;
import java.util.Set;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.oceanbase.odc.core.shared.constant.TaskStatus;
import com.oceanbase.odc.service.connection.model.ConnectionConfig;
import com.oceanbase.tools.dbbrowser.model.DBObjectType;
import com.oceanbase.tools.migrator.common.configure.LogicTableConfig;
import com.oceanbase.tools.migrator.common.enums.JobType;

Expand Down Expand Up @@ -53,6 +55,8 @@ public class DlmTask {

private JobType jobType;

private Set<DBObjectType> syncDBObjectTypes;


@JsonIgnore
private ConnectionConfig sourceDs;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,7 @@ public List<DlmTask> splitTask(ScheduleTaskEntity taskEntity) {
taskUnit.setLogicTableConfig(logicTableConfig);
taskUnit.setStatus(TaskStatus.PREPARING);
taskUnit.setJobType(JobType.MIGRATE);
taskUnit.setSyncDBObjectTypes(parameters.getSyncTableStructure());
taskUnits.add(taskUnit);
});
return taskUnits;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,11 @@
package com.oceanbase.odc.service.schedule.job;

import java.util.List;
import java.util.Set;

import com.oceanbase.odc.service.dlm.model.DataArchiveTableConfig;
import com.oceanbase.odc.service.dlm.model.RateLimitConfiguration;
import com.oceanbase.tools.dbbrowser.model.DBObjectType;
import com.oceanbase.tools.migrator.common.configure.DataSourceInfo;
import com.oceanbase.tools.migrator.common.enums.JobType;
import com.oceanbase.tools.migrator.common.enums.MigrationInsertAction;
Expand Down Expand Up @@ -61,6 +63,8 @@ public class DLMJobParameters {

private ShardingStrategy shardingStrategy;

private Set<DBObjectType> syncTableStructure;

private int scanBatchSize;

}
Original file line number Diff line number Diff line change
Expand Up @@ -15,24 +15,21 @@
*/
package com.oceanbase.odc.service.schedule.job;

import java.sql.SQLException;
import java.util.List;

import org.quartz.JobExecutionContext;

import com.oceanbase.odc.common.json.JsonUtils;
import com.oceanbase.odc.common.util.StringUtils;
import com.oceanbase.odc.core.session.ConnectionSession;
import com.oceanbase.odc.core.session.ConnectionSessionConstants;
import com.oceanbase.odc.core.shared.constant.TaskStatus;
import com.oceanbase.odc.metadb.schedule.ScheduleTaskEntity;
import com.oceanbase.odc.service.db.browser.DBSchemaAccessors;
import com.oceanbase.odc.service.dlm.DataSourceInfoBuilder;
import com.oceanbase.odc.service.dlm.DLMTableStructureSynchronizer;
import com.oceanbase.odc.service.dlm.DataSourceInfoMapper;
import com.oceanbase.odc.service.dlm.model.DataArchiveParameters;
import com.oceanbase.odc.service.dlm.model.DataArchiveTableConfig;
import com.oceanbase.odc.service.dlm.model.DlmTask;
import com.oceanbase.odc.service.dlm.utils.DataArchiveConditionUtil;
import com.oceanbase.odc.service.session.factory.DefaultConnectSessionFactory;
import com.oceanbase.tools.dbbrowser.schema.DBSchemaAccessor;
import com.oceanbase.tools.migrator.common.enums.JobType;

import lombok.extern.slf4j.Slf4j;
Expand Down Expand Up @@ -74,7 +71,12 @@ public void executeJob(JobExecutionContext context) {
/**
 * Prepares one DLM task unit: runs the base initialization, then best-effort synchronizes
 * the table's structure from the source data source to the target before data is moved.
 */
@Override
public void initTask(DlmTask taskUnit) {
super.initTask(taskUnit);
try {
// Align the target table with the source for the configured DB object types.
DLMTableStructureSynchronizer.sync(taskUnit.getSourceDs(), taskUnit.getTargetDs(), taskUnit.getTableName(),
taskUnit.getSyncDBObjectTypes());
} catch (SQLException e) {
// Structure sync is best-effort: log and continue with the data archive task.
log.warn("Sync table structure failed,tableName={}", taskUnit.getTableName(), e);
}
}

private void executeInTaskFramework(JobExecutionContext context) {
Expand Down Expand Up @@ -103,16 +105,17 @@ private void executeInTaskFramework(JobExecutionContext context) {
parameters.setShardingStrategy(dataArchiveParameters.getShardingStrategy());
parameters.setScanBatchSize(dataArchiveParameters.getScanBatchSize());
parameters
.setSourceDs(DataSourceInfoBuilder.build(
.setSourceDs(DataSourceInfoMapper.toDataSourceInfo(
databaseService.findDataSourceForConnectById(dataArchiveParameters.getSourceDatabaseId())));
parameters
.setTargetDs(DataSourceInfoBuilder.build(
.setTargetDs(DataSourceInfoMapper.toDataSourceInfo(
databaseService.findDataSourceForConnectById(dataArchiveParameters.getTargetDataBaseId())));
parameters.getSourceDs().setDatabaseName(dataArchiveParameters.getSourceDatabaseName());
parameters.getTargetDs().setDatabaseName(dataArchiveParameters.getTargetDatabaseName());
parameters.getSourceDs().setConnectionCount(2 * (parameters.getReadThreadCount()
+ parameters.getWriteThreadCount()));
parameters.getTargetDs().setConnectionCount(parameters.getSourceDs().getConnectionCount());
parameters.setSyncTableStructure(dataArchiveParameters.getSyncTableStructure());

Long jobId = publishJob(parameters);
scheduleTaskRepository.updateJobIdById(taskEntity.getId(), jobId);
Expand All @@ -122,42 +125,4 @@ private void executeInTaskFramework(JobExecutionContext context) {
jobId);
}


/**
 * Create the table in the target database before migrating the data.
 *
 * Only supported when source and target share the same dialect: the source table's DDL is
 * fetched verbatim and replayed on the target, and is skipped when the target table already
 * exists.
 */
private void createTargetTable(DlmTask dlmTask) {

if (dlmTask.getSourceDs().getDialectType() != dlmTask.getTargetDs().getDialectType()) {
log.info("Data sources of different types do not currently support automatic creation of target tables.");
return;
}
DefaultConnectSessionFactory sourceConnectionSessionFactory =
new DefaultConnectSessionFactory(dlmTask.getSourceDs());
ConnectionSession srcSession = sourceConnectionSessionFactory.generateSession();
String tableDDL;
try {
DBSchemaAccessor sourceDsAccessor = DBSchemaAccessors.create(srcSession);
tableDDL = sourceDsAccessor.getTableDDL(dlmTask.getSourceDs().getDefaultSchema(), dlmTask.getTableName());
} finally {
// Expire the source session as soon as the DDL is captured.
srcSession.expire();
}

DefaultConnectSessionFactory targetConnectionSessionFactory =
new DefaultConnectSessionFactory(dlmTask.getTargetDs());
ConnectionSession targetSession = targetConnectionSessionFactory.generateSession();
try {
DBSchemaAccessor targetDsAccessor = DBSchemaAccessors.create(targetSession);
List<String> tableNames = targetDsAccessor.showTables(dlmTask.getTargetDs().getDefaultSchema());
// Idempotent: do nothing when the target table already exists.
if (tableNames.contains(dlmTask.getTableName())) {
log.info("Target table exist,tableName={}", dlmTask.getTableName());
return;
}
log.info("Begin to create target table...");
targetSession.getSyncJdbcExecutor(ConnectionSessionConstants.CONSOLE_DS_KEY).execute(tableDDL);
} finally {
// Expire the target session on every path, including the early return above.
targetSession.expire();
}
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import com.oceanbase.odc.common.util.StringUtils;
import com.oceanbase.odc.core.shared.constant.TaskStatus;
import com.oceanbase.odc.metadb.schedule.ScheduleTaskEntity;
import com.oceanbase.odc.service.dlm.DataSourceInfoBuilder;
import com.oceanbase.odc.service.dlm.DataSourceInfoMapper;
import com.oceanbase.odc.service.dlm.model.DataArchiveTableConfig;
import com.oceanbase.odc.service.dlm.model.DataDeleteParameters;
import com.oceanbase.odc.service.dlm.model.DlmTask;
Expand Down Expand Up @@ -121,10 +121,10 @@ private void executeInTaskFramework(JobExecutionContext context) {
parameters.setReadThreadCount(dataDeleteParameters.getReadThreadCount());
parameters.setScanBatchSize(dataDeleteParameters.getScanBatchSize());
parameters
.setSourceDs(DataSourceInfoBuilder.build(
.setSourceDs(DataSourceInfoMapper.toDataSourceInfo(
databaseService.findDataSourceForConnectById(dataDeleteParameters.getDatabaseId())));
parameters
.setTargetDs(DataSourceInfoBuilder.build(
.setTargetDs(DataSourceInfoMapper.toDataSourceInfo(
databaseService.findDataSourceForConnectById(dataDeleteParameters.getDatabaseId())));
parameters.getSourceDs().setDatabaseName(dataDeleteParameters.getDatabaseName());
parameters.getTargetDs().setDatabaseName(dataDeleteParameters.getDatabaseName());
Expand Down
Loading
Loading