From dd0240a29f3945400a61536a9b8ff07577bd9aa9 Mon Sep 17 00:00:00 2001 From: CMT Technical User Date: Mon, 12 Feb 2024 15:55:49 +0000 Subject: [PATCH 1/3] Synchronize repository - 2024-02-12 --- .../impl/DefaultDataPipeFactory.java | 20 ++++---- .../impl/DefaultDataWorkerExecutor.java | 2 +- .../DefaultDatabaseCopyTaskRepository.java | 5 +- docs/performance/PERFORMANCE-GUIDE.md | 2 + docs/troubleshooting/TROUBLESHOOTING-GUIDE.md | 48 ++++++++++++------- 5 files changed, 48 insertions(+), 29 deletions(-) diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java index c1ffaf5..a08c110 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java @@ -84,7 +84,7 @@ public DataPipe create(CopyContext context, CopyContext.DataCopyItem it try { pipe.put(MaybeFinished.poison()); } catch (Exception p) { - LOG.error("Cannot contaminate pipe ", p); + LOG.error("Could not close contaminated pipe ", p); } if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); @@ -143,25 +143,29 @@ private void scheduleWorkers(CopyContext context, DataWorkerExecutor wo taskRepository.updateTaskCopyMethod(context, copyItem, DataCopyMethod.OFFSET.toString()); taskRepository.updateTaskKeyColumns(context, copyItem, batchColumns); - List batches = null; + List> batches; if (context.getMigrationContext().isSchedulerResumeEnabled()) { Set pendingBatchesForPipeline = taskRepository .findPendingBatchesForPipeline(context, copyItem); batches = pendingBatchesForPipeline.stream() - .map(b -> Long.valueOf(b.getLowerBoundary().toString())).collect(Collectors.toList()); + .map(b -> Pair.of(Long.valueOf(b.getLowerBoundary().toString()), + Long.valueOf(b.getUpperBoundary().toString()))) + 
.collect(Collectors.toList()); taskRepository.resetPipelineBatches(context, copyItem); } else { batches = new ArrayList<>(); for (long offset = 0; offset < totalRows; offset += pageSize) { - batches.add(offset); + batches.add(Pair.of(offset, offset + pageSize)); } } + Pair boundaries; for (int batchId = 0; batchId < batches.size(); batchId++) { - long offset = batches.get(batchId); - DataReaderTask dataReaderTask = new BatchOffsetDataReaderTask(pipeTaskContext, batchId, offset, - batchColumns); - taskRepository.scheduleBatch(context, copyItem, batchId, offset, offset + pageSize); + boundaries = batches.get(batchId); + DataReaderTask dataReaderTask = new BatchOffsetDataReaderTask(pipeTaskContext, batchId, + boundaries.getLeft(), batchColumns); + taskRepository.scheduleBatch(context, copyItem, batchId, boundaries.getLeft(), + boundaries.getRight()); workerExecutor.safelyExecute(dataReaderTask); } } else { diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataWorkerExecutor.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataWorkerExecutor.java index 71f75c6..791a1d1 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataWorkerExecutor.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataWorkerExecutor.java @@ -47,7 +47,7 @@ private Future internalSafelyExecute(Callable callable, int rejections) th for (int i = 0; i < rejections; i++) { waitInterval = backOff.nextBackOff(); } - LOG.trace("worker rejected. Retrying in {}ms...", waitInterval); + LOG.trace("Could not fetch new worker, because all are busy. 
Retrying again in {}ms...", waitInterval); Thread.sleep(waitInterval); return internalSafelyExecute(callable, rejections + 1); } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java index 5350c51..eee5f88 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java @@ -324,11 +324,10 @@ public Set findPendingTasks(CopyContext context) throws Except @Override public Set findFailedTasks(CopyContext context) throws Exception { String sql = "SELECT * FROM " + TABLECOPYTASKS - + " WHERE targetnodeid=? AND migrationid=? AND (duration = '-1' AND failure = '1') ORDER BY sourcerowcount"; + + " WHERE migrationid=? AND duration = '-1' AND failure = '1' ORDER BY sourcerowcount"; try (Connection connection = getConnection(context); PreparedStatement stmt = connection.prepareStatement(sql)) { - stmt.setObject(1, getTargetNodeId()); - stmt.setObject(2, context.getMigrationId()); + stmt.setObject(1, context.getMigrationId()); try (ResultSet resultSet = stmt.executeQuery()) { return convertToTask(resultSet); } diff --git a/docs/performance/PERFORMANCE-GUIDE.md b/docs/performance/PERFORMANCE-GUIDE.md index 91f35b8..c3020a0 100644 --- a/docs/performance/PERFORMANCE-GUIDE.md +++ b/docs/performance/PERFORMANCE-GUIDE.md @@ -54,6 +54,8 @@ The size of the batches each reader will query depends on the following properti `migration.data.reader.batchsize` `migration.data.reader.batchsize.{table}` +**NOTE** Table specific batch size change will be not respected when resuming migration. In this case restarted pipeline will keep initially defined batch boundaries, based on batch size set during migration initial startup. 
+ ### Blocking Pipe The batches read by the reader workers will be written to a blocking pipe as wrapped datasets. diff --git a/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md b/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md index 93409fc..b530a91 100644 --- a/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md +++ b/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md @@ -2,14 +2,14 @@ ## Duplicate values for indexes -Symptom: +#### Symptom: Pipeline aborts during copy process with message like: ``` FAILED! Reason: The CREATE UNIQUE INDEX statement terminated because a duplicate key was found for the object name 'dbo.cmtmedias' and the index name 'cmtcodeVersionIDX_30'. The duplicate key value is (DefaultCronJobFinishNotificationTemplate_de, ). ``` -Solution: +#### Solution: This can happen if you are using a case sensitive collation on the source database either at database level or table/column level. The commerce cloud target database is case insensitive by default and will treat values like 'ABC'/'abc' as equal during index creation. @@ -19,12 +19,12 @@ If possible, remove the duplicate rows before any migration activities. In case ## Migration fails for unknown reason -Symptom: +#### Symptom: If you were overloading the system for a longer period of time, you may encounted one of the nodes was restarting in the background without notice. -Solution: +#### Solution: In any case, check the logs (Kibana). Check in dynatrace whether a process crash log exists for the node. @@ -33,12 +33,12 @@ In case the process crashed, throttle the performance by changing the respective ## MySQL: xy table does not exist error -Symptom: +#### Symptom: `java.sql.SQLSyntaxErrorException: Table '' doesn't exist` even though the table should exist. -Solution: +#### Solution: This is a changed behaviour in the driver 8x vs 5x used before. In case there are multiple catalogs in the database, the driver distorts the reading of the table information... 
@@ -50,7 +50,7 @@ This is a changed behaviour in the driver 8x vs 5x used before. In case there ar ## MySQL: java.sql.SQLException: HOUR_OF_DAY ... -Symptom: +#### Symptom: ``` @@ -65,7 +65,7 @@ at com.mysql.cj.jdbc.result.ResultSetImpl.getTimestamp(ResultSetImpl.java:903) ~ at com.mysql.cj.jdbc.result.ResultSetImpl.getObject(ResultSetImpl.java:1243) ~[mysql-connector-java-8.0.19.jar:8.0.19] ``` -Solution: +#### Solution: Known issue on MySQL when dealing with time/date objects. Workaround is to add... @@ -76,22 +76,22 @@ Known issue on MySQL when dealing with time/date objects. Workaround is to add.. ## Backoffice does not load -Symptom: +#### Symptom: Backoffice does not load properly after the migration. -Solution: +#### Solution: - use F4 mode (admin user) and reset the backoffice settings on the fly. - browser cache reload ## Proxy error in Hac -Symptom: +#### Symptom: -Hac throws / displays proxy errors when using migration features. +HAC throws / displays proxy errors when using migration features. -Solution: +#### Solution: Change the default proxy value in the Commerce Cloud Portal to a higher value. This can be done on the edit view of the respective endpoint. @@ -103,11 +103,11 @@ In case you were using queries including TRUE/FALSE values, you may have to chan ## Sudden increase of memory -Symptom: +#### Symptom: The memory consumption is more or less stable throughout the copy process, but then suddenly increases for certain table(s). -Solution: +#### Solution: If batching of reading and writing is not possible due to the definition of the source table, the copy process falls back to a non-batched mechanism. This requires loading the full table in memory at once which, depending on the table size, may lead to unhealthy memory consumption. 
@@ -115,12 +115,12 @@ For small tables this is typically not an issue, but for large tables it should ## Some tables are copied over very slowly -Symptom: +#### Symptom: While some tables are running smoothly, others seem to suffer from low throughput. This may happen for the props table for example. -Solution: +#### Solution: The copy process tries to apply batching for reading and writing where possible. For this, the source table is scanned for either a 'PK' column (normal Commerce table) or an 'ID' column (audit tables). @@ -133,3 +133,17 @@ If a table is slow, check the following: If the smallest compound unique index consists of too many columns, the reading may impose high processing load on the source database due to the sort buffer running full. Depending on the source database, you may have to tweak some db settings to efficiently process the query. Alternatively you may have to think about adding a custom unique index manually. + +## Unable to download migration report + +#### Symptom: + +Action _"Download Report"_ with request to `/hac/commercedbsynchac/copyReport` endpoint ends up with HTTP 500 error (visible as: `ERR_INVALID_RESPONSE`). 
+ +Error visible in logs includes message: _Unable to make field private final java.time.LocalDate java.time.LocalDateTime.date accessible: module java.base does not "opens java.time" to unnamed module_ + +#### Solution: + +Ensure that JVM property includes: `--add-opens=java.base/java.time=ALL-UNNAMED` + +This can be configured via `ccv2.additional.catalina.opts` on SAP Commerce Cloud services property \ No newline at end of file From 1b5e053d349c81f59cc0d6a210701a754ef7a35f Mon Sep 17 00:00:00 2001 From: CMT Technical User Date: Wed, 14 Feb 2024 09:05:28 +0000 Subject: [PATCH 2/3] Synchronize repository - 2024-02-14 --- .../resources/commercedbsync-beans.xml | 1 + .../sql/createSchedulerTablesHANA.sql | 1 + .../sql/createSchedulerTablesMSSQL.sql | 1 + .../sql/createSchedulerTablesMYSQL.sql | 1 + .../sql/createSchedulerTablesORACLE.sql | 1 + .../sql/createSchedulerTablesPOSTGRESQL.sql | 1 + .../impl/DefaultDataPipeFactory.java | 14 +++-------- .../commercedbsync/context/CopyContext.java | 12 +++++++-- .../context/impl/DefaultMigrationContext.java | 2 +- .../CopyDatabaseTableEventListener.java | 6 ++--- .../impl/DefaultDataCopyItemProvider.java | 4 ++- .../repository/DataRepository.java | 2 +- .../impl/AbstractDataRepository.java | 2 +- .../repository/impl/AzureDataRepository.java | 3 +-- .../repository/impl/NullRepository.java | 2 +- .../repository/impl/OracleDataRepository.java | 3 +-- .../DefaultDatabaseCopyTaskRepository.java | 16 ++++++++++-- .../metric/impl/DefaultMetricService.java | 10 +++++--- .../metric/populator/MetricPopulator.java | 4 +++ .../populator/impl/DTUMetricPopulator.java | 25 +++++++++++++++++-- 20 files changed, 79 insertions(+), 32 deletions(-) diff --git a/commercedbsync/resources/commercedbsync-beans.xml b/commercedbsync/resources/commercedbsync-beans.xml index 226f07c..e7bd9c6 100644 --- a/commercedbsync/resources/commercedbsync-beans.xml +++ b/commercedbsync/resources/commercedbsync-beans.xml @@ -39,6 +39,7 @@ + diff --git 
a/commercedbsync/resources/sql/createSchedulerTablesHANA.sql b/commercedbsync/resources/sql/createSchedulerTablesHANA.sql index d2a9c74..2bdf7b2 100644 --- a/commercedbsync/resources/sql/createSchedulerTablesHANA.sql +++ b/commercedbsync/resources/sql/createSchedulerTablesHANA.sql @@ -44,6 +44,7 @@ CREATE TABLE MIGRATIONTOOLKIT_TABLECOPYTASKS ( copymethod NVARCHAR(255) NULL, keycolumns NVARCHAR(255) NULL, durationinseconds numeric(10,2) NULL DEFAULT 0, + batchsize int NOT NULL DEFAULT 1000, PRIMARY KEY (migrationid, targetnodeid, pipelinename) ); diff --git a/commercedbsync/resources/sql/createSchedulerTablesMSSQL.sql b/commercedbsync/resources/sql/createSchedulerTablesMSSQL.sql index a34bc37..35bee62 100644 --- a/commercedbsync/resources/sql/createSchedulerTablesMSSQL.sql +++ b/commercedbsync/resources/sql/createSchedulerTablesMSSQL.sql @@ -21,6 +21,7 @@ CREATE TABLE MIGRATIONTOOLKIT_TABLECOPYTASKS ( copymethod NVARCHAR(255) NULL, keycolumns NVARCHAR(255) NULL, durationinseconds numeric(10,2) NULL DEFAULT 0, + batchsize int NOT NULL DEFAULT 1000, PRIMARY KEY (migrationid, targetnodeid, pipelinename) ); diff --git a/commercedbsync/resources/sql/createSchedulerTablesMYSQL.sql b/commercedbsync/resources/sql/createSchedulerTablesMYSQL.sql index 1838fe4..155277b 100644 --- a/commercedbsync/resources/sql/createSchedulerTablesMYSQL.sql +++ b/commercedbsync/resources/sql/createSchedulerTablesMYSQL.sql @@ -21,6 +21,7 @@ CREATE TABLE MIGRATIONTOOLKIT_TABLECOPYTASKS copymethod VARCHAR(255) NULL, keycolumns VARCHAR(255) NULL, durationinseconds numeric(10, 2) NULL DEFAULT 0, + batchsize int NOT NULL DEFAULT 1000, PRIMARY KEY (migrationid, targetnodeid, pipelinename) ); # diff --git a/commercedbsync/resources/sql/createSchedulerTablesORACLE.sql b/commercedbsync/resources/sql/createSchedulerTablesORACLE.sql index 1b13a17..41f4719 100644 --- a/commercedbsync/resources/sql/createSchedulerTablesORACLE.sql +++ b/commercedbsync/resources/sql/createSchedulerTablesORACLE.sql @@ -27,6 
+27,7 @@ CREATE TABLE MIGRATIONTOOLKIT_TABLECOPYTASKS ( copymethod NVARCHAR2(255) NULL, keycolumns NVARCHAR2(255) NULL, durationinseconds number(10,2) DEFAULT 0 NULL, + batchsize number(10) DEFAULT 1000 NOT NULL, PRIMARY KEY (migrationid, targetnodeid, pipelinename) ) / diff --git a/commercedbsync/resources/sql/createSchedulerTablesPOSTGRESQL.sql b/commercedbsync/resources/sql/createSchedulerTablesPOSTGRESQL.sql index d5440aa..d20ed15 100644 --- a/commercedbsync/resources/sql/createSchedulerTablesPOSTGRESQL.sql +++ b/commercedbsync/resources/sql/createSchedulerTablesPOSTGRESQL.sql @@ -22,6 +22,7 @@ CREATE TABLE MIGRATIONTOOLKIT_TABLECOPYTASKS ( copymethod VARCHAR(255) NULL, keycolumns VARCHAR(255) NULL, durationinseconds numeric(10,2) NULL DEFAULT 0, + batchsize int NOT NULL DEFAULT 1000, PRIMARY KEY (migrationid, targetnodeid, pipelinename) ); diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java index a08c110..56bbc87 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/concurrent/impl/DefaultDataPipeFactory.java @@ -107,7 +107,7 @@ private void scheduleWorkers(CopyContext context, DataWorkerExecutor wo context.getMigrationContext().getDataSourceRepository()); String table = copyItem.getSourceItem(); long totalRows = copyItem.getRowCount(); - long pageSize = getReaderBatchSizeForTable(context, table); + int pageSize = copyItem.getBatchSize(); try { PerformanceRecorder recorder = context.getPerformanceProfiler().createRecorder(PerformanceCategory.DB_READ, table); @@ -186,13 +186,12 @@ private void scheduleWorkers(CopyContext context, DataWorkerExecutor wo taskRepository.updateTaskCopyMethod(context, copyItem, DataCopyMethod.SEEK.toString()); taskRepository.updateTaskKeyColumns(context, 
copyItem, Lists.newArrayList(batchColumn)); - List> batchMarkersList = null; + List> batchMarkersList; if (context.getMigrationContext().isSchedulerResumeEnabled()) { - batchMarkersList = new ArrayList<>(); Set pendingBatchesForPipeline = taskRepository .findPendingBatchesForPipeline(context, copyItem); - batchMarkersList.addAll(pendingBatchesForPipeline.stream() - .map(b -> Collections.list(b.getLowerBoundary())).collect(Collectors.toList())); + batchMarkersList = pendingBatchesForPipeline.stream() + .map(b -> Collections.list(b.getLowerBoundary())).collect(Collectors.toList()); taskRepository.resetPipelineBatches(context, copyItem); } else { MarkersQueryDefinition queryDefinition = new MarkersQueryDefinition(); @@ -241,9 +240,4 @@ private void scheduleWorkers(CopyContext context, DataWorkerExecutor wo throw new RuntimeException("Exception while preparing reader tasks", ex); } } - - private static int getReaderBatchSizeForTable(final CopyContext context, final String tableName) { - Integer tableBatchSize = context.getMigrationContext().getReaderBatchSize(tableName); - return tableBatchSize == null ? 
context.getMigrationContext().getReaderBatchSize() : tableBatchSize; - } } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/CopyContext.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/CopyContext.java index e99c0c9..66c526c 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/CopyContext.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/CopyContext.java @@ -70,16 +70,20 @@ public static class DataCopyItem { private final String targetItem; private final Map columnMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); private final Long rowCount; + private final Integer batchSize; - public DataCopyItem(String sourceItem, String targetItem) { + public DataCopyItem(String sourceItem, String targetItem, Integer batchSize) { this.sourceItem = sourceItem; this.targetItem = targetItem; + this.batchSize = batchSize; this.rowCount = null; } - public DataCopyItem(String sourceItem, String targetItem, Map columnMap, Long rowCount) { + public DataCopyItem(String sourceItem, String targetItem, Map columnMap, Long rowCount, + Integer batchSize) { this.sourceItem = sourceItem; this.targetItem = targetItem; + this.batchSize = batchSize; this.columnMap.clear(); this.columnMap.putAll(columnMap); this.rowCount = rowCount; @@ -109,6 +113,10 @@ public Long getRowCount() { return rowCount; } + public Integer getBatchSize() { + return batchSize; + } + @Override public String toString() { return new StringJoiner(", ", DataCopyItem.class.getSimpleName() + "[", "]") diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/impl/DefaultMigrationContext.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/impl/DefaultMigrationContext.java index 0d44af9..4e93efc 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/impl/DefaultMigrationContext.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/context/impl/DefaultMigrationContext.java @@ 
-116,7 +116,7 @@ public int getReaderBatchSize() { public Integer getReaderBatchSize(final String tableName) { String tblConfKey = CommercedbsyncConstants.MIGRATION_DATA_READER_BATCHSIZE_FOR_TABLE.replace("{table}", tableName); - return configuration.getInteger(tblConfKey, null); + return configuration.getInteger(tblConfKey, getReaderBatchSize()); } @Override diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/events/handlers/CopyDatabaseTableEventListener.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/events/handlers/CopyDatabaseTableEventListener.java index c5acbc7..bcf0f58 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/events/handlers/CopyDatabaseTableEventListener.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/events/handlers/CopyDatabaseTableEventListener.java @@ -56,9 +56,9 @@ protected void onEvent(final CopyDatabaseTableEvent event) { CopyContext copyContext = new CopyContext(migrationId, migrationContext, new HashSet<>(), performanceProfiler); Set copyTableTasks = databaseCopyTaskRepository.findPendingTasks(copyContext); - Set items = copyTableTasks - .stream().map(task -> new CopyContext.DataCopyItem(task.getSourcetablename(), - task.getTargettablename(), task.getColumnmap(), task.getSourcerowcount())) + Set items = copyTableTasks.stream() + .map(task -> new CopyContext.DataCopyItem(task.getSourcetablename(), task.getTargettablename(), + task.getColumnmap(), task.getSourcerowcount(), task.getBatchsize())) .collect(Collectors.toSet()); copyContext.getCopyItems().addAll(items); databaseMigrationCopyService.copyAllAsync(copyContext); diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/provider/impl/DefaultDataCopyItemProvider.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/provider/impl/DefaultDataCopyItemProvider.java index 10b88a0..99b0a39 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/provider/impl/DefaultDataCopyItemProvider.java +++ 
b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/provider/impl/DefaultDataCopyItemProvider.java @@ -238,7 +238,9 @@ private CopyContext.DataCopyItem createCopyItem(final MigrationContext context, final String targetTableName = targetTable.getFullTableName(); DataRepository sds = context.getDataSourceRepository(); String sTableName = context.getItemTypeViewNameByTable(sourceTableName, sds); - final CopyContext.DataCopyItem dataCopyItem = new CopyContext.DataCopyItem(sTableName, targetTableName); + int batchSize = context.getReaderBatchSize(sourceTableName); + final CopyContext.DataCopyItem dataCopyItem = new CopyContext.DataCopyItem(sTableName, targetTableName, + batchSize); addColumnMappingsIfNecessary(context, sourceTable, dataCopyItem); return dataCopyItem; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/DataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/DataRepository.java index fbaf235..b041d24 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/DataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/DataRepository.java @@ -77,7 +77,7 @@ long getRowCountModifiedAfter(String table, Instant time, boolean isDeletionEnab void runSqlScriptOnPrimary(final Resource resource); - float getDatabaseUtilization() throws SQLException; + float getDatabaseUtilization(); int truncateTable(String table) throws Exception; diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AbstractDataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AbstractDataRepository.java index e57a5d8..c1679e1 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AbstractDataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AbstractDataRepository.java @@ -185,7 +185,7 @@ public void runSqlScriptOnPrimary(Resource resource) { } 
@Override - public float getDatabaseUtilization() throws SQLException { + public float getDatabaseUtilization() { throw new UnsupportedOperationException("Must be added in the specific repository implementation"); } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AzureDataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AzureDataRepository.java index 21fe2e8..c557526 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AzureDataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/AzureDataRepository.java @@ -8,7 +8,6 @@ import java.sql.Connection; import java.sql.ResultSet; -import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.List; @@ -186,7 +185,7 @@ public String getDatabaseTimezone() { } @Override - public float getDatabaseUtilization() throws SQLException { + public float getDatabaseUtilization() { String query = "SELECT TOP 1 end_time, (SELECT Max(v) FROM (VALUES (avg_cpu_percent),(avg_data_io_percent),(avg_log_write_percent)) AS value(v)) AS [avg_DTU_percent] FROM sys.dm_db_resource_stats ORDER by end_time DESC;"; try (Connection connection = getConnection(); Statement stmt = connection.createStatement(); diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java index c80630d..39d803e 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java @@ -153,7 +153,7 @@ public void runSqlScriptOnPrimary(Resource resource) { } @Override - public float getDatabaseUtilization() throws SQLException { + public float getDatabaseUtilization() { throw new InvalidDataSourceConfigurationException(this.message, 
this.dataSourceConfiguration); } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java index 5830b46..0dd1070 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java @@ -9,7 +9,6 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; -import java.sql.SQLException; import java.sql.Types; import java.util.Collections; import java.util.List; @@ -181,7 +180,7 @@ public void runSqlScript(final Resource resource) { } @Override - public float getDatabaseUtilization() throws SQLException { + public float getDatabaseUtilization() { return (float) 1.00; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java index eee5f88..5a82fd3 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java @@ -180,7 +180,7 @@ private LocalDateTime getDateTime(ResultSet rs, String column) throws Exception public synchronized void scheduleTask(CopyContext context, CopyContext.DataCopyItem copyItem, long sourceRowCount, int targetNode) throws Exception { String insert = "INSERT INTO " + TABLECOPYTASKS - + " (targetnodeid, pipelinename, sourcetablename, targettablename, columnmap, migrationid, sourcerowcount, lastupdate) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"; + + " (targetnodeid, pipelinename, sourcetablename, targettablename, columnmap, migrationid, sourcerowcount, batchsize, lastupdate) VALUES (?, ?, ?, ?, ?, ?, ?, ?, 
?)"; try (Connection conn = getConnection(context); PreparedStatement stmt = conn.prepareStatement(insert)) { stmt.setObject(1, targetNode); stmt.setObject(2, copyItem.getPipelineName()); @@ -189,7 +189,8 @@ public synchronized void scheduleTask(CopyContext context, CopyContext.DataCopyI stmt.setObject(5, new Gson().toJson(copyItem.getColumnMap())); stmt.setObject(6, context.getMigrationId()); stmt.setObject(7, sourceRowCount); - setTimestamp(stmt, 8, now()); + stmt.setObject(8, copyItem.getBatchSize()); + setTimestamp(stmt, 9, now()); stmt.executeUpdate(); } } @@ -511,11 +512,22 @@ private Set convertToTask(ResultSet rs) throws Exception { copyTask.setCopyMethod(rs.getString("copymethod")); copyTask.setKeyColumns(Splitter.on(",") .splitToList(StringUtils.defaultIfEmpty(rs.getString("keycolumns"), StringUtils.EMPTY))); + setBatchSizeSafely(copyTask, rs); copyTasks.add(copyTask); } return copyTasks; } + // just a temporary fallback to handle ongoing migrations, where this column is + // not yet available + private void setBatchSizeSafely(final DatabaseCopyTask copyTask, ResultSet rs) { + try { + copyTask.setBatchsize(rs.getInt("batchsize")); + } catch (SQLException e) { + copyTask.setBatchsize(1000); + } + } + private Set convertToBatch(ResultSet rs) throws Exception { Set copyBatches = new LinkedHashSet<>(); while (rs.next()) { diff --git a/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/impl/DefaultMetricService.java b/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/impl/DefaultMetricService.java index 4d4aacf..2df3ef5 100644 --- a/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/impl/DefaultMetricService.java +++ b/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/impl/DefaultMetricService.java @@ -30,10 +30,12 @@ public DefaultMetricService(List populators) { public List getMetrics(MigrationContext context) { List dataList = new ArrayList<>(); for (MetricPopulator populator : populators) { 
- try { - dataList.add(populator.populate(context)); - } catch (Exception e) { - LOG.error("Error while populating metric. Populator: " + e.getMessage()); + if (populator.canHandle(context)) { + try { + dataList.add(populator.populate(context)); + } catch (Exception e) { + LOG.error("Error while populating metric. Populator: " + e.getMessage()); + } } } return dataList; diff --git a/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/MetricPopulator.java b/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/MetricPopulator.java index f9f226e..6e94bde 100644 --- a/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/MetricPopulator.java +++ b/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/MetricPopulator.java @@ -23,4 +23,8 @@ default void populateColors(MetricData data) { data.setSecondaryValueStandardColor(SECONDARY_STANDARD_COLOR); data.setSecondaryValueCriticalColor(SECONDARY_CRITICAL_COLOR); } + + default boolean canHandle(MigrationContext context) { + return true; + } } diff --git a/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/impl/DTUMetricPopulator.java b/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/impl/DTUMetricPopulator.java index de88c25..be223be 100644 --- a/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/impl/DTUMetricPopulator.java +++ b/commercedbsynchac/src/com/sap/cx/boosters/commercedbsynchac/metric/populator/impl/DTUMetricPopulator.java @@ -6,15 +6,19 @@ package com.sap.cx.boosters.commercedbsynchac.metric.populator.impl; +import com.sap.cx.boosters.commercedbsync.repository.DataRepository; +import com.sap.cx.boosters.commercedbsync.repository.impl.AzureDataRepository; import com.sap.cx.boosters.commercedbsynchac.metric.populator.MetricPopulator; import de.hybris.platform.commercedbsynchac.data.MetricData; import 
com.sap.cx.boosters.commercedbsync.context.MigrationContext; +import java.util.Optional; + public class DTUMetricPopulator implements MetricPopulator { @Override public MetricData populate(MigrationContext context) throws Exception { - MetricData data = new MetricData(); - int primaryValue = (int) context.getDataTargetRepository().getDatabaseUtilization(); + int primaryValue = getAzureDataRepository(context).map(DataRepository::getDatabaseUtilization).orElse(-1f) + .intValue(); if (primaryValue > 100) { primaryValue = 100; } @@ -24,6 +28,8 @@ public MetricData populate(MigrationContext context) throws Exception { secondaryValue = -1; } + MetricData data = new MetricData(); + data.setMetricId("dtu"); data.setName("DTU"); data.setDescription("The current DTU utilization of the azure database"); @@ -36,7 +42,22 @@ public MetricData populate(MigrationContext context) throws Exception { data.setSecondaryValueUnit("%"); data.setSecondaryValueThreshold(0d); populateColors(data); + return data; } + private Optional getAzureDataRepository(MigrationContext context) { + if (context.getDataTargetRepository() instanceof AzureDataRepository) { + return Optional.of(context.getDataTargetRepository()); + } else if (context.getDataSourceRepository() instanceof AzureDataRepository) { + return Optional.of(context.getDataSourceRepository()); + } + + return Optional.empty(); + } + + @Override + public boolean canHandle(MigrationContext context) { + return getAzureDataRepository(context).isPresent(); + } } From 1c5eee0341efcb3c97265c89b3a86ebd58644969 Mon Sep 17 00:00:00 2001 From: CMT Technical User Date: Fri, 16 Feb 2024 10:26:41 +0000 Subject: [PATCH 3/3] Synchronize repository - 2024-02-16 --- .../resources/commercedbsync-items.xml | 13 ++++- .../resources/commercedbsync-spring.xml | 1 + .../commercedbsync-locales_en.properties | 6 +++ .../jobs/AbstractMigrationJobPerformable.java | 49 ++++++++++++++++--- .../commercedbsync/jobs/FullMigrationJob.java | 28 ++++++++++- 
...ransformFunctionGeneratorPreProcessor.java | 13 +++-- .../TypeInfoTableGeneratorPreProcessor.java | 13 +++-- .../repository/impl/HanaDataRepository.java | 2 +- .../repository/impl/MySQLDataRepository.java | 6 ++- .../repository/impl/NullRepository.java | 1 - .../repository/impl/OracleDataRepository.java | 2 +- .../CustomClusterDatabaseCopyScheduler.java | 9 ++-- .../service/DatabaseCopyTaskRepository.java | 9 ++++ .../service/DatabaseMigrationService.java | 12 +++++ .../DefaultDatabaseCopyTaskRepository.java | 13 +++++ .../impl/DefaultDatabaseMigrationService.java | 8 ++- .../hac/resources/jsp/dataCopy.jsp | 4 +- .../hac/resources/static/js/dataCopy.js | 10 +++- .../CommercemigrationhacController.java | 10 +++- docs/troubleshooting/TROUBLESHOOTING-GUIDE.md | 16 +++++- 20 files changed, 188 insertions(+), 37 deletions(-) diff --git a/commercedbsync/resources/commercedbsync-items.xml b/commercedbsync/resources/commercedbsync-items.xml index 6d1629e..1e7091e 100644 --- a/commercedbsync/resources/commercedbsync-items.xml +++ b/commercedbsync/resources/commercedbsync-items.xml @@ -100,7 +100,18 @@ false - + + + Resume a failed migration + + + false + + + Migration Id for the job + + + diff --git a/commercedbsync/resources/commercedbsync-spring.xml b/commercedbsync/resources/commercedbsync-spring.xml index 3ac4eef..9adfbf3 100644 --- a/commercedbsync/resources/commercedbsync-spring.xml +++ b/commercedbsync/resources/commercedbsync-spring.xml @@ -315,6 +315,7 @@ + diff --git a/commercedbsync/resources/localization/commercedbsync-locales_en.properties b/commercedbsync/resources/localization/commercedbsync-locales_en.properties index 010258d..1872fa0 100644 --- a/commercedbsync/resources/localization/commercedbsync-locales_en.properties +++ b/commercedbsync/resources/localization/commercedbsync-locales_en.properties @@ -30,3 +30,9 @@ type.MigrationCronJob.maxWriterWorkers.description=Number of writer workers to b type.MigrationCronJob.batchSize.name=Batch Size 
type.MigrationCronJob.batchSize.description=Batch size used to query data + +type.FullMigrationCronJob.resumeMigration.name=Resume Migration +type.FullMigrationCronJob.resumeMigration.description= + +type.FullMigrationCronJob.migrationId.name=Migration ID +type.FullMigrationCronJob.migrationId.description= diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/AbstractMigrationJobPerformable.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/AbstractMigrationJobPerformable.java index 86c53b6..aaff5a8 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/AbstractMigrationJobPerformable.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/AbstractMigrationJobPerformable.java @@ -74,12 +74,41 @@ public abstract class AbstractMigrationJobPerformable extends AbstractJobPerform @Override public boolean isPerformable() { for (CronJobModel cronJob : getCronJobService().getRunningOrRestartedCronJobs()) { + currentMigrationId = databaseMigrationService.getMigrationID(migrationContext); + if ((cronJob instanceof IncrementalMigrationCronJobModel || cronJob instanceof FullMigrationCronJobModel)) { - LOG.info("Previous migrations job already running {} and Type {} ", cronJob.getCode(), - cronJob.getItemtype()); + if (isJobStateAborted(cronJob)) { + try { + abortCurrentMigration(); + databaseMigrationService.markRemainingTasksAborted(migrationContext, currentMigrationId); + clearAbortRequestedIfNeeded(cronJob); + + LOG.info("Migration with ID: {} was marked as aborted", currentMigrationId); + } catch (Exception e) { + LOG.warn("Failed to abort current migration"); + LOG.debug("Migration abort failed", e); + } + } else { + LOG.info("Previous migration job already running {} and type {}", cronJob.getCode(), + cronJob.getItemtype()); + } return false; } } + + if (StringUtils.isNotEmpty(currentMigrationId)) { + try { + if (databaseMigrationService.getMigrationState(migrationContext, currentMigrationId) + .getStatus() 
== MigrationProgress.RUNNING) { + LOG.info("Previous migration already running, ID: {}", currentMigrationId); + return false; + } + } catch (Exception e) { + LOG.warn("Unable to fetch current migration status"); + LOG.debug("Migration status fetch failed", e); + } + } + return true; } @@ -199,13 +228,12 @@ protected MigrationStatus waitForFinishCronjobs(IncrementalMigrationContext cont } while (StringUtils.equalsAnyIgnoreCase(status.getStatus().toString(), RUNNING_MIGRATION)); if (aborted) { - LOG.info(" Aborted ...STOPPING migration "); - databaseMigrationService.stopMigration(migrationContext, currentMigrationId); + abortCurrentMigration(); + clearAbortRequestedIfNeeded(cronJobModel); LOG.error("Database migration has been ABORTED, Migration State= " + status + ", Total Tasks " + status.getTotalTasks() + ", migration id =" + status.getMigrationID() + ", Completed Tasks " + status.getCompletedTasks()); - clearAbortRequestedIfNeeded(cronJobModel); - throw new AbortCronJobException("CronJOB ABORTED"); + throw new AbortCronJobException("Cronjob ABORTED"); } if (status.isFailed()) { @@ -218,6 +246,11 @@ protected MigrationStatus waitForFinishCronjobs(IncrementalMigrationContext cont return status; } + private void abortCurrentMigration() throws Exception { + LOG.info("Aborted ...STOPPING migration"); + databaseMigrationService.stopMigration(migrationContext, currentMigrationId); + } + protected LaunchOptions createLaunchOptions(MigrationCronJobModel migrationCronJob) { final LaunchOptions launchOptions = new LaunchOptions(); @@ -240,8 +273,8 @@ private void putLaunchOptionProperty(final LaunchOptions launchOptions, String p protected boolean isJobStateAborted(final CronJobModel cronJobModel) { this.modelService.refresh(cronJobModel); - LOG.info("cron job status = " + cronJobModel.getStatus()); - LOG.info("cron job request to abort =" + cronJobModel.getRequestAbort()); + LOG.info("Cron job status: {}", cronJobModel.getStatus()); + LOG.info("Cron job request to abort: 
{}", BooleanUtils.isTrue(cronJobModel.getRequestAbort())); return ((cronJobModel.getStatus() == CronJobStatus.ABORTED) || (cronJobModel.getRequestAbort() != null && cronJobModel.getRequestAbort())); } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/FullMigrationJob.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/FullMigrationJob.java index d3ebcd6..42abe9c 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/FullMigrationJob.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/jobs/FullMigrationJob.java @@ -7,13 +7,16 @@ package com.sap.cx.boosters.commercedbsync.jobs; import com.google.common.base.Preconditions; +import com.sap.cx.boosters.commercedbsync.constants.CommercedbsyncConstants; import com.sap.cx.boosters.commercedbsync.context.IncrementalMigrationContext; +import com.sap.cx.boosters.commercedbsync.context.LaunchOptions; import de.hybris.platform.cronjob.enums.CronJobResult; import de.hybris.platform.cronjob.enums.CronJobStatus; import de.hybris.platform.cronjob.jalo.AbortCronJobException; import de.hybris.platform.cronjob.model.CronJobModel; import de.hybris.platform.servicelayer.cronjob.PerformResult; import com.sap.cx.boosters.commercedbsync.model.cron.FullMigrationCronJobModel; +import de.hybris.platform.servicelayer.model.ModelService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -24,6 +27,12 @@ public class FullMigrationJob extends AbstractMigrationJobPerformable { private static final Logger LOG = LoggerFactory.getLogger(FullMigrationJob.class); + private final ModelService modelService; + + public FullMigrationJob(final ModelService modelService) { + this.modelService = modelService; + } + @Override public PerformResult perform(final CronJobModel cronJobModel) { FullMigrationCronJobModel fullMigrationCronJobModel; @@ -54,9 +63,24 @@ public PerformResult perform(final CronJobModel cronJobModel) { incrementalMigrationContext.setIncrementalModeEnabled(false); 
incrementalMigrationContext .setFullDatabaseMigrationEnabled(fullMigrationCronJobModel.isFullDatabaseMigration()); - currentMigrationId = databaseMigrationService.startMigration(incrementalMigrationContext, - createLaunchOptions(fullMigrationCronJobModel)); + final LaunchOptions launchOptions = createLaunchOptions(fullMigrationCronJobModel); + if (fullMigrationCronJobModel.isResumeMigration()) { + currentMigrationId = fullMigrationCronJobModel.getMigrationId(); + Preconditions.checkNotNull(currentMigrationId, + "Migration ID must be present to resume failed migration job"); + launchOptions.getPropertyOverrideMap().put(CommercedbsyncConstants.MIGRATION_SCHEDULER_RESUME_ENABLED, + true); + databaseMigrationService.resumeUnfinishedMigration(incrementalMigrationContext, launchOptions, + currentMigrationId); + LOG.info("Resumed Migration {}", currentMigrationId); + } else { + currentMigrationId = databaseMigrationService.startMigration(incrementalMigrationContext, + launchOptions); + LOG.info("Started Migration {}", currentMigrationId); + fullMigrationCronJobModel.setMigrationId(currentMigrationId); + modelService.save(fullMigrationCronJobModel); + } waitForFinishCronjobs(incrementalMigrationContext, currentMigrationId, cronJobModel); } catch (final AbortCronJobException e) { return new PerformResult(CronJobResult.ERROR, CronJobStatus.ABORTED); diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TransformFunctionGeneratorPreProcessor.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TransformFunctionGeneratorPreProcessor.java index f3fa44b..068a030 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TransformFunctionGeneratorPreProcessor.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TransformFunctionGeneratorPreProcessor.java @@ -34,18 +34,21 @@ public void process(final CopyContext context) { } private String getPlatformSpecificSQL(final 
DataBaseProvider databaseProvider) { - String platformSpecificSQL = "mssql-general.sql"; - if (databaseProvider.isHanaUsed() || databaseProvider.isOracleUsed() || databaseProvider.isPostgreSqlUsed()) { - platformSpecificSQL = null; + String platformSpecificSQL = null; + + if (databaseProvider.isMssqlUsed()) { + platformSpecificSQL = "mssql-general.sql"; } - LOG.info("Identified platform specific transformation function SQL {}", platformSpecificSQL); + LOG.info("Identified platform specific transformation function SQL: {}", + StringUtils.defaultIfEmpty(platformSpecificSQL, "")); return platformSpecificSQL; } @Override public boolean shouldExecute(CopyContext context) { - return context.getMigrationContext().isDataExportEnabled(); + return context.getMigrationContext().isDataExportEnabled() + && context.getMigrationContext().getDataSourceRepository().getDatabaseProvider().isMssqlUsed(); } } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TypeInfoTableGeneratorPreProcessor.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TypeInfoTableGeneratorPreProcessor.java index aa29653..69ca53e 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TypeInfoTableGeneratorPreProcessor.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/processors/impl/TypeInfoTableGeneratorPreProcessor.java @@ -68,16 +68,19 @@ public void process(final CopyContext context) { @Override public boolean shouldExecute(CopyContext context) { - return context.getMigrationContext().isDataExportEnabled(); + return context.getMigrationContext().isDataExportEnabled() + && context.getMigrationContext().getDataSourceRepository().getDatabaseProvider().isMssqlUsed(); } private String getPlatformSpecificSQL(final DataBaseProvider databaseProvider) { - String platformSpecificSQL = "mssql-typeinfotable.sql"; - if (databaseProvider.isHanaUsed() || databaseProvider.isOracleUsed() || 
databaseProvider.isPostgreSqlUsed()) { - platformSpecificSQL = null; + String platformSpecificSQL = null; + + if (databaseProvider.isMssqlUsed()) { + platformSpecificSQL = "mssql-typeinfotable.sql"; } - LOG.info("Identified platform specific typeinfo table SQL {}", platformSpecificSQL); + LOG.info("Identified platform specific typeinfo table SQL: {}", + StringUtils.defaultIfEmpty(platformSpecificSQL, "")); return platformSpecificSQL; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/HanaDataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/HanaDataRepository.java index 62366bd..c8a3bfa 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/HanaDataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/HanaDataRepository.java @@ -96,7 +96,7 @@ public String getDatabaseTimezone() { return rs.getString("VALUE"); } } catch (Exception e) { - e.getMessage(); + LOG.warn("Failed to check database timezone", e); } return null; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/MySQLDataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/MySQLDataRepository.java index 34f6f0d..01eaa7b 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/MySQLDataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/MySQLDataRepository.java @@ -18,6 +18,8 @@ import de.hybris.bootstrap.ddl.DatabaseSettings; import de.hybris.bootstrap.ddl.HybrisPlatform; import org.apache.ddlutils.Platform; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.core.io.Resource; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; @@ -28,6 +30,8 @@ import java.util.List; public class MySQLDataRepository extends AbstractDataRepository { + private static final Logger LOG = 
LoggerFactory.getLogger(MySQLDataRepository.class); + public MySQLDataRepository(MigrationContext migrationContext, DataSourceConfiguration dataSourceConfiguration, DatabaseMigrationDataTypeMapperService databaseMigrationDataTypeMapperService) { super(migrationContext, dataSourceConfiguration, databaseMigrationDataTypeMapperService); @@ -149,7 +153,7 @@ public String getDatabaseTimezone() { return rs.getString("timezone"); } } catch (Exception e) { - e.getMessage(); + LOG.warn("Failed to check database timezone", e); } return null; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java index 39d803e..07ed536 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/NullRepository.java @@ -240,7 +240,6 @@ public String buildBulkUpsertStatement(String table, List columnsToCopy, @Override public String getDatabaseTimezone() { - return null; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java index 0dd1070..835cba2 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/repository/impl/OracleDataRepository.java @@ -275,7 +275,7 @@ public String getDatabaseTimezone() { return "Different timezone"; } } catch (Exception e) { - e.getMessage(); + LOG.warn("Failed to check database timezone", e); } return null; } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/scheduler/impl/CustomClusterDatabaseCopyScheduler.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/scheduler/impl/CustomClusterDatabaseCopyScheduler.java index 
06e7206..122e586 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/scheduler/impl/CustomClusterDatabaseCopyScheduler.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/scheduler/impl/CustomClusterDatabaseCopyScheduler.java @@ -122,16 +122,19 @@ public void schedule(CopyContext context) throws Exception { @Override public void resumeUnfinishedItems(CopyContext copyContext) throws Exception { databaseCopySchedulerAlgorithm.reset(); - int ownNodeId = databaseCopySchedulerAlgorithm.getOwnNodeId(); Set failedTasks = databaseCopyTaskRepository.findFailedTasks(copyContext); + if (failedTasks.isEmpty()) { + throw new IllegalStateException("No pending failed table copy tasks found to be resumed"); + } + for (DatabaseCopyTask failedTask : failedTasks) { databaseCopyTaskRepository.rescheduleTask(copyContext, failedTask.getPipelinename(), databaseCopySchedulerAlgorithm.next()); } databaseCopyTaskRepository.resetMigration(copyContext); startMonitorThread(copyContext); - final CopyDatabaseTableEvent event = new CopyDatabaseTableEvent(ownNodeId, copyContext.getMigrationId(), - copyContext.getPropertyOverrideMap()); + final CopyDatabaseTableEvent event = new CopyDatabaseTableEvent(databaseCopySchedulerAlgorithm.getOwnNodeId(), + copyContext.getMigrationId(), copyContext.getPropertyOverrideMap()); eventService.publishEvent(event); } diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseCopyTaskRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseCopyTaskRepository.java index f5bbd87..b074cbf 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseCopyTaskRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseCopyTaskRepository.java @@ -143,6 +143,15 @@ Optional findPipeline(CopyContext context, CopyContext.DataCop */ void markTaskCompleted(CopyContext context, CopyContext.DataCopyItem copyItem, String duration) throws 
Exception; + /** + * Mark all remaining table copy tasks as aborted, should be used when migration + * was stopped unexpectedly (for example after application crashed) + * + * @param context + * @throws Exception + */ + void markRemainingTasksAborted(CopyContext context) throws Exception; + void markTaskTruncated(CopyContext context, CopyContext.DataCopyItem copyItem) throws Exception; /** diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseMigrationService.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseMigrationService.java index 39da28a..bb0310c 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseMigrationService.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/DatabaseMigrationService.java @@ -57,6 +57,18 @@ void resumeUnfinishedMigration(MigrationContext context, LaunchOptions launchOpt */ void stopMigration(MigrationContext context, String migrationID) throws Exception; + /** + * Mark all remaining table copy tasks as aborted, should be used when migration + * was stopped unexpectedly (for example after application crashed) + * + * @param context + * Migration configuration + * @param migrationID + * ID of the migration process tasks that should be marked as aborted + * @throws Exception + */ + void markRemainingTasksAborted(MigrationContext context, String migrationID) throws Exception; + /** * Get current overall state without details * diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java index 5a82fd3..d54e73a 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseCopyTaskRepository.java @@ -406,6 +406,19 @@ public 
synchronized void markTaskFailed(CopyContext context, CopyContext.DataCop mutePerformanceRecorder(context, copyItem); } + @Override + public synchronized void markRemainingTasksAborted(CopyContext context) throws Exception { + // spotless:off + String sql = "UPDATE " + TABLECOPYTASKS + " SET failure='1', duration='-1', error='Aborted', lastupdate=? WHERE migrationId=? AND duration IS NULL AND failure = '0'"; + // spotless:on + try (Connection connection = getConnection(context); + PreparedStatement stmt = connection.prepareStatement(sql)) { + setTimestamp(stmt, 1, now()); + stmt.setObject(2, context.getMigrationId()); + stmt.executeUpdate(); + } + } + @Override public synchronized void markTaskTruncated(CopyContext context, CopyContext.DataCopyItem copyItem) throws Exception { diff --git a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseMigrationService.java b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseMigrationService.java index dd14e4f..c553abe 100644 --- a/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseMigrationService.java +++ b/commercedbsync/src/com/sap/cx/boosters/commercedbsync/service/impl/DefaultDatabaseMigrationService.java @@ -109,12 +109,18 @@ public void stopMigration(MigrationContext context, String migrationID) throws E databaseCopyScheduler.abort(copyContext); } + @Override + public void markRemainingTasksAborted(MigrationContext context, String migrationID) throws Exception { + CopyContext copyContext = buildIdContext(context, migrationID); + databaseCopyTaskRepository.markRemainingTasksAborted(copyContext); + } + private CopyContext buildCopyContext(MigrationContext context, String migrationID) throws Exception { Set dataCopyItems = copyItemProvider.get(context); return new CopyContext(migrationID, context, dataCopyItems, performanceProfiler); } - private CopyContext buildIdContext(MigrationContext context, String migrationID) throws 
Exception { + private CopyContext buildIdContext(MigrationContext context, String migrationID) { // we use a lean implementation of the copy context to avoid calling the // provider which is not required for task management. return new CopyContext.IdCopyContext(migrationID, context, performanceProfiler); diff --git a/commercedbsynchac/hac/resources/jsp/dataCopy.jsp b/commercedbsynchac/hac/resources/jsp/dataCopy.jsp index 5e5307e..21ba639 100644 --- a/commercedbsynchac/hac/resources/jsp/dataCopy.jsp +++ b/commercedbsynchac/hac/resources/jsp/dataCopy.jsp @@ -24,7 +24,7 @@
-
+
">

Data Migration

@@ -38,7 +38,7 @@ I am aware of timezone differences, proceed with migration -
"> +
diff --git a/commercedbsynchac/hac/resources/static/js/dataCopy.js b/commercedbsynchac/hac/resources/static/js/dataCopy.js index 02e32d3..2316238 100644 --- a/commercedbsynchac/hac/resources/static/js/dataCopy.js +++ b/commercedbsynchac/hac/resources/static/js/dataCopy.js @@ -74,7 +74,7 @@ function resumeRunning() { $.ajax({ - url: $('#buttonsContainer').attr('data-resumeUrl'), + url: $('#migrationPanel').attr('data-resumeUrl'), type: 'GET', headers: { 'Accept': 'application/json', @@ -104,6 +104,9 @@ } currentMigrationID = data.migrationID; empty(logContainer); + + if (!currentMigrationID) return; + updateStatus(data); doPoll(); pollInterval = setInterval(doPoll, 5000); @@ -214,6 +217,9 @@ if (status.failed) { dd.innerText = "Failed"; dd.classList.add("failed"); + } else if (status.aborted) { + dd.innerText = "Aborted"; + dd.classList.add("failed") } else if (status.completed) { dd.innerText = "Completed"; dd.classList.add("completed") @@ -242,7 +248,7 @@ } function doPoll() { - console.log(new Date(lastUpdateTime).toISOString()); + // console.log(new Date(lastUpdateTime).toISOString()); $.ajax({ url: statusUrl, type: 'GET', diff --git a/commercedbsynchac/hac/src/de/hybris/platform/hac/controller/CommercemigrationhacController.java b/commercedbsynchac/hac/src/de/hybris/platform/hac/controller/CommercemigrationhacController.java index 5bbcb0a..ffe15e8 100644 --- a/commercedbsynchac/hac/src/de/hybris/platform/hac/controller/CommercemigrationhacController.java +++ b/commercedbsynchac/hac/src/de/hybris/platform/hac/controller/CommercemigrationhacController.java @@ -557,13 +557,19 @@ private ConfigPanelItemDTO createConfigItem(String id, String name, String descr } private boolean checkTimeZoneDifferences(MigrationContext context) { - TimeZone source = TimeZone.getTimeZone(context.getDataSourceRepository().getDatabaseTimezone()); + String databaseTimezone = context.getDataSourceRepository().getDatabaseTimezone(); + + if (StringUtils.isEmpty(databaseTimezone)) { + 
LOG.info("Database timezone for source not available!"); + return false; + } + + TimeZone source = TimeZone.getTimeZone(databaseTimezone); if (TimeZone.getTimeZone("UTC").getRawOffset() == source.getRawOffset()) { LOG.info("The timezone on source and target are the same!!"); return true; } LOG.info("The timezone on source and target are different!!"); return false; - } } diff --git a/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md b/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md index b530a91..64f7002 100644 --- a/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md +++ b/docs/troubleshooting/TROUBLESHOOTING-GUIDE.md @@ -134,7 +134,7 @@ If the smallest compound unique index consists of too many columns, the reading Depending on the source database, you may have to tweak some db settings to efficiently process the query. Alternatively you may have to think about adding a custom unique index manually. -## Unable to dowload migration report +## Unable to download migration report #### Symptom: @@ -146,4 +146,16 @@ Error visible in logs includes message: _Unable to make field private final java Ensure that JVM property includes: `--add-opens=java.base/java.time=ALL-UNNAMED` -This can be configured via `ccv2.additional.catalina.opts` on SAP Commerce Cloud services property \ No newline at end of file +This can be configured via `ccv2.additional.catalina.opts` on SAP Commerce Cloud services property + +## Application crashed or was restarted during ongoing migration + +#### Symptom: + +After application crashed or for example k8s pod was restarted during ongoing migration, when accessing _Data Migration_ view in HAC, on "Migration Log" area, some table copy tasks are visible as "_In progress..._", however actually no migration thread is active and no more new updates are provided. + +#### Solution: + +Investigate root cause of application crash/restart, afterwards mark all hanging tasks as aborted to be able to restart migration. 
To do so, get the ID of the current migration and execute the following Groovy snippet in HAC _Console_ -> _Scripting Languages_: + +`databaseMigrationService.markRemainingTasksAborted(migrationContext, 'MIGRATION_ID')`