From bf8c1ff4bbd4ccf4a6fca6b3888e10510b1f4909 Mon Sep 17 00:00:00 2001
From: "d.lechner" <d.lechner@adito.de>
Date: Wed, 16 Oct 2019 17:35:23 +0200
Subject: [PATCH] Changes in Actions for rebuilding Duplicates

---
 .../onActionProcess.js                        | 15 +++++----
 .../onActionProcess.js                        | 33 ++++---------------
 2 files changed, 16 insertions(+), 32 deletions(-)

diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
index 61251284d2..0b3d4905f1 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
@@ -3,18 +3,21 @@ import("DuplicateScanner_lib");
 
 var filterName = "OrganisationDuplicates";
 var targetEntity = "Organisation_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
+var recordBlockSize = 500;
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
-//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
-//logging.log("resultFields -> " + resultFields);
+logging.log(filterName + ": duplicateFieldsConfig -> " + duplicateFieldsConfig);
+logging.log(filterName + ": resultFields -> " + resultFields);
 
-logging.log("Delete all " + targetEntity + " duplicates");
+logging.log(filterName + ": Löschen von ORGANISATION Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-logging.log("Recalculate all " + targetEntity + " duplicates");
+logging.log(filterName + ": Neu berechnen von ORGANISATION Dubletten -> ");
 DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
 duplicateFieldsConfig, resultFields, null);
 
-DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
+logging.log(filterName + ": Refresh Unrelated Duplicates von ORGANISATION Dubletten -> ");
+DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
+
+logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
index 8f51a5021f..6189ff788b 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -1,42 +1,23 @@
-import("system.util");
-import("Employee_lib");
-import("system.process");
 import("system.logging");
 import("DuplicateScanner_lib");
 
-//let processParameters = {
-//    filterName: "PersonDuplicates",
-//    targetEntity: "Person_entity" //process.execute is only able to handle strings
-//}
-//let userId = EmployeeUtils.getCurrentUserId();
-//
-//try 
-//{
-//    let processId = "manualrun_rebuild_duplicatecache_" + util.getNewUUID();
-//    process.executeTimer(processId, "RebuildDuplicatesCache_serverProcess", 0, true, false, process.TIMERTYPE_SERVER_RUN, userId, false, process.THREADPRIORITY_LOW)
-//    process.stopTimer(processId);
-//} 
-//catch (exception) 
-//{ 
-//    logging.log(" exception-> " + exception);
-//}
-
-
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
+var recordBlockSize = 500;
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
-//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
-//logging.log("resultFields -> " + resultFields);
+logging.log(filterName + ": duplicateFieldsConfig -> " + duplicateFieldsConfig);
+logging.log(filterName + ": resultFields -> " + resultFields);
 
-logging.log("Delete all " + targetEntity + " duplicates");
+logging.log(filterName + ": Löschen von PERSON Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-logging.log("Recalculate all " + targetEntity + " duplicates");
+logging.log(filterName + ": Neu berechnen von PERSON Dubletten -> ");
 DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
 duplicateFieldsConfig, resultFields, null);
 
+logging.log(filterName + ": Refresh Unrelated Duplicates von PERSON Dubletten -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
 
+logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file
-- 
GitLab