diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
index 61251284d271640ac5a1232508f8e6e3b2b9c88c..0b3d4905f1abbd4e6fd832715d6ef86a22321084 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
@@ -3,18 +3,21 @@ import("DuplicateScanner_lib");
 
 var filterName = "OrganisationDuplicates";
 var targetEntity = "Organisation_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
+var recordBlockSize = 500;
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
-//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
-//logging.log("resultFields -> " + resultFields);
+logging.log(filterName + ": duplicateFieldsConfig -> " + duplicateFieldsConfig);
+logging.log(filterName + ": resultFields -> " + resultFields);
 
-logging.log("Delete all " + targetEntity + " duplicates");
+logging.log(filterName + ": Löschen von ORGANISATION Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-logging.log("Recalculate all " + targetEntity + " duplicates");
+logging.log(filterName + ": Neu berechnen von ORGANISATION Dubletten -> ");
 DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
 duplicateFieldsConfig, resultFields, null);
 
-DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
+logging.log(filterName + ": Refresh Unrelated Duplicates von ORGANISATION Dubletten -> ");
+DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
+
+logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
index 8f51a5021f81d034c362a267a625b3af842ae0b2..6189ff788b0a8919f52f556c38d1dd735ba27eec 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -1,42 +1,23 @@
-import("system.util");
-import("Employee_lib");
-import("system.process");
 import("system.logging");
 import("DuplicateScanner_lib");
 
-//let processParameters = {
-//    filterName: "PersonDuplicates",
-//    targetEntity: "Person_entity" //process.execute is only able to handle strings
-//}
-//let userId = EmployeeUtils.getCurrentUserId();
-//
-//try 
-//{
-//    let processId = "manualrun_rebuild_duplicatecache_" + util.getNewUUID();
-//    process.executeTimer(processId, "RebuildDuplicatesCache_serverProcess", 0, true, false, process.TIMERTYPE_SERVER_RUN, userId, false, process.THREADPRIORITY_LOW)
-//    process.stopTimer(processId);
-//} 
-//catch (exception) 
-//{ 
-//    logging.log(" exception-> " + exception);
-//}
-
-
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
+var recordBlockSize = 500;
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
-//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
-//logging.log("resultFields -> " + resultFields);
+logging.log(filterName + ": duplicateFieldsConfig -> " + duplicateFieldsConfig);
+logging.log(filterName + ": resultFields -> " + resultFields);
 
-logging.log("Delete all " + targetEntity + " duplicates");
+logging.log(filterName + ": Löschen von PERSON Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-logging.log("Recalculate all " + targetEntity + " duplicates");
+logging.log(filterName + ": Neu berechnen von PERSON Dubletten -> ");
 DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
 duplicateFieldsConfig, resultFields, null);
 
+logging.log(filterName + ": Refresh Unrelated Duplicates von PERSON Dubletten -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
 
+logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file