diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
index 811f26d0b37abeb8167e4b63b6f7faed3df20e0b..61251284d271640ac5a1232508f8e6e3b2b9c88c 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
@@ -3,7 +3,8 @@ import("DuplicateScanner_lib");
 
 var filterName = "OrganisationDuplicates";
 var targetEntity = "Organisation_entity";
-var recordBlockSize = 500;
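+// block size for this scanner is now loaded from its configuration (see DuplicateScannerUtils.GetBlockSizeForScanner)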
+var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
index 155fee2cdccb0ff052ac342adbcc71990ea5cf8e..45556e0b712e7128f252bb47f63cf9abeffa073f 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -3,7 +3,8 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var recordBlockSize = 500;
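+// block size for this scanner is now loaded from its configuration (see DuplicateScannerUtils.GetBlockSizeForScanner)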
+var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index c89c0e2b40a33a3a021bb5e09bd8121242fad9b7..c9e8a0da62d501d85e4bf3a04e1908453555f7eb 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -466,6 +466,29 @@ DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pDuplicateIds
     return db.inserts(insertQueriesRay);
 }
 
+/* 
+ * Loads the configured block size for the specified scanner
+ * 
+ * @param {String} pFilterName Name of the filter to use
+ * @param {String} pTargetEntity The target entity which has been assigned to the filter's configuration
+ * @returns {Integer} Block size configured for the specified scanner; falls back to 500 if none is configured
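+ *
+ * @example
+ * // e.g. for the organisation duplicates scanner (see rebuildorganisationduplicatescache/onActionProcess.js)
+ * var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner("OrganisationDuplicates", "Organisation_entity");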
+ */
+DuplicateScannerUtils.GetBlockSizeForScanner = function(pFilterName, pTargetEntity)
+{
+    let blockSizeCondition = SqlCondition.begin()
+                                         .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+                                         .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+                                         .buildSql("select BLOCK_SIZE from DUPLICATESCANNER");
+    let blockSize = db.cell(blockSizeCondition);
+
+    // db.cell returns the value as a string; parse it and fall back to the previous default of 500 if nothing is configured
+    return blockSize ? parseInt(blockSize, 10) : 500;
+}
+
 /* 
  * Searches for a cluster which contains the duplicates specified by the parameterized array. <br />
  * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned.