diff --git a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml
index de63025985eab1691645820c6c04b4830588ac4a..eb66aa7f870dff79b9f0653f66a2dbed4f5ade00 100644
--- a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml
+++ b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml
@@ -8,7 +8,6 @@
             <column name="DUPLICATESCANNER_ID" type="CHAR(36)">
                 <constraints nullable="false" />
             </column>
-            <column name="DB_FIELD_NAME" type="NVARCHAR(100)" />
             <column name="ENTITY_FIELD_NAME" type="NVARCHAR(100)" />
             <column name="IS_ID_FIELD" type="INTEGER" >
                 <constraints nullable="false"/>
diff --git a/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js b/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js
index 50c48d8c5bf53ca60830bb53f4533f24b8955f71..d4796d833a5d5c213fdb3282b2bac83ede763c6d 100644
--- a/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js
+++ b/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js
@@ -11,8 +11,25 @@ for (fieldname in model.fields)
     field = model.fields[fieldname]; 
     if(field.fieldType == project.ENTITYFIELDTYPE_FIELD)
     {
-        entityFields.push([field.name, field.name]);
+        entityFields.push([field.name, currentEntity + "." + field.name]);
     }
-//    logging.log(" Title: " + field.title); 
+//todo reactivate when loading of consumer data is implemented. see #1044577
+//    if(field.fieldType == project.ENTITYFIELDTYPE_CONSUMER)
+//    {
+//        let consumerEntity = field.entityName;
+//        
+//        if(consumerEntity == null || consumerEntity == "")
+//            continue;
+//        
+//        let consumerEntityFields = project.getEntityStructure(consumerEntity);
+//        for (consumerEntityFieldname in consumerEntityFields.fields) 
+//        { 
+//            consumerField = consumerEntityFields.fields[consumerEntityFieldname]; 
+//            if(consumerField.fieldType == project.ENTITYFIELDTYPE_FIELD)
+//            {
+//                entityFields.push([consumerField.name, consumerEntity + "." + consumerField.name]);
+//            }
+//        }
+//    }
 }
 result.object(entityFields);
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
index 306692120f96dda6f00c9e94cfb3ad8b5359d04b..96700d80744b5acd53d38302da3fddb3d049e7cf 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -3,7 +3,7 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-
+var recordBlockSize = 5;
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
@@ -14,7 +14,6 @@ logging.log("Löschen von PERSON Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log("Neu berechnen von PERSON Dubletten -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, 5,
-duplicateFieldsConfig, resultFields, null);
+DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index 7bddf37d22bf811588a61624bd5b794cd6d1e01f..aa83ebf70855fd6409e6430477c3dccb5b8a8a40 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -237,23 +237,15 @@ import("JditoFilter_lib");
 //##################################single scanForDuplicates############################################
 
 
-//var filterName = "PersonDuplicates";
-//var targetEntity = "Person_entity";
-//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
-//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
-//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
-//
-//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
-//                            + " join PERSON on PERSONID = PERSON_ID"
-//                            + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"
-//                            + " where Condition for the record to be checked";
-//let targetRecordsData = db.table(queryPersonFieldData);
-//
-//let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]);
-////The first field in this Array must always be the configured id field.
-//let idField = entityFieldValuesRay[0][0];
-//let idValue = entityFieldValuesRay[0][1];
-//
+var filterName = "PersonDuplicates";
+var targetEntity = "Person_entity";
+
+//Values to check, the same fields as configured
+let valuesToCheck = {};
+valuesToCheck["CONTACTID"] = "c7ddf982-0e58-4152-b82b-8f5673b0b729";
+valuesToCheck["FIRSTNAME"] = "Tim";
+valuesToCheck["GENDER"] = "m                                   ";
+
 //let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
 //{
 //    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
@@ -273,19 +265,17 @@ import("JditoFilter_lib");
 //    //reformat results to same structure as before
 //    return pPossibleDuplicatesRay;
 //};
-//
-////The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
-//DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
-//idField, idValue, formatToJsonAndCallWsCallback);
 
-//let entityStructure = project.getEntityStructure("Person_entity");
-//logging.log("entityStructure -> " + entityStructure);
-//logging.log("entityStructure.fields -> " + entityStructure.fields);
+//The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
+let pPossibleDuplicatesRay = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, valuesToCheck, null);
+
+logging.log(" pPossibleDuplicatesRay-> " + pPossibleDuplicatesRay.length);
+
+for (let i = 0; i < pPossibleDuplicatesRay.length; i++) 
+{
+    logging.log("pPossibleDuplicatesRay[i] -> " + pPossibleDuplicatesRay[i]);
+}
 
-//for (fieldname in model.fields) 
-//{ 
-//    field = model.fields[fieldname];
-//}
 
 
 //##############################################################################
@@ -296,58 +286,86 @@ import("JditoFilter_lib");
 //logging.log("Title: " + model.title); 
 //logging.log("Description: " + model.description); 
 //logging.log("UsePermissions: " + model.usePermissions); 
+//
 //for (fieldname in model.fields) 
 //{ 
 //    field = model.fields[fieldname]; 
-//    logging.log(" Name: " + field.name); 
-//    logging.log(" Type: " + field.fieldType); 
-//    logging.log(" Title: " + field.title); 
-//    logging.log(" Description: " + field.description); 
-//    logging.log(" UsePermissions: " + field.usePermissions); 
+//    if(field.fieldType == project.ENTITYFIELDTYPE_FIELD)
+//    {
+//        logging.log(" Name: " + field.name); 
+//        logging.log(" Type: " + field.fieldType); 
+//        logging.log(" Title: " + field.title); 
+//        logging.log(" Description: " + field.description); 
+//        logging.log(" UsePermissions: " + field.usePermissions); 
+//        logging.log("###################### -> ");
+//    }
+//    if(field.fieldType == project.ENTITYFIELDTYPE_CONSUMER)
+//    {
+//        let consumerEntity = field.entityName;
+//        
+//        if(consumerEntity == null || consumerEntity == "")
+//            continue;
+//        
+//        let consumerEntityFields = project.getEntityStructure(consumerEntity);
+//        for (consumerEntityFieldname in consumerEntityFields.fields) 
+//        { 
+//            consumerField = consumerEntityFields.fields[consumerEntityFieldname]; 
+//            if(consumerField.fieldType == project.ENTITYFIELDTYPE_FIELD)
+//            {
+//                logging.log(" Name: " + consumerField.name); 
+//                logging.log(" Type: " + consumerField.fieldType); 
+//                logging.log(" Title: " + consumerField.title); 
+//                logging.log(" Description: " + consumerField.description); 
+//                logging.log(" UsePermissions: " + consumerField.usePermissions); 
+//                logging.log("###################### -> ");
+//            }
+//        }
+//    }
 //}
 
+//##############################################################################
+
+//var model = project.getEntityStructure("Person_entity"); 
+//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+//
+//let combinedData = []
+//let entityFieldsToLoad = [];
+//for (field in duplicateFieldsConfig) 
+//{
+//    let entityFieldName = duplicateFieldsConfig[field][0];
+//    let isIdField = duplicateFieldsConfig[field][1];
+//    let useForIndexSearch = duplicateFieldsConfig[field][2];
+//    let entityFieldData = model[entityFieldName];
+//    combinedData.push(entityFieldName, isIdField, useForIndexSearch, entityFieldData);
+//}
+//
+//var filterName = "PersonDuplicates";
+//var targetEntity = "Person_entity";
+//DuplicateScannerUtils.LoadEntityRecords(targetEntity, entityFieldsToLoad, 0, 50);
 
+//Beispiel 1: 
+//Feld mit verknüpftem Consumer
 
-//##############################################################################
-var targetEntity = "Person_entity";
-filterName = "PersonDuplicates";
-let pRecordsBlockSize = 5;
-let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+//[entity, feldname, consumerName, ProviderName]
+//let test = ["Communication_entity", "STANDARD_EMAIL_COMMUNICATION", "EmailCommunications", "EmailCommunications"];
+//
+//let testrow = entities.createConfigForLoadingRows()
+//                        .fields([test[1]])
+//                        .entity(test[0])
+//                        .provider(test[3])
+//                        .addParameter("ContactId_param", "d4c1bec3-656f-45ec-ae03-1c4d729d99fe")
+//                        //.uid()
+//let resultTest = entities.getRows(testrow);
+//logging.log("resultTest -> " + JSON.stringify(resultTest));
 
-let entityFieldsToLoad = [];
-for (field in duplicateFieldsConfig) 
-{
-    entityFieldsToLoad.push(duplicateFieldsConfig[field][0]);
-}
-logging.log("entityFieldsToLoad -> " + entityFieldsToLoad);
-let loadInitialRecordsConfig = entities.createConfigForLoadingRows()
-                                        .entity(targetEntity)
-                                        .fields(entityFieldsToLoad)
-                                        .count(pRecordsBlockSize);
 
-let targetRecords = entities.getRows(loadInitialRecordsConfig);
-logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
 
-let currentRecordIndex = pRecordsBlockSize;
-while(targetRecords.length > 0)
-{
-    //scan for duplicates
-    
-    if(targetRecords.length < pRecordsBlockSize)
-    {
-        logging.log("weniger records geladen als möglich => Ende der verfügbaren Records -> Abbrechen");
-        break;
-    }
-    
-    let loadNextRows = entities.createConfigForLoadingRows()
-                                .entity(targetEntity)
-                                .fields(entityFieldsToLoad)
-                                .count(pRecordsBlockSize)
-                                .startrow(currentRecordIndex);
-    logging.log("Nächster Block wird geladen mit startRow -> " + currentRecordIndex);
-    currentRecordIndex += pRecordsBlockSize;
-    targetRecords = entities.getRows(loadNextRows);
-    logging.log("Nächster Block geladen mit targetRecords -> " + JSON.stringify(targetRecords));
-    
-    
-}
\ No newline at end of file
+//Beispiel 2: 
+//Feld direkt von anderem Entity
+//let testrow = entities.createConfigForLoadingRows()
+//                        .fields(["ZIP"])
+//                        .entity("Address_entity")
+//                        .uid("1a67eaa7-21da-4a18-97ab-755ac5cb74f7")
+//
+//let resultTest = entities.getRows(testrow);
+//logging.log("resultTest Beispiel 2 -> " + JSON.stringify(resultTest));
\ No newline at end of file
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 1dce76989e06d402321d616a1e18bb458056080f..9ba5c72f30fa51aaabaeb44af754798b5c878c15 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -1,3 +1,4 @@
+import("JditoFilter_lib");
 import("system.process");
 import("system.util");
 import("system.vars");
@@ -251,25 +252,17 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  * <br />
  * @param {String} pFilterName Name of the filter to use
  * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
- * @param {String} pQueryTargetRecords Query which holds the values that are being used as configured in the filter.
- * @param {String} pDuplicateFieldsConfig The index field config. Use "DuplicateScannerUtils.LoadIndexFieldsConfiguration"
- * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
+ * @param {Integer} pRecordsBlockSize The records to be checked are loaded in blocks of this size.
  * @param {String} pFormatValuesConsumeWebserviceCallback Null if no external service is used otherwise a function with one parameter.
- * @param {String[]} pDuplicateFieldsConfig The configuration of the fields and their usage. @see DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration
- * Important: The first element has to be the id field!
  * @return {Int} Count of duplicate clusters created
  * 
  * @example
  * var filterName = "PersonDuplicates";
  * var targetEntity = "Person_entity";
- *
- * let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+ * var recordBlockSize = 5;
+ * 
  * let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
- * let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
- * let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
- *                             + " join PERSON on PERSONID = PERSON_ID"
- *                             + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";
- *
+ * 
  * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
  * {
  *      let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
@@ -291,25 +284,46 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  * };
  *
  * DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
- * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
- * duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);
+ * 
+ * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, formatToJsonAndCallWsCallback);
  *
  * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
  */
 DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
-pRecordsBlockSize, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWebserviceCallback)
+pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
 {   
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
     let alreadyIdentifiedIds = [];
     
-    let entityFieldsToLoad = [];
-    for (field in duplicateFieldsConfig) 
-    {
-        entityFieldsToLoad.push(duplicateFieldsConfig[field][0]);
-    }
-
-    logging.log("entityFieldsToLoad -> " + entityFieldsToLoad);
+    let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
+    let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
 
+    let filterFieldConfigs = DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
+    logging.log("filterFieldConfigs -> " + filterFieldConfigs);
+    
+    let entityFieldsToLoad = DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
+    
+    logging.log("entityFieldsToLoad initial-> " + entityFieldsToLoad);
+    
+    if(filterFieldConfigs.length > 0)
+    {
+        let INDEX_FILTER_FIELD_NAME = 0;
+        for (let i = 0; i < filterFieldConfigs.length; i++) 
+        {
+            let filterFieldConfig = filterFieldConfigs[i];
+            let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
+            logging.log("filterFieldConfig -> " +filterFieldConfig );
+            if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
+            {
+                logging.log("Noch nicht vorhanden, hinzufügen -> ");
+                duplicateFieldsConfig.push(filterFieldConfig);
+                entityFieldsToLoad.push(filterFieldName);
+            }
+        }
+    }    
+    
+    logging.log("entityFieldsToLoad vollständig-> " + entityFieldsToLoad);
+    logging.log("duplicateFieldsConfig vollständig-> " + duplicateFieldsConfig);
     
     let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFieldsToLoad, 0, pRecordsBlockSize);
     logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
@@ -318,7 +332,7 @@ pRecordsBlockSize, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWe
     while(targetRecords.length > 0)
     {
         foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pFilterName, pTargetEntity, targetRecords,
-                                pDuplicateFieldsConfig, pResultFields, useExternalWebservice, 
+                                duplicateFieldsConfig, resultFields, useExternalWebservice, 
                                 pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds);
         logging.log("gefundene ids zum ignorieren foundDuplicateIds -> " + foundDuplicateIds);
         alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
@@ -339,6 +353,36 @@ pRecordsBlockSize, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWe
     }
 }
 
+DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig = function(pDuplicateFieldsConfig)
+{
+    let entityFieldsToLoad = [];
+    for (field in pDuplicateFieldsConfig) 
+    {
+        entityFieldsToLoad.push(pDuplicateFieldsConfig[field][0]);
+    }
+    return entityFieldsToLoad;
+}
+DuplicateScannerUtils._getFieldConfigsFromFilterFields = function(pFilterName, pTargetEntity)
+{
+    let resultFields = [];
+    let filterFields = [];
+    let filters = DuplicateScannerUtils.loadFilters(pFilterName, pTargetEntity)
+
+    for (let i = 0; i < filters.length; i++) 
+    {
+        let filter = JSON.parse(filters[i][0]).filter;
+        let fields = JditoFilterUtils.getFilterFields(filter.childs);
+        filterFields = filterFields.concat(fields);
+    }
+    for (let i = 0; i < filterFields.length; i++) 
+    {
+        let filterField = filterFields[i];
+        resultFields.push([filterField, 0, 0]);
+    }
+    return resultFields;
+}
+
+
 DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTargetRecordsData,
 pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds)
 {
@@ -412,16 +456,12 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
 
 DuplicateScannerUtils.LoadEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad)
 {
-    logging.log("pTargetEntity -> " + pTargetEntity);
-    logging.log("pEntityFields -> " + pEntityFields);
-    logging.log("pCountRecordsToLoad -> " + pCountRecordsToLoad);
-    logging.log("pStartRow -> " + pStartRow);
     let getRowsConfig = entities.createConfigForLoadingRows()
                                 .entity(pTargetEntity)
                                 .fields(pEntityFields)
                                 .count(pCountRecordsToLoad)
                                 .startrow(pStartRow);
-    return entities.getRows(getRowsConfig)          
+    return entities.getRows(getRowsConfig);
 }
 /* 
  * Loads the configured resultfields as array
@@ -479,11 +519,7 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  *
  * @param {String} pFilterName Name of the filter
  * @param {String} pTargetEntity Respective target entity
- * @param {String[[]]} pFilterFieldValueRays Array of Arrays containing the name of a used field and its value.
- * Use "DuplicateScannerUtils.BuildEntityFieldNameValueRays". The fieldnames and values have to be in the same order
- * @param {String[]} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
- * @param {String} pRecordIdFieldToIgnore Name of the id field e.g. the contact id in case of a Person duplicate
- * @param {String} pRecordIdValueToIgnore Value to the id field
+ * @param {Object} pValuesToCheck An object with key value pairs which hold the name of the entity field as key and its value as value. See the example "valuesToCheck"
  * @param {function} pFormatValuesConsumeWebserviceCallback Null if no external service is used otherwise a function with one parameter.
  * If the function is called is based on the configuration of the current scanner
  * @returns {[["key", "value"]]} Array of Key-Value-Pairs based on the configured resultfields, if an external webservices was used
@@ -492,20 +528,12 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  * @example
  * var filterName = "PersonDuplicates";
  * var targetEntity = "Person_entity";
- * let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
- * let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
- * let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
- *
- * let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
- *                             + " join PERSON on PERSONID = PERSON_ID"
- *                             + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"
- *                             + " where Condition for the record to be checked";
- * let targetRecordsData = db.table(queryPersonFieldData);
- *
- * let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]);
- * //The first field in this Array must always be the configured id field.
- * let idField = entityFieldValuesRay[0][0];
- * let idValue = entityFieldValuesRay[0][1];
+ * 
+ * //Values to check, the same fields as configured
+ * let valuesToCheck = {};
+ * valuesToCheck["CONTACTID"] = "c7ddf982-0e58-4152-b82b-8f5673b0b729";
+ * valuesToCheck["FIRSTNAME"] = "Tim";
+ * valuesToCheck["GENDER"] = "m                                   ";
  *
  * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
  * {
@@ -528,16 +556,42 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  * };
  *
  * //The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
- * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
- * idField, idValue, formatToJsonAndCallWsCallback);
+ * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, valuesToCheck,
+ * formatToJsonAndCallWsCallback);
  */
-DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
-pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback)
+DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pValuesToCheck,
+pFormatValuesConsumeWebserviceCallback)
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
+    let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
+
+    let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
+    let entityFieldsToLoad = DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
+
+    let filterFieldConfigs = DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
+    if(filterFieldConfigs.length > 0)
+    {
+        let INDEX_FILTER_FIELD_NAME = 0;
+        for (let i = 0; i < filterFieldConfigs.length; i++)
+        {
+            let filterFieldConfig = filterFieldConfigs[i];
+            let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
+            logging.log("filterFieldConfig -> " +filterFieldConfig );
+            if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
+            {
+                logging.log("Noch nicht vorhanden, hinzufügen -> ");
+                duplicateFieldsConfig.push(filterFieldConfig);
+            }
+        }
+    }
+    
+    let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, pValuesToCheck);
+    //The first field in this Array must always be the configured id field.
+    let idField = entityFieldValuesRay[0][0];
+    let idValue = entityFieldValuesRay[0][1];
 
     return _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
-                        pFilterFieldValueRays, pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore,
+                        entityFieldValuesRay, resultFields, idField, idValue,
                          pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
 }
 
@@ -674,7 +728,7 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
  * Creates an array of arrays containing the entity field name paired with it's value.
  *
  * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration()
- * @param {[]} pTargetRecordData One record containing the values for the configured fields. Has to be in the same order as the fields in the first parameter
+ * @param {Object} pTargetRecordData One record containing the values for the configured fields, in the format {"<entityFieldName>": "<value>"}
  * @return {[[]]} An array of arrays containing the entity field name an its value. [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"]]
  * @example
  * pDuplicateFieldsConfig
diff --git a/process/RebuildDuplicatesCache_serverProcess/process.js b/process/RebuildDuplicatesCache_serverProcess/process.js
index 74346c3308ee95adb4f7dc98355cf4f34b5f6a7f..5a6cd7aaac6644a224cc751b19b03d6325e9df5c 100644
--- a/process/RebuildDuplicatesCache_serverProcess/process.js
+++ b/process/RebuildDuplicatesCache_serverProcess/process.js
@@ -23,32 +23,28 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var resultFieldsIdFieldName = "CONTACTID";
-var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from CONTACT"
-                            + " join PERSON on PERSON.PERSONID = CONTACT.PERSON_ID";
-var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"];
+var recordBlockSize = 5;
 
-DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
+DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
-tmpFieldsInFilterRay, resultFieldsIdFieldName);
+DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
 
 
 // Build Organisation duplicate cache
 
-filterName = "OrganisationDuplicates";
-targetEntity = "Organisation_entity";
-resultFieldsIdFieldName = "CONTACTID";
-queryPersonContactIds = "select CONTACTID, ORGANISATION.\"NAME\" from ORGANISATION"
-                            + " join CONTACT on CONTACT.CONTACTID = ORGANISATION.ORGANISATIONID"
-                            + " where CONTACTID != '0'";
-tmpFieldsInFilterRay = ["CONTACTID", "NAME"];
-
-DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
-
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
-tmpFieldsInFilterRay, resultFieldsIdFieldName);
-
-DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
+//filterName = "OrganisationDuplicates";
+//targetEntity = "Organisation_entity";
+//resultFieldsIdFieldName = "CONTACTID";
+//queryPersonContactIds = "select CONTACTID, ORGANISATION.\"NAME\" from ORGANISATION"
+//                            + " join CONTACT on CONTACT.CONTACTID = ORGANISATION.ORGANISATIONID"
+//                            + " where CONTACTID != '0'";
+//tmpFieldsInFilterRay = ["CONTACTID", "NAME"];
+//
+//DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
+//
+//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
+//tmpFieldsInFilterRay, resultFieldsIdFieldName);
+//
+//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file