From c664b39ba17d4d10bd7f88b68a2d7e99c31f8386 Mon Sep 17 00:00:00 2001
From: "d.buechler" <d.buechler@adito.de>
Date: Fri, 27 Sep 2019 11:44:58 +0200
Subject: [PATCH] The function ScanForDuplicates has been refactored and is now
 much easier to use: more of the work is handled internally, which results in a
 simpler API. "RebuildDuplicatesCache" and "ScanForDuplicates" have been
 refactored. The fields used in the prefilter no longer have to be configured
 manually by the user. Instead, the fields defined by the prefilter are now
 added automatically to the list of fields to be used. If a field has already
 been added via the user's configuration, it is not added again by code, which
 ensures that each field is mentioned exactly once. To reflect these
 changes, the javadoc including the example of ScanForDuplicates and
 RebuildDuplicates has been updated. Until it is possible to load values from
 a consumed entity dynamically via code, the option to select entity fields
 from consumed entities via the UI has been removed. See ticket #1044577.
 Now only "native" fields directly from the base entity can be selected.
 Since the entity fields are now used in the configuration of the duplicate
 search, no DB fields have to be configured any more. The DB_FIELD_NAME column
 has been removed from the Liquibase changelog.

---
 .../create_duplicateScannerIndexConfig.xml    |   1 -
 .../entity_field_name/dropDownProcess.js      |  21 ++-
 .../onActionProcess.js                        |   4 +-
 .../testduplicatescanner/onActionProcess.js   | 164 ++++++++++--------
 process/DuplicateScanner_lib/process.js       | 160 +++++++++++------
 .../process.js                                |  38 ++--
 6 files changed, 236 insertions(+), 152 deletions(-)

diff --git a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml
index de63025985..eb66aa7f87 100644
--- a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml
+++ b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml
@@ -8,7 +8,6 @@
             <column name="DUPLICATESCANNER_ID" type="CHAR(36)">
                 <constraints nullable="false" />
             </column>
-            <column name="DB_FIELD_NAME" type="NVARCHAR(100)" />
             <column name="ENTITY_FIELD_NAME" type="NVARCHAR(100)" />
             <column name="IS_ID_FIELD" type="INTEGER" >
                 <constraints nullable="false"/>
diff --git a/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js b/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js
index 50c48d8c5b..d4796d833a 100644
--- a/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js
+++ b/entity/DuplicateScannerIndexConfig_entity/entityfields/entity_field_name/dropDownProcess.js
@@ -11,8 +11,25 @@ for (fieldname in model.fields)
     field = model.fields[fieldname]; 
     if(field.fieldType == project.ENTITYFIELDTYPE_FIELD)
     {
-        entityFields.push([field.name, field.name]);
+        entityFields.push([field.name, currentEntity + "." + field.name]);
     }
-//    logging.log(" Title: " + field.title); 
+//todo reactivate when loading of consumer data is implemented. see #1044577
+//    if(field.fieldType == project.ENTITYFIELDTYPE_CONSUMER)
+//    {
+//        let consumerEntity = field.entityName;
+//        
+//        if(consumerEntity == null || consumerEntity == "")
+//            continue;
+//        
+//        let consumerEntityFields = project.getEntityStructure(consumerEntity);
+//        for (consumerEntityFieldname in consumerEntityFields.fields) 
+//        { 
+//            consumerField = consumerEntityFields.fields[consumerEntityFieldname]; 
+//            if(consumerField.fieldType == project.ENTITYFIELDTYPE_FIELD)
+//            {
+//                entityFields.push([consumerField.name, consumerEntity + "." + consumerField.name]);
+//            }
+//        }
+//    }
 }
 result.object(entityFields);
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
index 306692120f..96700d8074 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -3,7 +3,7 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-
+var recordBlockSize = 5;
 let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
 let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
@@ -14,7 +14,7 @@ logging.log("Löschen von PERSON Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log("Neu berechnen von PERSON Dubletten -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, 5,
+DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
 duplicateFieldsConfig, resultFields, null);
 
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index 7bddf37d22..aa83ebf708 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -237,23 +237,15 @@ import("JditoFilter_lib");
 //##################################single scanForDuplicates############################################
 
 
-//var filterName = "PersonDuplicates";
-//var targetEntity = "Person_entity";
-//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
-//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
-//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
-//
-//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
-//                            + " join PERSON on PERSONID = PERSON_ID"
-//                            + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"
-//                            + " where Condition for the record to be checked";
-//let targetRecordsData = db.table(queryPersonFieldData);
-//
-//let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]);
-////The first field in this Array must always be the configured id field.
-//let idField = entityFieldValuesRay[0][0];
-//let idValue = entityFieldValuesRay[0][1];
-//
+var filterName = "PersonDuplicates";
+var targetEntity = "Person_entity";
+
+//Values to check, the same fields as configured
+let valuesToCheck = {};
+valuesToCheck["CONTACTID"] = "c7ddf982-0e58-4152-b82b-8f5673b0b729";
+valuesToCheck["FIRSTNAME"] = "Tim";
+valuesToCheck["GENDER"] = "m                                   ";
+
 //let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
 //{
 //    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
@@ -273,19 +265,17 @@ import("JditoFilter_lib");
 //    //reformat results to same structure as before
 //    return pPossibleDuplicatesRay;
 //};
-//
-////The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
-//DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
-//idField, idValue, formatToJsonAndCallWsCallback);
 
-//let entityStructure = project.getEntityStructure("Person_entity");
-//logging.log("entityStructure -> " + entityStructure);
-//logging.log("entityStructure.fields -> " + entityStructure.fields);
+//The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
+let pPossibleDuplicatesRay = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, valuesToCheck, null);
+
+logging.log(" pPossibleDuplicatesRay-> " + pPossibleDuplicatesRay.length);
+
+for (let i = 0; i < pPossibleDuplicatesRay.length; i++) 
+{
+    logging.log("pPossibleDuplicatesRay[i] -> " + pPossibleDuplicatesRay[i]);
+}
 
-//for (fieldname in model.fields) 
-//{ 
-//    field = model.fields[fieldname];
-//}
 
 
 //##############################################################################
@@ -296,58 +286,86 @@ import("JditoFilter_lib");
 //logging.log("Title: " + model.title); 
 //logging.log("Description: " + model.description); 
 //logging.log("UsePermissions: " + model.usePermissions); 
+//
 //for (fieldname in model.fields) 
 //{ 
 //    field = model.fields[fieldname]; 
-//    logging.log(" Name: " + field.name); 
-//    logging.log(" Type: " + field.fieldType); 
-//    logging.log(" Title: " + field.title); 
-//    logging.log(" Description: " + field.description); 
-//    logging.log(" UsePermissions: " + field.usePermissions); 
+//    if(field.fieldType == project.ENTITYFIELDTYPE_FIELD)
+//    {
+//        logging.log(" Name: " + field.name); 
+//        logging.log(" Type: " + field.fieldType); 
+//        logging.log(" Title: " + field.title); 
+//        logging.log(" Description: " + field.description); 
+//        logging.log(" UsePermissions: " + field.usePermissions); 
+//        logging.log("###################### -> ");
+//    }
+//    if(field.fieldType == project.ENTITYFIELDTYPE_CONSUMER)
+//    {
+//        let consumerEntity = field.entityName;
+//        
+//        if(consumerEntity == null || consumerEntity == "")
+//            continue;
+//        
+//        let consumerEntityFields = project.getEntityStructure(consumerEntity);
+//        for (consumerEntityFieldname in consumerEntityFields.fields) 
+//        { 
+//            consumerField = consumerEntityFields.fields[consumerEntityFieldname]; 
+//            if(consumerField.fieldType == project.ENTITYFIELDTYPE_FIELD)
+//            {
+//                logging.log(" Name: " + consumerField.name); 
+//                logging.log(" Type: " + consumerField.fieldType); 
+//                logging.log(" Title: " + consumerField.title); 
+//                logging.log(" Description: " + consumerField.description); 
+//                logging.log(" UsePermissions: " + consumerField.usePermissions); 
+//                logging.log("###################### -> ");
+//            }
+//        }
+//    }
 //}
 
+//##############################################################################
+
+//var model = project.getEntityStructure("Person_entity"); 
+//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+//
+//let combinedData = []
+//let entityFieldsToLoad = [];
+//for (field in duplicateFieldsConfig) 
+//{
+//    let entityFieldName = duplicateFieldsConfig[field][0];
+//    let isIdField = duplicateFieldsConfig[field][1];
+//    let useForIndexSearch = duplicateFieldsConfig[field][2];
+//    let entityFieldData = model[entiyFieldName];
+//    combinedData.push(entityFieldName, isIdField, useForIndexSearch, entityFieldData);
+//}
+//
+//var filterName = "PersonDuplicates";
+//var targetEntity = "Person_entity";
+//DuplicateScannerUtils.LoadEntityRecords(targetEntity, entityFieldsToLoad, 0, 50);
 
+//Beispiel 1: 
+//Feld mit verknüpftem Consumer
 
-//##############################################################################
-var targetEntity = "Person_entity";
-filterName = "PersonDuplicates";
-let pRecordsBlockSize = 5;
-let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+//[entity, feldname, consumerName, ProviderName]
+//let test = ["Communication_entity", "STANDARD_EMAIL_COMMUNICATION", "EmailCommunications", "EmailCommunications"];
+//
+//let testrow = entities.createConfigForLoadingRows()
+//                        .fields([test[1]])
+//                        .entity(test[0])
+//                        .provider(test[3])
+//                        .addParameter("ContactId_param", "d4c1bec3-656f-45ec-ae03-1c4d729d99fe")
+//                        //.uid()
+//let resultTest = entities.getRows(testrow);
+//logging.log("resultTest -> " + JSON.stringify(resultTest));
 
-let entityFieldsToLoad = [];
-for (field in duplicateFieldsConfig) 
-{
-    entityFieldsToLoad.push(duplicateFieldsConfig[field][0]);
-}
-logging.log("entityFieldsToLoad -> " + entityFieldsToLoad);
-let loadInitialRecordsConfig = entities.createConfigForLoadingRows()
-                                        .entity(targetEntity)
-                                        .fields(entityFieldsToLoad)
-                                        .count(pRecordsBlockSize);
 
-let targetRecords = entities.getRows(loadInitialRecordsConfig);
-logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
 
-let currentRecordIndex = pRecordsBlockSize;
-while(targetRecords.length > 0)
-{
-    //scan for duplicates
-    
-    if(targetRecords.length < pRecordsBlockSize)
-    {
-        logging.log("weniger records geladen als möglich => Ende der verfügbaren Records -> Abbrechen");
-        break;
-    }
-    
-    let loadNextRows = entities.createConfigForLoadingRows()
-                                .entity(targetEntity)
-                                .fields(entityFieldsToLoad)
-                                .count(pRecordsBlockSize)
-                                .startrow(currentRecordIndex);
-    logging.log("Nächster Block wird geladen mit startRow -> " + currentRecordIndex);
-    currentRecordIndex += pRecordsBlockSize;
-    targetRecords = entities.getRows(loadNextRows);
-    logging.log("Nächster Block geladen mit targetRecords -> " + JSON.stringify(targetRecords));
-    
-    
-}
\ No newline at end of file
+//Beispiel 2: 
+//Feld direkt von anderem Entity
+//let testrow = entities.createConfigForLoadingRows()
+//                        .fields(["ZIP"])
+//                        .entity("Address_entity")
+//                        .uid("1a67eaa7-21da-4a18-97ab-755ac5cb74f7")
+//
+//let resultTest = entities.getRows(testrow);
+//logging.log("resultTest Beispiel 2 -> " + JSON.stringify(resultTest));
\ No newline at end of file
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 1dce76989e..9ba5c72f30 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -1,3 +1,4 @@
+import("JditoFilter_lib");
 import("system.process");
 import("system.util");
 import("system.vars");
@@ -251,25 +252,17 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  * <br />
  * @param {String} pFilterName Name of the filter to use
  * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
- * @param {String} pQueryTargetRecords Query which holds the values that are being used as configured in the filter.
- * @param {String} pDuplicateFieldsConfig The index field config. Use "DuplicateScannerUtils.LoadIndexFieldsConfiguration"
- * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
+ * @param {String} pRecordsBlockSize The values which are checked get loaded in blocks.
  * @param {String} pFormatValuesConsumeWebserviceCallback Null if no external service is used otherwise a function with one parameter.
- * @param {String[]} pDuplicateFieldsConfig The configuration of the fields and their usage. @see DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration
- * Important: The first element has to be the id field!
  * @return {Int} Count of duplicate clusters created
  * 
  * @example
  * var filterName = "PersonDuplicates";
  * var targetEntity = "Person_entity";
- *
- * let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+ * var recordBlockSize = 5;
+ * 
  * let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
- * let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
- * let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
- *                             + " join PERSON on PERSONID = PERSON_ID"
- *                             + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";
- *
+ * 
  * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
  * {
  *      let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
@@ -291,25 +284,46 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  * };
  *
  * DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
- * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
- * duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);
+ * 
+ * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, pRecordsBlockSize, formatToJsonAndCallWsCallback);
  *
  * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
  */
 DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
-pRecordsBlockSize, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWebserviceCallback)
+pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
 {   
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
     let alreadyIdentifiedIds = [];
     
-    let entityFieldsToLoad = [];
-    for (field in duplicateFieldsConfig) 
-    {
-        entityFieldsToLoad.push(duplicateFieldsConfig[field][0]);
-    }
-
-    logging.log("entityFieldsToLoad -> " + entityFieldsToLoad);
+    let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
+    let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
 
+    let filterFieldConfigs = DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
+    logging.log("filterFieldConfigs -> " + filterFieldConfigs);
+    
+    let entityFieldsToLoad = DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
+    
+    logging.log("entityFieldsToLoad initial-> " + entityFieldsToLoad);
+    
+    if(filterFieldConfigs.length > 0)
+    {
+        let INDEX_FILTER_FIELD_NAME = 0;
+        for (let i = 0; i < filterFieldConfigs.length; i++) 
+        {
+            let filterFieldConfig = filterFieldConfigs[i];
+            let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
+            logging.log("filterFieldConfig -> " +filterFieldConfig );
+            if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
+            {
+                logging.log("Noch nicht vorhanden, hinzufügen -> ");
+                duplicateFieldsConfig.push(filterFieldConfig);
+                entityFieldsToLoad.push(filterFieldName);
+            }
+        }
+    }    
+    
+    logging.log("entityFieldsToLoad vollsätndig-> " + entityFieldsToLoad);
+    logging.log("duplicateFieldsConfig vollsätndig-> " + duplicateFieldsConfig);
     
     let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFieldsToLoad, 0, pRecordsBlockSize);
     logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
@@ -318,7 +332,7 @@ pRecordsBlockSize, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWe
     while(targetRecords.length > 0)
     {
         foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pFilterName, pTargetEntity, targetRecords,
-                                pDuplicateFieldsConfig, pResultFields, useExternalWebservice, 
+                                duplicateFieldsConfig, resultFields, useExternalWebservice, 
                                 pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds);
         logging.log("gefundene ids zum ignorieren foundDuplicateIds -> " + foundDuplicateIds);
         alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
@@ -339,6 +353,36 @@ pRecordsBlockSize, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWe
     }
 }
 
+DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig = function(pDuplicateFieldsConfig)
+{
+    let entityFieldsToLoad = [];
+    for (field in pDuplicateFieldsConfig) 
+    {
+        entityFieldsToLoad.push(pDuplicateFieldsConfig[field][0]);
+    }
+    return entityFieldsToLoad;
+}
+DuplicateScannerUtils._getFieldConfigsFromFilterFields = function(pFilterName, pTargetEntity)
+{
+    let resultFields = [];
+    let filterFields = [];
+    let filters = DuplicateScannerUtils.loadFilters(pFilterName, pTargetEntity)
+
+    for (let i = 0; i < filters.length; i++) 
+    {
+        let filter = JSON.parse(filters[i][0]).filter;
+        let fields = JditoFilterUtils.getFilterFields(filter.childs);
+        filterFields = filterFields.concat(fields);
+    }
+    for (let i = 0; i < filterFields.length; i++) 
+    {
+        let filterField = filterFields[i];
+        resultFields.push([filterField, 0, 0]);
+    }
+    return resultFields;
+}
+
+
 DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTargetRecordsData,
 pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds)
 {
@@ -412,16 +456,12 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
 
 DuplicateScannerUtils.LoadEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad)
 {
-    logging.log("pTargetEntity -> " + pTargetEntity);
-    logging.log("pEntityFields -> " + pEntityFields);
-    logging.log("pCountRecordsToLoad -> " + pCountRecordsToLoad);
-    logging.log("pStartRow -> " + pStartRow);
     let getRowsConfig = entities.createConfigForLoadingRows()
                                 .entity(pTargetEntity)
                                 .fields(pEntityFields)
                                 .count(pCountRecordsToLoad)
                                 .startrow(pStartRow);
-    return entities.getRows(getRowsConfig)          
+    return entities.getRows(getRowsConfig);
 }
 /* 
  * Loads the configured resultfields as array
@@ -479,11 +519,7 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  *
  * @param {String} pFilterName Name of the filter
  * @param {String} pTargetEntity Respective target entity
- * @param {String[[]]} pFilterFieldValueRays Array of Arrays containing the name of a used field and its value.
- * Use "DuplicateScannerUtils.BuildEntityFieldNameValueRays". The fieldnames and values have to be in the same order
- * @param {String[]} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
- * @param {String} pRecordIdFieldToIgnore Name of the id field e.g. the contact id in case of a Person duplicate
- * @param {String} pRecordIdValueToIgnore Value to the id field
+ * @param {String} pValuesToCheck An object with key value pairs which hold the name of the entity field as key and it's value as value. See the example "valuesToCheck"
  * @param {function} pFormatValuesConsumeWebserviceCallback Null if no external service is used otherwise a function with one parameter.
  * If the function is called is based on the configuration of the current scanner
  * @returns {[["key", "value"]]} Array of Key-Value-Pairs based on the configured resultfields, if an external webservices was used
@@ -492,20 +528,12 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  * @example
  * var filterName = "PersonDuplicates";
  * var targetEntity = "Person_entity";
- * let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
- * let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
- * let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
- *
- * let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
- *                             + " join PERSON on PERSONID = PERSON_ID"
- *                             + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"
- *                             + " where Condition for the record to be checked";
- * let targetRecordsData = db.table(queryPersonFieldData);
- *
- * let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]);
- * //The first field in this Array must always be the configured id field.
- * let idField = entityFieldValuesRay[0][0];
- * let idValue = entityFieldValuesRay[0][1];
+ * 
+ * //Values to check, the same fields as configured
+ * let valuesToCheck = {};
+ * valuesToCheck["CONTACTID"] = "c7ddf982-0e58-4152-b82b-8f5673b0b729";
+ * valuesToCheck["FIRSTNAME"] = "Tim";
+ * valuesToCheck["GENDER"] = "m                                   ";
  *
  * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
  * {
@@ -528,16 +556,42 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  * };
  *
  * //The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
- * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
- * idField, idValue, formatToJsonAndCallWsCallback);
+ * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, valuesToCheck,
+ * formatToJsonAndCallWsCallback);
  */
-DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
-pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback)
+DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pValuesToCheck,
+pFormatValuesConsumeWebserviceCallback)
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
+    let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
+
+    let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
+    let entityFieldsToLoad = DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
+
+    let filterFieldConfigs = DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
+    if(filterFieldConfigs.length > 0)
+    {
+        let INDEX_FILTER_FIELD_NAME = 0;
+        for (let i = 0; i < filterFieldConfigs.length; i++)
+        {
+            let filterFieldConfig = filterFieldConfigs[i];
+            let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
+            logging.log("filterFieldConfig -> " +filterFieldConfig );
+            if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
+            {
+                logging.log("Noch nicht vorhanden, hinzufügen -> ");
+                duplicateFieldsConfig.push(filterFieldConfig);
+            }
+        }
+    }
+    
+    let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, pValuesToCheck);
+    //The first field in this Array must always be the configured id field.
+    let idField = entityFieldValuesRay[0][0];
+    let idValue = entityFieldValuesRay[0][1];
 
     return _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
-                        pFilterFieldValueRays, pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore,
+                        entityFieldValuesRay, resultFields, idField, idValue,
                          pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
 }
 
@@ -674,7 +728,7 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
  * Creates an array of arrays containing the entity field name paired with it's value.
  *
  * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration()
- * @param {[]} pTargetRecordData One record containing the values for the configured fields. Has to be in the same order as the fields in the first parameter
+ * @param {{"key", "value"}} pTargetRecordData One record containing the values for the configured fields. It's in the format of {"key(=EntityFieldName", "Value"}
  * @return {[[]]} An array of arrays containing the entity field name an its value. [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"]]
  * @example
  * pDuplicateFieldsConfig
diff --git a/process/RebuildDuplicatesCache_serverProcess/process.js b/process/RebuildDuplicatesCache_serverProcess/process.js
index 74346c3308..5a6cd7aaac 100644
--- a/process/RebuildDuplicatesCache_serverProcess/process.js
+++ b/process/RebuildDuplicatesCache_serverProcess/process.js
@@ -23,32 +23,28 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var resultFieldsIdFieldName = "CONTACTID";
-var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from CONTACT"
-                            + " join PERSON on PERSON.PERSONID = CONTACT.PERSON_ID";
-var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"];
+var recordBlockSize = 5;
 
-DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
+DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
-tmpFieldsInFilterRay, resultFieldsIdFieldName);
+DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
 
 
 // Build Organisation duplicate cache
 
-filterName = "OrganisationDuplicates";
-targetEntity = "Organisation_entity";
-resultFieldsIdFieldName = "CONTACTID";
-queryPersonContactIds = "select CONTACTID, ORGANISATION.\"NAME\" from ORGANISATION"
-                            + " join CONTACT on CONTACT.CONTACTID = ORGANISATION.ORGANISATIONID"
-                            + " where CONTACTID != '0'";
-tmpFieldsInFilterRay = ["CONTACTID", "NAME"];
-
-DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
-
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
-tmpFieldsInFilterRay, resultFieldsIdFieldName);
-
-DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
+//filterName = "OrganisationDuplicates";
+//targetEntity = "Organisation_entity";
+//resultFieldsIdFieldName = "CONTACTID";
+//queryPersonContactIds = "select CONTACTID, ORGANISATION.\"NAME\" from ORGANISATION"
+//                            + " join CONTACT on CONTACT.CONTACTID = ORGANISATION.ORGANISATIONID"
+//                            + " where CONTACTID != '0'";
+//tmpFieldsInFilterRay = ["CONTACTID", "NAME"];
+//
+//DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
+//
+//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
+//tmpFieldsInFilterRay, resultFieldsIdFieldName);
+//
+//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
-- 
GitLab