diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index c3def7988e47203c2cb79b8f481e65f4867f3057..41c8545687dc007b306be2a488cd3981fcf645f0 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -199,8 +199,19 @@ let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
 logging.log("Löschen von PERSON Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
+// Placeholder callback handed to RebuildDuplicatesCache: intended to format the
+// duplicate candidates as JSON and call the external webservice. For now it
+// only logs the candidates and returns them unchanged.
+let formatToJsonAndCallWs = function(pPossibleDuplicatesRay) {
+    logging.log("pPossibleDuplicatesRay -> " + pPossibleDuplicatesRay);
+    logging.log("candidate count -> " + pPossibleDuplicatesRay.length);
+    if(pPossibleDuplicatesRay.length > 0)
+        logging.log("demo id field -> " + pPossibleDuplicatesRay[0][indexsearch.FIELD_ID]);
+    return pPossibleDuplicatesRay;
+};
+
 logging.log("Neu berechnen von PERSON Dubletten -> ");
 DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
-duplicateFieldsConfig);
+duplicateFieldsConfig, formatToJsonAndCallWs);
 
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 4c78a524df941776224d42af785b4a2553d7ad1b..655259211f758582256e3b505009a39907e39036 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -229,7 +229,8 @@ DuplicateScannerUtils.GetCachedDuplicatesForId = function(pClusterRecordId)
  * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);<br />
+ * @param {Function} pCallExternalWebserviceFunction callback that receives the duplicate candidates when the scan is configured to use an external webservice<br />
  */
 DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
-pQueryTargetRecords, pDuplicateFieldsConfig)
+pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction)
 {   
     
     logging.log("in RebuildDuplicatesCache -> ");
@@ -271,22 +271,20 @@ pQueryTargetRecords, pDuplicateFieldsConfig)
         logging.log("idValue -> " + idValue);
         
         let foundDuplicates = DuplicateScannerUtils.ScanForDuplicates(pFilterName, pTargetEntity,
-                                entityFieldValuesRay, resultFields, idField, idValue)
+                                entityFieldValuesRay, resultFields, idField, idValue, pCallExternalWebserviceFunction)
         
         //        logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
         logging.log("foundDuplicates -> " + foundDuplicates);
-        logging.log("foundDuplicates[indexsearch.TOTALHITS] -> " + foundDuplicates[indexsearch.TOTALHITS]);
-        if(foundDuplicates == null || foundDuplicates[indexsearch.TOTALHITS] == 0)
+        logging.log("foundDuplicates.length -> " + (foundDuplicates == null ? 0 : foundDuplicates.length));
+        if(foundDuplicates == null || foundDuplicates.length == 0)
             continue;
         
 
         //Insert all found duplicate ids into an cache array because those ids don't have to be checked again lateron.
         let foundDuplicateIds = [];
-        for (let i = 0; i < foundDuplicates[indexsearch.HITS].length; i++) 
+        for (let i = 0; i < foundDuplicates.length; i++)
         {
-//            logging.log("i -> " + i);
-//            logging.log("foundDuplicates[pRecordIdFieldToIgnore] -> " + foundDuplicates[i][pRecordIdFieldToIgnore]);
-            let localId = foundDuplicates[indexsearch.HITS][i][indexsearch.FIELD_ID];
+            let localId = foundDuplicates[i][indexsearch.FIELD_ID];
             logging.log("localId der gefundenen Dublette-> " + localId);
             foundDuplicateIds.push(localId);
         }
@@ -383,7 +381,8 @@ DuplicateScannerUtils._loadResultFields = function(pFilterName, pTargetEntity)
 
 
+//pCallExternalWebserviceFunction: optional callback that is handed the duplicate candidates when an external webservice is used
 DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
-pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
+pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCallExternalWebserviceFunction)
 {
     let possibleDuplicates = [];
     let ignoredRecordFilter = _DuplicateScannerUtils._getIgnoreRecordFilter(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity);
@@ -404,7 +402,12 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
     
     if(useExternalWebservice)
     {
+        possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100);
+        possibleDuplicates = possibleDuplicates[indexsearch.HITS];
         
+        //Only invoke the external webservice callback if one has been provided and there are candidates to check
+        if(possibleDuplicates.length > 0 && pCallExternalWebserviceFunction != null)
+            possibleDuplicates = pCallExternalWebserviceFunction.call(this, possibleDuplicates);
     }
@@ -416,9 +418,10 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
         //(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultIndexFields, pResultFields, pResultSetRows)
         //run actual index duplicate search
         possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100);
-        logging.log("possibleDuplicates -> " + possibleDuplicates);
+        //Callers only work with the raw hit rows, so unwrap them from the search result metadata
+        possibleDuplicates = possibleDuplicates[indexsearch.HITS];
     }
-    
+    logging.log("possibleDuplicates -> " + possibleDuplicates);
     return possibleDuplicates;
 }
 
@@ -487,7 +489,7 @@ DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountChar
         //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
         //Only load "indexsearch.FIELD_ID" and a resultSet size of 1
         let searchResult = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [], 
-        ["Person_entity.FIRSTNAME"], 1);
+        [], 1); //TODO: request the record id field again once the indexsearch issue has been fixed
         logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
         logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS][0]);
         
@@ -643,7 +645,23 @@ DuplicateScannerUtils.BuildSelectFieldsFromFieldConfig = function(pIndexFieldsCo
 }
 
 
-
+/**
+ * Translates entity field names into their corresponding index fields.
+ *
+ * @param {String} pEntityName name of the entity the fields belong to
+ * @param {String[]} pEntityFields entity field names to look up
+ * @returns {Object} map from entity field name to the index field returned by indexsearch.lookupIndexField
+ */
+DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields)
+{
+    let entityIndexFields = {};
+    for (let i = 0; i < pEntityFields.length; i++) 
+    {
+         let indexField = indexsearch.lookupIndexField(pEntityName, pEntityFields[i]);
+         entityIndexFields[pEntityFields[i]] = indexField;
+    }
+    return entityIndexFields;
+}