diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index 51c0509b4581764d88f3d5c7835f9c96e4da8936..83120c0790a5c699875f68fe3c4fb5001a737723 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -351,10 +351,15 @@ import("JditoFilter_lib");
 //logging.log("resultTest Beispiel 2 -> " + JSON.stringify(resultTest));
 
 
+//indexsearch.runIndexer(["Person"]);
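+//Hint: rebuilding the "Person" index first may be necessary so newly inserted records are searchable.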
 
 
 
 
+let resultClusterId = DuplicateScannerUtils.GetClusterWithDuplicates(["7a34d9d0-04c7-478c-a8e2-f584fe625c45", "c7ddf982-0e58-4152-b82b-8f5673b0b729"]);
+logging.log("resultClusterId -> " + resultClusterId);
+
+
 
 
 
@@ -365,4 +370,3 @@ import("JditoFilter_lib");
 
 
 
-indexsearch.runIndexer(["Person"]);
\ No newline at end of file
diff --git a/entity/Person_entity/entityfields/standard_city/valueProcess.js b/entity/Person_entity/entityfields/standard_city/valueProcess.js
index 28a15bb7feaa65f8beb1820139154b5da74f2e9e..46497b23a641c6a6d16d65ae2f3399bb23b82dcc 100644
--- a/entity/Person_entity/entityfields/standard_city/valueProcess.js
+++ b/entity/Person_entity/entityfields/standard_city/valueProcess.js
@@ -16,12 +16,12 @@ result.string(standardCity);
 
 function getStandardAddressInfos(pContactId, pSelectField)
 {
-    //select `ADDRESS`, `CITY`, `ZIP` from contact
-    //join address on address.`ADDRESSID` = contact.`ADDRESS_ID`
-    //where contact.`CONTACTID` = '8a12642b-9a29-4c35-9b79-f3d58122d140'
+    //select ADDRESS, CITY, ZIP from contact
+    //join address on address.ADDRESSID = contact.ADDRESS_ID
+    //where contact.CONTACTID = '8a12642b-9a29-4c35-9b79-f3d58122d140'
     let addressValuesCondition = SqlCondition.begin()
-    .and("contact.`CONTACTID` = '" + pContactId + "'")
-    .buildSql("select `" + pSelectField + "` from contact join address on address.`ADDRESSID` = contact.`ADDRESS_ID`");
+    .and("contact.CONTACTID = '" + pContactId + "'")
+    .buildSql("select " + pSelectField + " from contact join address on address.ADDRESSID = contact.ADDRESS_ID");
     //return db.array(db.ROW, addressValuesCondition);
     return db.cell(addressValuesCondition);
 }
diff --git a/entity/Person_entity/entityfields/standard_street/valueProcess.js b/entity/Person_entity/entityfields/standard_street/valueProcess.js
index 2061b88b442a18afca68a7217fa90ebfede6c29c..43dcd73906c7a074825d7b86469576d7ed10bd31 100644
--- a/entity/Person_entity/entityfields/standard_street/valueProcess.js
+++ b/entity/Person_entity/entityfields/standard_street/valueProcess.js
@@ -16,12 +16,12 @@ result.string(standardStreet);
 
 function getStandardAddressInfos(pContactId, pSelectField)
 {
-    //select `ADDRESS`, `CITY`, `ZIP` from contact
-    //join address on address.`ADDRESSID` = contact.`ADDRESS_ID`
-    //where contact.`CONTACTID` = '8a12642b-9a29-4c35-9b79-f3d58122d140'
+    //select ADDRESS, CITY, ZIP from contact
+    //join address on address.ADDRESSID = contact.ADDRESS_ID
+    //where contact.CONTACTID = '8a12642b-9a29-4c35-9b79-f3d58122d140'
     let addressValuesCondition = SqlCondition.begin()
-    .and("contact.`CONTACTID` = '" + pContactId + "'")
-    .buildSql("select `" + pSelectField + "` from contact join address on address.`ADDRESSID` = contact.`ADDRESS_ID`");
+    .and("contact.CONTACTID = '" + pContactId + "'")
+    .buildSql("select " + pSelectField + " from contact join address on address.ADDRESSID = contact.ADDRESS_ID");
     //return db.array(db.ROW, addressValuesCondition);
     return db.cell(addressValuesCondition);
 }
diff --git a/entity/Person_entity/entityfields/standard_zip/valueProcess.js b/entity/Person_entity/entityfields/standard_zip/valueProcess.js
index c97b56d031207ae964a3877d8b2eb84b3417bd2b..05e1d96c34f083d957e910f73fc0997013378437 100644
--- a/entity/Person_entity/entityfields/standard_zip/valueProcess.js
+++ b/entity/Person_entity/entityfields/standard_zip/valueProcess.js
@@ -16,12 +16,12 @@ result.string(standardZip);
 
 function getStandardAddressInfos(pContactId, pSelectField)
 {
-    //select `ADDRESS`, `CITY`, `ZIP` from contact
-    //join address on address.`ADDRESSID` = contact.`ADDRESS_ID`
-    //where contact.`CONTACTID` = '8a12642b-9a29-4c35-9b79-f3d58122d140'
+    //select ADDRESS, CITY, ZIP from contact
+    //join address on address.ADDRESSID = contact.ADDRESS_ID
+    //where contact.CONTACTID = '8a12642b-9a29-4c35-9b79-f3d58122d140'
     let addressValuesCondition = SqlCondition.begin()
-    .and("contact.`CONTACTID` = '" + pContactId + "'")
-    .buildSql("select `" + pSelectField + "` from contact join address on address.`ADDRESSID` = contact.`ADDRESS_ID`");
+    .and("contact.CONTACTID = '" + pContactId + "'")
+    .buildSql("select " + pSelectField + " from contact join address on address.ADDRESSID = contact.ADDRESS_ID");
     //return db.array(db.ROW, addressValuesCondition);
     return db.cell(addressValuesCondition);
 }
diff --git a/entity/Person_entity/recordcontainers/db/onDBInsert.js b/entity/Person_entity/recordcontainers/db/onDBInsert.js
index fe326f3b6f8b0e657a29903966bd2f16216fa058..f4b551df3f59cf3f0ffb617b4f346e1b32c95b96 100644
--- a/entity/Person_entity/recordcontainers/db/onDBInsert.js
+++ b/entity/Person_entity/recordcontainers/db/onDBInsert.js
@@ -1,25 +1,59 @@
+import("system.logging");
 import("system.vars");
 import("DataPrivacy_lib");
 import("DuplicateScanner_lib");
-import("system.indexsearch");
+import("system.notification");
+import("system.project");
 
+let targetEntity = "Person_entity";
 let contactId = vars.get("$local.uid");
 DataPrivacyUtils.notifyNeedDataPrivacyUpdate(contactId);
 
-//let valuesToCheck = {};
-//valuesToCheck["CONTACTID"] = contactId;
-//valuesToCheck["FIRSTNAME"] = "Tim";
-//valuesToCheck["GENDER"] = "m                                   ";
-//
-//let scanResults = DuplicateScannerUtils.ScanForDuplicates("PersonDuplicates", "Person_entity",
-//valuesToCheck, null);
-//
-//let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
-//
-////todo wenn ergebnisse da, füge ein
-//let insertCount = DuplicateScannerUtils.InsertScanResultsInNewCluster(indexResultFields);
+let valuesToCheck = {};
+valuesToCheck["CONTACTID"] = contactId;
+
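+//Collect every non-empty field value of the new record dynamically from the entity structure,
+//so the scan matches against all configured fields instead of a hardcoded few.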
+let entityModel = project.getEntityStructure(targetEntity);
+
+let fieldValue = "";
+let entityFields = [];
+for (fieldname in entityModel.fields) 
+{ 
+    field = entityModel.fields[fieldname]; 
+    if(field.fieldType == project.ENTITYFIELDTYPE_FIELD)
+    {
+        fieldValue = vars.get("$field." + field.name);
+        
+        
+        
+        if(fieldValue != null && fieldValue != "")
+        {
+            logging.log("field.name -> " + field.name);
+            valuesToCheck[field.name] = fieldValue;
+            logging.log("field value -> " + fieldValue);
+        }
+    }
+}
+
+let scanResults = DuplicateScannerUtils.ScanForDuplicates("PersonDuplicates", targetEntity,
+valuesToCheck, null);
+logging.log("scanResults.length -> " + scanResults.length);
+
+if(scanResults.length > 0)
+{
+    let insertCount = DuplicateScannerUtils.CacheNewScanResults(contactId, scanResults, targetEntity);
+    logging.log("insertCount -> " + insertCount);
+}
+
 //
-////Wenn duplikate gefunden => gebe Notification
+//if(insertCount > 0)
+//{
+//    let notificationConfig = notification.createConfig().caption("Es wurden dubletten gefunden");
+//    notification.addNotificationWith(notificationConfig);
+//}
+//    notification.addNotification(util.getNewUUID(), null, null, null, "BulkMailSent", notification.PRIO_NORMAL, 2, notification.STATE_UNSEEN, [user], message, description);
+
 
 
 
diff --git a/entity/Person_entity/recordcontainers/index/subProcess.js b/entity/Person_entity/recordcontainers/index/subProcess.js
index 3086ca07fc283d6e05c29e2717a1298f067bc89f..1dd4e2c5f2a45430d792db1fe99a5833de9bb51e 100644
--- a/entity/Person_entity/recordcontainers/index/subProcess.js
+++ b/entity/Person_entity/recordcontainers/index/subProcess.js
@@ -63,8 +63,8 @@ function getStandardAddressInfos(pContactId, pSelectField)
     //join address on address.`ADDRESSID` = contact.`ADDRESS_ID`
     //where contact.`CONTACTID` = '8a12642b-9a29-4c35-9b79-f3d58122d140'
     let addressValuesCondition = SqlCondition.begin()
-    .and("contact.`CONTACTID` = '" + pContactId + "'")
-    .buildSql("select `" + pSelectField + "` from contact join address on address.`ADDRESSID` = contact.`ADDRESS_ID`");
+    .and("contact.CONTACTID = '" + pContactId + "'")
+    .buildSql("select " + pSelectField + " from contact join address on address.ADDRESSID = contact.ADDRESS_ID");
     //return db.array(db.ROW, addressValuesCondition);
     return db.cell(addressValuesCondition);
 }
\ No newline at end of file
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 07c6b8da0b18b9d8133ddeb1760c91d1b0ab597c..4e42133a40b7242b7e6ef53a428945e78e183777 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -274,7 +274,7 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  *          {
  *              let entityFieldName = resultFields[b];
  *              let indexFieldName = indexResultFields[entityFieldName];
 *              //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
  *              //format values
  *          }
  *      }
@@ -300,11 +300,11 @@ pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
 
     //load all entity fields used in the prefilter
     let filterFieldConfigs = _DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
-    logging.log("filterFieldConfigs -> " + filterFieldConfigs);
+    //logging.log("filterFieldConfigs -> " + filterFieldConfigs);
 
     let entityFieldsToLoad = _DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
 
-    logging.log("entityFieldsToLoad initial-> " + entityFieldsToLoad);
+    //logging.log("entityFieldsToLoad initial-> " + entityFieldsToLoad);
 
     //If the fields from the prefilter aren't in the entity fields to load, add them manually
     if(filterFieldConfigs.length > 0)
@@ -314,21 +314,21 @@ pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
         {
             let filterFieldConfig = filterFieldConfigs[i];
             let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
-            logging.log("filterFieldConfig -> " +filterFieldConfig );
+            //logging.log("filterFieldConfig -> " +filterFieldConfig );
             if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
             {
-                logging.log("Noch nicht vorhanden, hinzufügen -> ");
+                //logging.log("Noch nicht vorhanden, hinzufügen -> ");
                 duplicateFieldsConfig.push(filterFieldConfig);
                 entityFieldsToLoad.push(filterFieldName);
             }
         }
     }
     
-    logging.log("entityFieldsToLoad vollsätndig-> " + entityFieldsToLoad);
-    logging.log("duplicateFieldsConfig vollsätndig-> " + duplicateFieldsConfig);
+    //logging.log("entityFieldsToLoad vollsätndig-> " + entityFieldsToLoad);
+    //logging.log("duplicateFieldsConfig vollsätndig-> " + duplicateFieldsConfig);
     
     let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFieldsToLoad, 0, pRecordsBlockSize);
-    logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
+    //logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
 
     let currentRecordIndex = pRecordsBlockSize;
     while(targetRecords.length > 0)
@@ -336,29 +336,29 @@ pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
         foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pFilterName, pTargetEntity, targetRecords,
                                 duplicateFieldsConfig, resultFields, useExternalWebservice,
                                 pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds);
-        logging.log("gefundene ids zum ignorieren foundDuplicateIds -> " + foundDuplicateIds);
+        //logging.log("gefundene ids zum ignorieren foundDuplicateIds -> " + foundDuplicateIds);
         alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
-        logging.log("Gesamte ignorierListe -> " + alreadyIdentifiedIds);
+        //logging.log("Gesamte ignorierListe -> " + alreadyIdentifiedIds);
         if(targetRecords.length < pRecordsBlockSize)
         {
-            logging.log("weniger records geladen als möglich => Ende der verfügbaren Records -> Abbrechen");
+            //logging.log("weniger records geladen als möglich => Ende der verfügbaren Records -> Abbrechen");
             break;
         }
 
-        logging.log("Nächster Block wird geladen mit startRow -> " + currentRecordIndex);
+        //logging.log("Nächster Block wird geladen mit startRow -> " + currentRecordIndex);
 
         targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFieldsToLoad,
         currentRecordIndex, pRecordsBlockSize);
 
         currentRecordIndex += pRecordsBlockSize;
-        logging.log("Nächster Block geladen mit targetRecords -> " + JSON.stringify(targetRecords));
+        //logging.log("Nächster Block geladen mit targetRecords -> " + JSON.stringify(targetRecords));
     }
 }
 
 DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTargetRecordsData,
 pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds)
 {
-    logging.log("in ScanRecords -> ");
+    //logging.log("in ScanRecords -> ");
     let foundDuplicateIds = [];
+    //Collects all duplicate ids found in this run so the caller can exclude them from subsequent scans
+    let allFoundDuplicateIds = [];
 
     //If the contact id loading query has no results, stop.
@@ -368,27 +368,27 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
 
      //First it gets checked if the current id has already been identified. If that's the case it'll continue with the next.
      //Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
-    logging.log("configured pResultFields -> " + pResultFields);
+    //logging.log("configured pResultFields -> " + pResultFields);
     var duplicatesToInsertQueries = [];
     for (b = 0; b < pTargetRecordsData.length; b++)
     {
-        logging.log("Nächster Datensatz in RebuildDuplicatesCache -> " + b);
-//        logging.log("Neuer Record -> " + pTargetRecordsData[b]);
+        //logging.log("Nächster Datensatz in RebuildDuplicatesCache -> " + b);
+//        //logging.log("Neuer Record -> " + pTargetRecordsData[b]);
 
-        logging.log("pTargetRecordsData[b] -> " + JSON.stringify(pTargetRecordsData[b]));
+        //logging.log("pTargetRecordsData[b] -> " + JSON.stringify(pTargetRecordsData[b]));
         let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(pDuplicateFieldsConfig, pTargetRecordsData[b]);
-        logging.log("Werte des Datensatzes entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
+        //logging.log("Werte des Datensatzes entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
 
         //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic
         let idField = entityFieldValuesRay[0][0];
         let idValue = entityFieldValuesRay[0][1];
-//        logging.log("idField -> " + idField);
-//        logging.log("idValue -> " + idValue);
-            logging.log("pTargetRecordsData[b][idField] -> " + pTargetRecordsData[b][idField]);
+//        //logging.log("idField -> " + idField);
+//        //logging.log("idValue -> " + idValue);
+            //logging.log("pTargetRecordsData[b][idField] -> " + pTargetRecordsData[b][idField]);
         //If the current Id has already been identified, continue
         if(pAlreadyIdentifiedIds.indexOf(pTargetRecordsData[b][idField]) > -1)
             {
-                logging.log("Id schon behandelt, continue; -> ");
+                //logging.log("Id schon behandelt, continue; -> ");
                 continue;
             }
 
@@ -397,17 +397,17 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
 
         if(foundDuplicates == null || foundDuplicates.length == 0)
         {
-            logging.log("Keine Datensätze gefunden  continue;-> ");
+            //logging.log("Keine Datensätze gefunden  continue;-> ");
             continue;
         }
-        logging.log("foundDuplicates.length nach _scanForDuplicates -> " + foundDuplicates.length);
+        //logging.log("foundDuplicates.length nach _scanForDuplicates -> " + foundDuplicates.length);
 
         //Insert all found duplicate ids into an cache array because those ids don't have to be checked again lateron.
 
         for (let i = 0; i < foundDuplicates.length; i++)
         {
             let localId = foundDuplicates[i][indexsearch.FIELD_ID];
-            logging.log("foundDuplicates[i] -> " + foundDuplicates[i]);
+            //logging.log("foundDuplicates[i] -> " + foundDuplicates[i]);
             foundDuplicateIds.push(localId);
         }
 
@@ -415,43 +415,72 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
 
         //The duplicates list contains only the found duplicates to the original id, therefore it get's added manually
         foundDuplicateIds.push(pTargetRecordsData[b][idField]);
-        logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
-        logging.log("foundDuplicateIds -> " + JSON.stringify(foundDuplicateIds));
+        //logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
+        //logging.log("foundDuplicateIds -> " + JSON.stringify(foundDuplicateIds));
 
         let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds, pTargetEntity)
         duplicatesToInsertQueries = duplicatesToInsertQueries.concat(insertQueriesRay);
+        //Remember this record's duplicate ids, then reset the list so the next record starts a fresh cluster
+        allFoundDuplicateIds = allFoundDuplicateIds.concat(foundDuplicateIds);
+        foundDuplicateIds = [];
     }
-    logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
+    //logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
     db.inserts(duplicatesToInsertQueries, db.getCurrentAlias(), 10 * datetime.ONE_MINUTE);
-    return foundDuplicateIds;
+    return allFoundDuplicateIds;
 }
 
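+/* 
+ * Caches the results of one duplicate scan run. If a cluster already containing the found
+ * duplicate ids exists, only the new record id is added to it, otherwise all found ids are
+ * inserted as a new cluster.
+ * 
+ * @param {String} pNewRecordId Id of the record whose insertion triggered the scan
+ * @param {String[]} pScanResults Scan results holding the ids of the found duplicates
+ * @param {String} pTargetEntity Entity which has been scanned
+ * @returns {Number} Count of affected rows as reported by db.inserts
+ */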
-DuplicateScannerUtils.CacheNewScanResults = function(pScanResults, pTargetEntity)
+DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pScanResults, pTargetEntity)
 {
-    //prüfe ob ein cluster besteht mit den gleichen ids
-    //wenn ja dort einfügen
-    //ansonsten neuer cluster mit diesen ids
-    
     let duplicateIds = [];
-    //Run thru every duplicate result and read out the resultfields
+    //Run through every duplicate result and read out the id.
+    //Do it now to have a simple id array for all later usages.
     for (let i = 0; i < pScanResults.length; i++)
     {
         let duplicateContactId = pScanResults[i][indexsearch.FIELD_ID];
         duplicateIds.push(duplicateContactId);
     }
+    //logging.log("duplicateIds -> " + duplicateIds);
+    let clusterId = DuplicateScannerUtils.GetClusterWithDuplicates(duplicateIds);
+        //logging.log("clusterId -> " + clusterId);
     
-    let clusterId = DuplicateScannerUtils.GetClusterWithIdenticalDuplicates(duplicateIds);
-    let insertQueriesRay = [];
-    
-    if(clusterId == null || clusterId == "")
+    let idRayToInsert = [];
+    if(clusterId == undefined || clusterId == null || clusterId == "")
     {
-        insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(duplicateIds, pTargetEntity)
+        idRayToInsert = duplicateIds;
     }
     else
+        idRayToInsert.push(pNewRecordId);
+    
+    //logging.log("idRayToInsert -> " + idRayToInsert);
+    let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(idRayToInsert, pTargetEntity, clusterId);
+
+    return db.inserts(insertQueriesRay);
+}
+
+/* 
+ * Searches for a cluster which contains the duplicates specified by the parameterized array. <br />
+ * Based on the assumption that a duplicate can only ever be in one cluster at any given time,
+ * it selects the distinct cluster id and returns it.
+ * 
+ * @param {String[]} pDuplicateIds Duplicate ids which should be in the same cluster
+ * @returns {String} Id of the cluster which contains all given duplicate ids or null
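+ * 
+ * A minimal usage sketch (the sample ids below come from the test action):
+ * let duplicateIds = ["7a34d9d0-04c7-478c-a8e2-f584fe625c45", "c7ddf982-0e58-4152-b82b-8f5673b0b729"];
+ * let clusterId = DuplicateScannerUtils.GetClusterWithDuplicates(duplicateIds);
+ * //clusterId is null/empty if no single cluster contains all of the given ids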
+ */
+DuplicateScannerUtils.GetClusterWithDuplicates = function(pDuplicateIds)
+{
+//    select distinct CLUSTERID from DUPLICATECLUSTERS dc
+//    where CLUSTERID in (select CLUSTERID from DUPLICATECLUSTERS where DUPLICATEID = '7a34d9d0-04c7-478c-a8e2-f584fe625c45')
+//    and CLUSTERID in (select CLUSTERID from DUPLICATECLUSTERS where DUPLICATEID = 'c7ddf982-0e58-4152-b82b-8f5673b0b729')
+    if(pDuplicateIds.length < 1)
+        return null;
+    
+    let clusterIdConditionBuilder = SqlCondition.begin();
+    
+    for (let i = 0; i < pDuplicateIds.length; i++) 
     {
-        insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(duplicateIds, pTargetEntity, clusterId)
+        clusterIdConditionBuilder.andSqlCondition("CLUSTERID in (select CLUSTERID from DUPLICATECLUSTERS where DUPLICATEID = '" + pDuplicateIds[i] + "')", "1=2");
     }
-    return db.inserts(insertQueriesRay);
+
+    let clusterIdCondition = clusterIdConditionBuilder.buildSql("select distinct CLUSTERID from DUPLICATECLUSTERS", "1=2");
+
+    return db.cell(clusterIdCondition);
 }
 
 DuplicateScannerUtils.LoadEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad)
@@ -548,7 +577,7 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  *         {
  *             let entityFieldName = resultFields[b];
  *             let indexFieldName = indexResultFields[entityFieldName];
 *             //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
  *             //format values
  *         }
  *     }
@@ -579,10 +608,10 @@ pFormatValuesConsumeWebserviceCallback)
         {
             let filterFieldConfig = filterFieldConfigs[i];
             let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
-            logging.log("filterFieldConfig -> " +filterFieldConfig );
+            //logging.log("filterFieldConfig -> " +filterFieldConfig );
             if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
             {
-                logging.log("Noch nicht vorhanden, hinzufügen -> ");
+                //logging.log("Noch nicht vorhanden, hinzufügen -> ");
                 duplicateFieldsConfig.push(filterFieldConfig);
             }
         }
@@ -610,7 +639,7 @@ pFormatValuesConsumeWebserviceCallback)
  *  let entityResultFields = ["LASTNAME"];
  *  let entityIndexFields = DuplicateScannerUtils.TranslateEntityToIndexFields("Person_entity", entityResultFields);
  *
 *  logging.log(entityIndexFields["LASTNAME"]);//=> "LASTNAME_value"
  */
 DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields)
 {
@@ -655,15 +684,15 @@ DuplicateScannerUtils.MergePerson = function(pSourceContactId, pTargetContactId)
     deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeletePersonAndContactQuery(sourcePersonId, pSourceContactId));
     deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId));
 
-    logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias));
-    logging.log("deleteStatements -> " + JSON.stringify(deleteStatements));
+    //logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias));
+    //logging.log("deleteStatements -> " + JSON.stringify(deleteStatements));
 
     let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias);
     let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS");
     let deletedRows = db.deletes(deleteStatements)
 
-    logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias);
-    logging.log("deletedRows -> " + deletedRows);
+    //logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias);
+    //logging.log("deletedRows -> " + deletedRows);
 
     DuplicateScannerUtils.DeleteCachedDuplicate(pSourceContactId);
 
@@ -692,15 +721,15 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
     deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteOrganisationAndContactQuery(sourceOrganisationId, pSourceContactId));
     deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId));
 
-    logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias));
-    logging.log("deleteStatements -> " + JSON.stringify(deleteStatements));
+    //logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias));
+    //logging.log("deleteStatements -> " + JSON.stringify(deleteStatements));
 
     let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias);
     let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS");
     let deletedRows = db.deletes(deleteStatements)
 
-    logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias);
-    logging.log("deletedRows -> " + deletedRows);
+    //logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias);
+    //logging.log("deletedRows -> " + deletedRows);
 
     DuplicateScannerUtils.DeleteCachedDuplicate(pSourceContactId);
 
@@ -735,8 +764,8 @@ DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsC
     */
     for (a = 0; a < pDuplicateFieldsConfig.length; a++)
     {
-//        logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
-//        logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
+//        //logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
+//        //logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
         let entityField = pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD];
         entityFieldValuesRay.push([entityField, pTargetRecordData[entityField], pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]])
     }
@@ -796,17 +825,19 @@ pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesCons
     //the filter of said records to ignore
     configuredFilters = [ignoredRecordFilter].concat(configuredFilters);
 
-    logging.log("Found filters -> " + configuredFilters);
+    //logging.log("Found filters -> " + configuredFilters);
+
 
     let preFilter = _DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pFilterFieldValueRays);
 
-    logging.log("preFilter welcher Elemente im erlaubten bereich ausgibt -> " + preFilter);
+    //logging.log("preFilter welcher Elemente im erlaubten bereich ausgibt -> " + preFilter);
 
     if(preFilter == null)
         return null;
 
     possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pResultFields, 100);
-
+    //logging.log("possibleDuplicates -> " + JSON.stringify(possibleDuplicates));
+    
     if(possibleDuplicates == null)
         return null;
 
@@ -815,11 +846,11 @@ pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesCons
     if(pUseExternalWebservice && possibleDuplicates.length > 0 && pFormatValuesConsumeWebserviceCallback != null)
         possibleDuplicates = pFormatValuesConsumeWebserviceCallback.apply(this, [possibleDuplicates]);
 
-//    logging.log("pTargetEntity -> " + pTargetEntity);
-//    logging.log("preFilter -> " + preFilter);
-//    logging.log("pFilterFieldValueRays -> " + pFilterFieldValueRays);
-//    logging.log("pRecordIdFieldToIgnore -> " + pRecordIdFieldToIgnore);
-//    logging.log("possibleDuplicates -> " + possibleDuplicates);
+//    //logging.log("pTargetEntity -> " + pTargetEntity);
+//    //logging.log("preFilter -> " + preFilter);
+//    //logging.log("pFilterFieldValueRays -> " + pFilterFieldValueRays);
+//    //logging.log("pRecordIdFieldToIgnore -> " + pRecordIdFieldToIgnore);
+//    //logging.log("possibleDuplicates -> " + possibleDuplicates);
 
     return possibleDuplicates;
 }
@@ -880,21 +911,21 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
             filterValuesObject[pFilterFieldValueRays[a][0]] = pFilterFieldValueRays[a][1];
         }
 
-        logging.log("filterValuesObject zum füllen des jsons -> " + JSON.stringify(filterValuesObject));
+        //logging.log("filterValuesObject zum füllen des jsons -> " + JSON.stringify(filterValuesObject));
         /*
          * Insert the values into the current filter. Has to be here so that only the new filter
          * and therefore the combinedFilter incrementally gets filled and not always everything multiple times.
          */
-        logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
+        //logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
         filter.filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.filter.childs, filterValuesObject, countCharsOfValueToUse);
-        //logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
+        ////logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
 
         if(i == 0)
             combinedFilter = filter.filter;
         else
         {
-            //logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
-            //logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
+            ////logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
+            ////logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
             //Extend the current combined filter with the next filter condition to further refine the results
             //It seems to always contain one child element at the root
             //combinedFilter.childs.push(filter.filter.childs);
@@ -902,39 +933,39 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
             combinedFilter.childs = newCombinedFilterChilds;
         }
 
-        //logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
+        ////logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
 
         filter.filter = combinedFilter;
-        logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
+        //logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
 
         //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
         //Only load "indexsearch.FIELD_ID" and a resultSet size of 1
         let searchResult = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [],
         [], 1);//todo use again after this has been fixed!! insert the local id after fix
-        logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
+        //logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
 
 //        if(searchResult[indexsearch.TOTALHITS] < 80)//todo entfernen?!
 //        {
 //            for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
 //            {
-//                logging.log("Treffer Nr -> " + i);
+//                //logging.log("Treffer Nr -> " + i);
 //                //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c}
 //                let localId = searchResult[indexsearch.HITS][i]["_local_id_"];
 //                let firstname = searchResult[indexsearch.HITS][i]["firstname_value"];
 //                let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"];
-//                logging.log("localId -> " + localId);
-//                logging.log("firstname -> " + firstname);
-//                logging.log("indexGroup -> " + indexGroup);
+//                //logging.log("localId -> " + localId);
+//                //logging.log("firstname -> " + firstname);
+//                //logging.log("indexGroup -> " + indexGroup);
 //            }
 //        }
 
         let totalHits = searchResult[indexsearch.TOTALHITS]
 
-        logging.log("totalHits -> " + totalHits);
+        //logging.log("totalHits -> " + totalHits);
 
         if(totalHits > maxResultsThreshold)
         {
-            logging.log("zu viele rows gefundenn nächsten Filter anwenden -> totalHits:" + totalHits + " maxResultsThreshold:" + maxResultsThreshold);
+            //logging.log("zu viele rows gefundenn nächsten Filter anwenden -> totalHits:" + totalHits + " maxResultsThreshold:" + maxResultsThreshold);
             //Found more rows than allowed by the threshold, run next filter to narrow the results
             continue;
         }
@@ -948,7 +979,7 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
             return JSON.stringify(filter);
         }
     }
-    logging.log("zu viele rows und keine filter mehr -> ");
+    //logging.log("zu viele rows und keine filter mehr -> ");
     return null;
 }
 
@@ -968,14 +999,14 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
 _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultFields, pResultSetRows)
 {
     let parsedFilterAsPatternTerm = indexsearch.buildQueryFromSearchCondition(pPreFilterJson);
-    //logging.log("pTargetEntity -> " + pTargetEntity);
-    logging.log("pResultFields -> " + pResultFields);
-    //logging.log("pResultSetRows -> " + pResultSetRows);
+    ////logging.log("pTargetEntity -> " + pTargetEntity);
+    //logging.log("pResultFields -> " + pResultFields);
+    ////logging.log("pResultSetRows -> " + pResultSetRows);
     let indexQuery = indexsearch.createIndexQuery()
                                 .setPattern(parsedFilterAsPatternTerm)
-                                .setEntities([pTargetEntity])
+                                .setEntities([pTargetEntity]);
                                 //.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME", "Person_entity.CONTACTID")
-                                .setRows(pResultSetRows);
+                                //.setRows(pResultSetRows);
 
     indexQuery = _DuplicateScannerUtils._setResultFields(indexQuery, pResultFields);
 
@@ -987,18 +1018,18 @@ _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson
     {
         let filterPatternString = indexsearch.buildPatternString(filterPatternConfig);
         indexQuery = indexQuery.addFilter(filterPatternString);
-        logging.log("real filter PatternString -> " + filterPatternString);
+        //logging.log("real filter PatternString -> " + filterPatternString);
     }
-    logging.log("parsedFilterAsPatternTerm -> " + parsedFilterAsPatternTerm);
+    //logging.log("parsedFilterAsPatternTerm -> " + parsedFilterAsPatternTerm);
 
     if(filterPatternConfig == null && pEntityFieldValueRays.length > 0)
     {
-        logging.log("FilterPattern ist null aber es gibt pEntityFieldValueRays -> Die Felder sollten genutzt werden, beinhalten aber keine Werte");
+        //logging.log("FilterPattern ist null aber es gibt pEntityFieldValueRays -> Die Felder sollten genutzt werden, beinhalten aber keine Werte");
         return null;
     }
     else
     {
-        logging.log("Starte Indexsuche -> ");
+        //logging.log("Starte Indexsuche -> ");
         return indexsearch.searchIndex(indexQuery);
     }
 }
@@ -1042,7 +1073,7 @@ _DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields)
         }
 //    }
 
-    logging.log("resultFields -> " + resultFields);
+    //logging.log("resultFields -> " + resultFields);
     return pIndexQuery;
 }
 
@@ -1061,7 +1092,7 @@ _DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRay
     let INDEX_CONFIG_USE_FOR_SEARCH = 2;
     let filterPatternConfig = null;
 
-    logging.log("pEntityFieldValueRays.length -> " + pEntityFieldValueRays.length);
+    //logging.log("pEntityFieldValueRays.length -> " + pEntityFieldValueRays.length);
     if(pEntityFieldValueRays.length > 0)
     {
         filterPatternConfig = indexsearch.createPatternConfig();
@@ -1073,12 +1104,12 @@ _DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRay
             if(pEntityFieldValueRays[i][INDEX_CONFIG_USE_FOR_SEARCH] == 0 || entityFieldValue == "")
                 continue;
 
-            logging.log("entityFieldValue -> " + entityFieldValue);
-            logging.log("entityFieldName -> " + entityFieldName);
+            //logging.log("entityFieldValue -> " + entityFieldValue);
+            //logging.log("entityFieldName -> " + entityFieldName);
 
             let indexField = indexsearch.lookupIndexField(pTargetEntity, entityFieldName);
 
-            logging.log("indexField -> " + indexField);
+            //logging.log("indexField -> " + indexField);
             var filterTerm = indexsearch.createTerm(entityFieldValue)
                                         .setIndexField(indexField)
                                         .setFuzzySearchFactor(0);
@@ -1114,7 +1145,7 @@ _DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicat
         let duplicatesToInsertQueries = [];
         let cols = ["ID", "CLUSTERID", "DUPLICATEID", "TARGET_ENTITY"];
 
-        if(pClusterId == null || pClusterId == "")
+        if(pClusterId == undefined || pClusterId == null || pClusterId == "")
             pClusterId = util.getNewUUID();
             
         for (i = 0; i < pDuplicatesRay.length; i++)
@@ -1340,7 +1371,7 @@ _DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, p
 
             if(fieldValue == null)
             {
-                logging.log("Duplicate Scan: Requested value for field " + fieldName + " not present in the provided valueslist");
+                //logging.log("Duplicate Scan: Requested value for field " + fieldName + " not present in the provided valueslist");
                 continue;
             }