diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 02512e08de22dfba3106b780533ea54c8a3cb3f2..a56f9865cd848aad2550bd59a49c34da6a8bd22d 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -1,7 +1,6 @@
+import("ActivityTask_lib");
 import("KeywordRegistry_basic");
 import("system.translate");
-import("ActivityTask_lib");
-import("Contact_lib");
 import("system.datetime");
 import("JditoFilter_lib");
 import("system.process");
@@ -33,9 +32,9 @@ function DuplicateScannerUtils() {}
 DuplicateScannerUtils.loadFilters = function(pFilterName, pTargetEntity)
 {
     let query = "select FILTER_CONDITION, COUNT_CHARACTERS_TO_USE, MAX_RESULTS_THRESHOLD from DUPLICATESCANNERPREFILTERCONFIG"
-                + " join DUPLICATESCANNER on DUPLICATESCANNER.ID = DUPLICATESCANNERPREFILTERCONFIG.DUPLICATESCANNER_ID"
-                + " where FILTER_NAME = '" + pFilterName + "'"
-                + " and ENTITY_TO_SCAN_NAME = '" + pTargetEntity + "'";
+    + " join DUPLICATESCANNER on DUPLICATESCANNER.ID = DUPLICATESCANNERPREFILTERCONFIG.DUPLICATESCANNER_ID"
+    + " where FILTER_NAME = '" + pFilterName + "'"
+    + " and ENTITY_TO_SCAN_NAME = '" + pTargetEntity + "'";
     return db.table(query);
 }
 
@@ -49,7 +48,7 @@ DuplicateScannerUtils.loadFilters = function(pFilterName, pTargetEntity)
 DuplicateScannerUtils.DeleteCachedDuplicate = function(pDuplicateId)
 {
     let query = "select count(ID), CLUSTERID from DUPLICATECLUSTERS"
-    + " where CLUSTERID = (select CLUSTERID from DUPLICATECLUSTERS where DUPLICATEID = '"+ pDuplicateId +"')"
+    + " where CLUSTERID in (select CLUSTERID from DUPLICATECLUSTERS where DUPLICATEID = '"+ pDuplicateId +"')"
     + " and DUPLICATEID != '"+ pDuplicateId +"'"
     + " group by CLUSTERID";
 
@@ -107,9 +106,9 @@ DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations = function(pTargetEntit
     let INDEX_OLD_CLUSTERID = 1;
 
     let query = "select dc1.CLUSTERID, ud.CLUSTERID from UNRELATEDDUPLICATES ud"
-                + " join DUPLICATECLUSTERS dc1 on dc1.DUPLICATEID = ud.SOURCEDUPLICATEID"
-                + " join DUPLICATECLUSTERS dc2 on dc2.DUPLICATEID = ud.UNRELATEDDUPLICATEID"
-                + " where dc1.TARGET_ENTITY = '" + pTargetEntity + "'";
+    + " join DUPLICATECLUSTERS dc1 on dc1.DUPLICATEID = ud.SOURCEDUPLICATEID"
+    + " join DUPLICATECLUSTERS dc2 on dc2.DUPLICATEID = ud.UNRELATEDDUPLICATEID"
+    + " where dc1.TARGET_ENTITY = '" + pTargetEntity + "'";
 
     let newIdOldIdRay = db.table(query);
     let updateStatements = [];
@@ -151,10 +150,7 @@ DuplicateScannerUtils.CreateUnrelatedDuplicateRelation = function(pSourceContact
     let newUid = util.getNewUUID();
     let columns = ["ID", "SOURCEDUPLICATEID", "UNRELATEDDUPLICATEID", "CLUSTERID"];
     let values = [newUid, pSourceContactId, pUnrelatedContactId, pClusterId];
-    logging.log("in  -> CreateUnrelatedDuplicateRelation");
-    logging.log("columns -> " + columns);
-    logging.log("values -> " + values);
-    
+
     return db.insertData("UNRELATEDDUPLICATES", columns, null, values);
 }
 
@@ -215,11 +211,11 @@ DuplicateScannerUtils.DeleteAllUnrelatedDuplicateRelations = function(pDuplicate
 DuplicateScannerUtils.GetCachedDuplicatesForDuplicateId = function(pDuplicateId)
 {
     let querySelectDuplicateContactIds = "select DUPLICATEID from DUPLICATECLUSTERS"
-                                        + " where CLUSTERID in (select CLUSTERID from DUPLICATECLUSTERS"
-                                        + " where DUPLICATEID = '"+ pDuplicateId +"')"
-                                        + " and DUPLICATEID != '"+ pDuplicateId +"'"
-                                        + " and DUPLICATEID not in (select UNRELATEDDUPLICATEID from UNRELATEDDUPLICATES where SOURCEDUPLICATEID = '"+ pDuplicateId +"')"
-                                        + " and DUPLICATEID not in (select SOURCEDUPLICATEID from UNRELATEDDUPLICATES where UNRELATEDDUPLICATEID = '"+ pDuplicateId +"')";
+    + " where CLUSTERID in (select CLUSTERID from DUPLICATECLUSTERS"
+    + " where DUPLICATEID = '"+ pDuplicateId +"')"
+    + " and DUPLICATEID != '"+ pDuplicateId +"'"
+    + " and DUPLICATEID not in (select UNRELATEDDUPLICATEID from UNRELATEDDUPLICATES where SOURCEDUPLICATEID = '"+ pDuplicateId +"')"
+    + " and DUPLICATEID not in (select SOURCEDUPLICATEID from UNRELATEDDUPLICATES where UNRELATEDDUPLICATEID = '"+ pDuplicateId +"')";
 
     return db.array(db.COLUMN, querySelectDuplicateContactIds);
 }
@@ -281,7 +277,7 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  *          {
  *              let entityFieldName = resultFields[b];
  *              let indexFieldName = indexResultFields[entityFieldName];
- *              ////logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+ *              //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
  *              //format values
  *          }
  *      }
@@ -297,7 +293,7 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
  */
 DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
-pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
+    pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
     let alreadyIdentifiedIds = [];
@@ -341,29 +337,29 @@ pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
     while(targetRecords.length > 0)
     {
         foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pFilterName, pTargetEntity, targetRecords,
-                                duplicateFieldsConfig, resultFields, useExternalWebservice,
-                                pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds);
+            duplicateFieldsConfig, resultFields, useExternalWebservice,
+            pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds);
         //logging.log("gefundene ids zum ignorieren foundDuplicateIds -> " + foundDuplicateIds);
         alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
         //logging.log("Gesamte ignorierListe -> " + alreadyIdentifiedIds);
         if(targetRecords.length < pRecordsBlockSize)
         {
-            //logging.log("weniger records geladen als möglich => Ende der verfügbaren Records -> Abbrechen");
+            logging.log("weniger records geladen als möglich => Ende der verfügbaren Records -> Abbrechen");
             break;
         }
 
         //logging.log("Nächster Block wird geladen mit startRow -> " + currentRecordIndex);
 
         targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFieldsToLoad,
-        currentRecordIndex, pRecordsBlockSize);
+            currentRecordIndex, pRecordsBlockSize);
 
         currentRecordIndex += pRecordsBlockSize;
-        //logging.log("Nächster Block geladen mit targetRecords -> " + JSON.stringify(targetRecords));
+    //logging.log("Nächster Block geladen mit targetRecords -> " + JSON.stringify(targetRecords));
     }
 }
 
 DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTargetRecordsData,
-pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds)
+    pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds)
 {
     //logging.log("in ScanRecords -> ");
     let foundDuplicateIds = [];
@@ -373,14 +369,14 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
     if(pTargetRecordsData.length <= 0)
         return;
 
-     //First it gets checked if the current id has already been identified. If that's the case it'll continue with the next.
-     //Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
+    //First it gets checked if the current id has already been identified. If that's the case it'll continue with the next.
+    //Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
     //logging.log("configured pResultFields -> " + pResultFields);
     var duplicatesToInsertQueries = [];
     for (b = 0; b < pTargetRecordsData.length; b++)
     {
         //logging.log("Nächster Datensatz in RebuildDuplicatesCache -> " + b);
-//        //logging.log("Neuer Record -> " + pTargetRecordsData[b]);
+        //        logging.log("Neuer Record -> " + pTargetRecordsData[b]);
 
         //logging.log("pTargetRecordsData[b] -> " + JSON.stringify(pTargetRecordsData[b]));
         let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(pDuplicateFieldsConfig, pTargetRecordsData[b]);
@@ -389,18 +385,18 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
         //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic
         let idField = entityFieldValuesRay[0][0];
         let idValue = entityFieldValuesRay[0][1];
-//        //logging.log("idField -> " + idField);
-//        //logging.log("idValue -> " + idValue);
-            //logging.log("pTargetRecordsData[b][idField] -> " + pTargetRecordsData[b][idField]);
+        //        logging.log("idField -> " + idField);
+        //        logging.log("idValue -> " + idValue);
+        //logging.log("pTargetRecordsData[b][idField] -> " + pTargetRecordsData[b][idField]);
         //If the current Id has already been identified, continue
         if(pAlreadyIdentifiedIds.indexOf(pTargetRecordsData[b][idField]) > -1)
-            {
-                //logging.log("Id schon behandelt, continue; -> ");
-                continue;
-            }
+        {
+            //logging.log("Id schon behandelt, continue; -> ");
+            continue;
+        }
 
         let foundDuplicates = _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
-                                entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice)
+            entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice)
 
         if(foundDuplicates == null || foundDuplicates.length == 0)
         {
@@ -439,18 +435,18 @@ pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesCons
  * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned.
  * 
  * @param {String} pNewRecordId The id of the record which was used to scan for duplicates
- * @param {String[]} pScanResults The scan results containing the Duplicate ids which are used to search for a cluster containing them
+ * @param {String[]} pDuplicateIds Duplicate ids used to search for a cluster containing them
  * @param {String} pTargetEntity Entity which has been configured
  * @returns {String} A clusterid if a matching cluster has been found, otherwise ""
  */
-DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pScanResults, pTargetEntity)
+DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pDuplicateIds, pTargetEntity)
 {
     let duplicateIds = [];
     //Run thru every duplicate result and read out the id. 
     //Do it now to have a simple array on all usages lateron.
-    for (let i = 0; i < pScanResults.length; i++)
+    for (let i = 0; i < pDuplicateIds.length; i++)
     {
-        let duplicateContactId = pScanResults[i][indexsearch.FIELD_ID];
+        let duplicateContactId = pDuplicateIds[i][indexsearch.FIELD_ID];
         duplicateIds.push(duplicateContactId);
     }
     
@@ -473,22 +469,6 @@ DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pScanResults,
     return db.inserts(insertQueriesRay);
 }
 
-/* 
- * Loads the configured block size for the specified scanner
- * 
- * @param {String} pFilterName Name of the filter to use
- * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
- * @returns {Integer} Blocksize of the specified scanner
- */
-DuplicateScannerUtils.GetBlockSizeForScanner = function(pFilterName, pTargetEntity)
-{
-    let blockSizeCondition = SqlCondition.begin()
-                                         .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
-                                         .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-                                         .buildSql("select BLOCK_SIZE from DUPLICATESCANNER");
-    return db.cell(blockSizeCondition);
-}
-
 /* 
  * Searches for a cluster which contains the duplicates specified by the parameterized array. <br />
  * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned.
@@ -518,8 +498,8 @@ DuplicateScannerUtils.GetClusterWithIdenticalDuplicates = function(pDuplicateIds
         return RESULT_NO_CLUSTER_FOUND;
     
     let duplicatesInClusterCondition = SqlCondition.begin()
-                                                    .andPrepare("DUPLICATECLUSTERS.CLUSTERID", foundClusterId)
-                                                    .buildSql("select DUPLICATEID from DUPLICATECLUSTERS");
+    .andPrepare("DUPLICATECLUSTERS.CLUSTERID", foundClusterId)
+    .buildSql("select DUPLICATEID from DUPLICATECLUSTERS");
     let duplicatesInCluster = db.array(db.COLUMN, duplicatesInClusterCondition);
     //logging.log("länge gefundener cluster -> " + duplicatesInCluster.length);
     //logging.log("länge der angefragten ids -> " + pDuplicateIds.length);
@@ -538,10 +518,10 @@ DuplicateScannerUtils.GetClusterWithIdenticalDuplicates = function(pDuplicateIds
 DuplicateScannerUtils.LoadEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad)
 {
     let getRowsConfig = entities.createConfigForLoadingRows()
-                                .entity(pTargetEntity)
-                                .fields(pEntityFields)
-                                .count(pCountRecordsToLoad)
-                                .startrow(pStartRow);
+    .entity(pTargetEntity)
+    .fields(pEntityFields)
+    .count(pCountRecordsToLoad)
+    .startrow(pStartRow);
     return entities.getRows(getRowsConfig);
 }
 /*
@@ -554,9 +534,9 @@ DuplicateScannerUtils.LoadEntityRecords = function(pTargetEntity, pEntityFields,
 DuplicateScannerUtils.LoadResultFields = function(pFilterName, pTargetEntity)
 {
     let duplicateResultFields = SqlCondition.begin()
-        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
-        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-        .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID"
+    .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+    .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+    .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID"
         , "1=2");
 
     return db.array(db.COLUMN, duplicateResultFields);
@@ -576,9 +556,9 @@ DuplicateScannerUtils.LoadResultFields = function(pFilterName, pTargetEntity)
 DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTargetEntity)
 {
     let duplicateIndexFieldConfigurations = SqlCondition.begin()
-        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
-        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-        .buildSql("select dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsic.DUPLICATESCANNER_ID"
+    .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+    .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+    .buildSql("select dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsic.DUPLICATESCANNER_ID"
         , "1=2", "order by dsic.IS_ID_FIELD desc");
 
     return db.table(duplicateIndexFieldConfigurations);
@@ -586,7 +566,7 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
 
 /*
  * Scans for duplicates based on the configured prefilters and the configured indexfields.<br />
- * !! All configured fields values have to be present in "pValuesToCheck" !!<br />
+ * All configured fields values have to be present in "pValuesToCheck"<br />
  * First the prefilters get applied one after another until the count of the returned data is in the allowed threshold.<br />
  * Then, the duplicate search using the index starts. All fields which have been configured will be used here.<br />
  *<br />
@@ -629,7 +609,7 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  *         {
  *             let entityFieldName = resultFields[b];
  *             let indexFieldName = indexResultFields[entityFieldName];
- *             ////logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+ *             //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
  *             //format values
  *         }
  *     }
@@ -643,16 +623,19 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
  * formatToJsonAndCallWsCallback);
  */
 DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pValuesToCheck,
-pFormatValuesConsumeWebserviceCallback)
+    pFormatValuesConsumeWebserviceCallback)
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
     let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
 
     let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
+    //logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
     let entityFieldsToLoad = _DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
-
+    //logging.log("entityFieldsToLoad " + entityFieldsToLoad)
     //If the fields from the prefilter aren't in the entity fields to load, add them manually
     let filterFieldConfigs = _DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
+    //logging.log("filterFieldConfigs " + filterFieldConfigs)
+
     if(filterFieldConfigs.length > 0)
     {
         let INDEX_FILTER_FIELD_NAME = 0;
@@ -660,6 +643,7 @@ pFormatValuesConsumeWebserviceCallback)
         {
             let filterFieldConfig = filterFieldConfigs[i];
             let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
+            //logging.log("filterFieldName" + filterFieldName)
             //logging.log("filterFieldConfig -> " +filterFieldConfig );
             if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
             {
@@ -671,12 +655,13 @@ pFormatValuesConsumeWebserviceCallback)
 
     let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, pValuesToCheck);
     //The first field in this Array must always be the configured id field.
+    //logging.log("ray " + entityFieldValuesRay.toSource())
     let idField = entityFieldValuesRay[0][0];
     let idValue = entityFieldValuesRay[0][1];
 
     return _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
-                        entityFieldValuesRay, resultFields, idField, idValue,
-                         pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
+        entityFieldValuesRay, resultFields, idField, idValue,
+        pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
 }
 
 /*
@@ -691,7 +676,7 @@ pFormatValuesConsumeWebserviceCallback)
  *  let entityResultFields = ["LASTNAME"];
  *  let entityIndexFields = DuplicateScannerUtils.TranslateEntityToIndexFields("Person_entity", entityResultFields);
  *
- *  //logging.log(entityIndexFields["LASTNAME"]);//=> "LASTNAME_value"
+ *  logging.log(entityIndexFields["LASTNAME"]);//=> "LASTNAME_value"
  */
 DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields)
 {
@@ -704,13 +689,14 @@ DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEnti
         if(entityFieldName.startsWith(pEntityName))
             entityFieldName = entityFieldName.replace(pEntityName + ".", "");
 
-         let indexField = indexsearch.lookupIndexField(pEntityName, entityFieldName);
-         entityIndexFields[pEntityFields[i]] = indexField;
+        let indexField = indexsearch.lookupIndexField(pEntityName, entityFieldName);
+        entityIndexFields[pEntityFields[i]] = indexField;
     }
     return entityIndexFields;
 }
 
 /*
+ *
  * Merges the source person into the target person. 
  * This 
  * - replaces the source's with the target's contactid in a predefined set of tables.
@@ -737,7 +723,7 @@ DuplicateScannerUtils.MergePerson = function(pSourceContactId, pTargetContactId)
     updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateResetStandardCommunications(pSourceContactId));
     updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosCurrentAlias, pSourceContactId, pTargetContactId));
     updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId));
-    
+
     updateStatementsSystemAlias = updateStatementsSystemAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosSystemAlias, pSourceContactId, pTargetContactId));
 
     deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId));
@@ -776,6 +762,7 @@ DuplicateScannerUtils.CreateMergeSuccessActivity = function(pSourceContactId, pT
 DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetContactId)
 {
     let updateStatements = [];
+    let updateStatementsSystemAlias = [];
     let deleteStatements = [];
 
     let querySourceOrganisationId = SqlCondition.begin()
@@ -786,7 +773,7 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
     var tableInfosCurrentAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias();
     var tableInfosSystemAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias();
 
-    updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateResetStandardCommunications(pSourceContactId));
+	updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateResetStandardCommunications(pSourceContactId));
     updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosCurrentAlias, pSourceContactId, pTargetContactId));
     updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId));
 
@@ -839,12 +826,12 @@ DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsC
     */
     for (a = 0; a < pDuplicateFieldsConfig.length; a++)
     {
-//        //logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
-//        //logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
+        //        logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
+        //        logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
         let entityField = pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD];
         entityFieldValuesRay.push([entityField, pTargetRecordData[entityField], pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]])
     }
-    return entityFieldValuesRay;
+    return entityFieldValuesRay.length > 0 ? entityFieldValuesRay : [["", ""]];
 }
 
 function _DuplicateScannerUtils() {}
@@ -962,9 +949,9 @@ pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesCons
 _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetEntity)
 {
     let scannerUseExternalWebserviceQuery = SqlCondition.begin()
-        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
-        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-        .buildSql("select EXTERNAL_SERVICE_USAGE_ALLOWED from DUPLICATESCANNER"
+    .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+    .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+    .buildSql("select EXTERNAL_SERVICE_USAGE_ALLOWED from DUPLICATESCANNER"
         , "1=2");
 
     let isUseWebservice = db.cell(scannerUseExternalWebserviceQuery);
@@ -989,11 +976,13 @@ _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetE
  */
 _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pFilterFieldValueRays)
 {
+//    logging.log("#pFilterFieldValueRays#" + JSON.stringify(pFilterFieldValueRays))
     var combinedFilter = {};
 
     for (i = 0; i < pFilterCountCharactersToUseRay.length; i++) 
     {
-        let filter = pFilterCountCharactersToUseRay[i][INDEX_FILTER_CONDITION];
+        var filter = pFilterCountCharactersToUseRay[i][INDEX_FILTER_CONDITION];
+//        logging.log("#1#filter>>" + filter)
         let countCharsOfValueToUse = pFilterCountCharactersToUseRay[i][INDEX_COUNT_CHARS_TO_USE];
         let maxResultsThreshold = pFilterCountCharactersToUseRay[i][INDEX_MAX_RESULTS_THRESHOLD];
         
@@ -1015,14 +1004,14 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
          */
         //logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
         filter.filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.filter.childs, filterValuesObject, countCharsOfValueToUse);
-        ////logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
+        //logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
 
         if(i == 0)
             combinedFilter = filter.filter;
         else
         {
-            ////logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
-            ////logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
+            //logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
+            //logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
             //Extend the current combined filter with the next filter condition to further refine the results
             //It seems to always contain one child element at the root
             //combinedFilter.childs.push(filter.filter.childs);
@@ -1030,7 +1019,7 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
             combinedFilter.childs = newCombinedFilterChilds;
         }
 
-        ////logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
+        //logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
 
         filter.filter = combinedFilter;
         //logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
@@ -1038,23 +1027,23 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
         //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
         //Only load "indexsearch.FIELD_ID" and a resultSet size of 1
         let searchResult = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [],
-        [], 1);//todo use again after this has been fixed!! insert the local id after fix
+            [], 1);//todo use again after this has been fixed!! insert the local id after fix
         //logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
 
-//        if(searchResult[indexsearch.TOTALHITS] < 80)//todo entfernen?!
-//        {
-//            for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
-//            {
-//                //logging.log("Treffer Nr -> " + i);
-//                //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c}
-//                let localId = searchResult[indexsearch.HITS][i]["_local_id_"];
-//                let firstname = searchResult[indexsearch.HITS][i]["firstname_value"];
-//                let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"];
-//                //logging.log("localId -> " + localId);
-//                //logging.log("firstname -> " + firstname);
-//                //logging.log("indexGroup -> " + indexGroup);
-//            }
-//        }
+        //        if(searchResult[indexsearch.TOTALHITS] < 80)//todo entfernen?!
+        //        {
+        //            for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
+        //            {
+        //                logging.log("Treffer Nr -> " + i);
+        //                //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c}
+        //                let localId = searchResult[indexsearch.HITS][i]["_local_id_"];
+        //                let firstname = searchResult[indexsearch.HITS][i]["firstname_value"];
+        //                let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"];
+        //                logging.log("localId -> " + localId);
+        //                logging.log("firstname -> " + firstname);
+        //                logging.log("indexGroup -> " + indexGroup);
+        //            }
+        //        }
 
         let totalHits = searchResult[indexsearch.TOTALHITS]
 
@@ -1142,28 +1131,28 @@ _DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields)
     for (let i = 0; i < pResultFields.length; i++)
     {
         if(pResultFields[i] == indexsearch.FIELD_ID
-        || pResultFields[i] == indexsearch.FIELD_TITLE
-        || pResultFields[i] == indexsearch.FIELD_TYPE
-        || pResultFields[i] == indexsearch.FIELD_DESCRIPTION)
-        {
+            || pResultFields[i] == indexsearch.FIELD_TITLE
+            || pResultFields[i] == indexsearch.FIELD_TYPE
+            || pResultFields[i] == indexsearch.FIELD_DESCRIPTION)
+        {
             resultIndexFields.push(pResultFields[i]);
         }
         else
             resultFields.push(pResultFields[i]);
     }
 
-//    if(resultIndexFields.length == 0 && resultFields.length == 0)
-//        pIndexQuery = pIndexQuery.addResultIndexFields([indexsearch.FIELD_ID]);
-//    else
-//    {
-        if(resultIndexFields.length > 0)
-            pIndexQuery = pIndexQuery.addResultIndexFields(resultIndexFields);
+    //    if(resultIndexFields.length == 0 && resultFields.length == 0)
+    //        pIndexQuery = pIndexQuery.addResultIndexFields([indexsearch.FIELD_ID]);
+    //    else
+    //    {
+    if(resultIndexFields.length > 0)
+        pIndexQuery = pIndexQuery.addResultIndexFields(resultIndexFields);
 
-        if(resultFields.length > 0)
-        {
-            pIndexQuery = pIndexQuery.addResultFields(resultFields);
-        }
-//    }
+    if(resultFields.length > 0)
+    {
+        pIndexQuery = pIndexQuery.addResultFields(resultFields);
+    }
+    //    }
 
     //logging.log("resultFields -> " + resultFields);
     return pIndexQuery;
@@ -1203,8 +1192,8 @@ _DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRay
 
             //logging.log("indexField -> " + indexField);
             var filterTerm = indexsearch.createTerm(entityFieldValue)
-                                        .setIndexField(indexField)
-                                        .setFuzzySearchFactor(0);
+            .setIndexField(indexField)
+            .setFuzzySearchFactor(0);
 
             filterPatternConfig.and(filterTerm);
         }
@@ -1234,20 +1223,20 @@ _DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicat
  */
 _DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicatesRay, pTargetEntity, pClusterId)
 {
-        let duplicatesToInsertQueries = [];
-        let cols = ["ID", "CLUSTERID", "DUPLICATEID", "TARGET_ENTITY"];
+    let duplicatesToInsertQueries = [];
+    let cols = ["ID", "CLUSTERID", "DUPLICATEID", "TARGET_ENTITY"];
 
-        if(pClusterId == undefined || pClusterId == null || pClusterId == "")
-            pClusterId = util.getNewUUID();
+    if(pClusterId == undefined || pClusterId == null || pClusterId == "")
+        pClusterId = util.getNewUUID();
             
-        for (i = 0; i < pDuplicatesRay.length; i++)
-        {
-            let newId = util.getNewUUID();
-            let valuesToInsert = [newId, pClusterId, pDuplicatesRay[i], pTargetEntity];
+    for (let i = 0; i < pDuplicatesRay.length; i++)
+    {
+        let newId = util.getNewUUID();
+        let valuesToInsert = [newId, pClusterId, pDuplicatesRay[i], pTargetEntity];
 
-            duplicatesToInsertQueries.push(["DUPLICATECLUSTERS", cols, null, valuesToInsert]);
-        }
-        return duplicatesToInsertQueries;
+        duplicatesToInsertQueries.push(["DUPLICATECLUSTERS", cols, null, valuesToInsert]);
+    }
+    return duplicatesToInsertQueries;
 }
 
 /*
@@ -1273,10 +1262,11 @@ _DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery = functio
     var selectAssignableIdsOfTargetContactQuery = "select " + pAssignableIdColumn
                                                     + " from " + pTableName
                                                     + " where " + pContactIdColumn + " = '" + pTargetContactId + "'";
+
     let updateCondition = pAssignableIdColumn
-                            + " not in"
-                            + " (" + selectAssignableIdsOfTargetContactQuery + ")"
-                            + " and " + pContactIdColumn + " = '" + pSourceContactId + "'";
+    + " not in"
+    + " (" + selectAssignableIdsOfTargetContactQuery + ")"
+    + " and " + pContactIdColumn + " = '" + pSourceContactId + "'";
 
     return [[pTableName, [pContactIdColumn], null, [pTargetContactId], updateCondition]];
 }
@@ -1284,13 +1274,17 @@ _DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery = functio
 
 _DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery = function (pTableName, pContactIdColumn, pAssignableIdColumn, pSourceContactId, pTargetContactId, updateStatements)
 {
+    
+    // Example of the statement this builds (ids shortened to placeholders):
+    // DELETE FROM CAMPAIGNPARTICIPANT WHERE (CAMPAIGN_ID in (select ab.CAMPAIGN_ID from (select CAMPAIGN_ID, CONTACT_ID from CAMPAIGNPARTICIPANT) ab where ab.CONTACT_ID = '<targetContactId>') and CAMPAIGN_ID = '<campaignId>')
+
     var selectAssignableIdsOfTargetContactQuery = "select " + pAssignableIdColumn
                                                     + " from " + pTableName
                                                     + " where " + pContactIdColumn + " = '" + pTargetContactId + "'";
 
     let deleteCondition = pAssignableIdColumn + " in"
-                            + " (" + selectAssignableIdsOfTargetContactQuery + ")"
-                            + " and " + pAssignableIdColumn + " = '" + pSourceContactId + "'";
+    + " (" + selectAssignableIdsOfTargetContactQuery + ")"
+    + " and " + pAssignableIdColumn + " = '" + pSourceContactId + "'"; // NOTE(review): comparing the assignable-id column to a *contact* id looks wrong — should this be pContactIdColumn? verify before merge
     let recordsToDelete = [];
     recordsToDelete.push([pTableName, deleteCondition]);
     return recordsToDelete;
@@ -1436,9 +1430,9 @@ _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias = function()
 _DuplicateScannerUtils._loadFilters = function(pFilterName, pTargetEntity)
 {
     let query = "select FILTER_CONDITION, COUNT_CHARACTERS_TO_USE, MAX_RESULTS_THRESHOLD from DUPLICATESCANNERPREFILTERCONFIG"
-                + " join DUPLICATESCANNER on DUPLICATESCANNER.ID = DUPLICATESCANNERPREFILTERCONFIG.DUPLICATESCANNER_ID"
-                + " where FILTER_NAME = '" + pFilterName + "'"
-            + " and ENTITY_TO_SCAN_NAME = '" + pTargetEntity + "'";
+    + " join DUPLICATESCANNER on DUPLICATESCANNER.ID = DUPLICATESCANNERPREFILTERCONFIG.DUPLICATESCANNER_ID"
+    + " where FILTER_NAME = '" + pFilterName + "'"
+    + " and ENTITY_TO_SCAN_NAME = '" + pTargetEntity + "'";
     return db.table(query);
 }
 
@@ -1469,11 +1463,12 @@ _DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, p
 
             if(_DuplicateScannerUtils._isNotNullAndANumber(pCountCharsOfValueToUse)
                 && _DuplicateScannerUtils._isValueLongerThanCharsToUse(fieldValue.length, pCountCharsOfValueToUse))
-            {
+            {
                 fieldValue = fieldValue.substring(0, pCountCharsOfValueToUse);
             }
 
             pJsonRootNode[filterChildNode].value = fieldValue;
+            pJsonRootNode[filterChildNode].key = fieldValue; // NOTE(review): 'key' is set to the same (possibly truncated) value as 'value' — confirm this is intended for the filter JSON
         }
         else
         {
@@ -1493,8 +1488,8 @@ _DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, p
 _DuplicateScannerUtils._isValueLongerThanCharsToUse = function(pValueLength, pCountCharsOfValueToUse)
 {
     return !isNaN(pCountCharsOfValueToUse)
-            && pCountCharsOfValueToUse > 0
-            && pValueLength > pCountCharsOfValueToUse;
+    && pCountCharsOfValueToUse > 0
+    && pValueLength > pCountCharsOfValueToUse;
 }
 
 /*