From 984c07c376b5387b642413d0600896efeaed7b5f Mon Sep 17 00:00:00 2001
From: "d.buechler" <d.buechler@adito.de>
Date: Mon, 2 Dec 2019 13:54:20 +0100
Subject: [PATCH] Replace SqlCondition with the latest Sql_lib API calls

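SqlCondition-based lookups in the duplicate scanner library are now
written with the fluent Sql_lib builders (newSelect/newWhere), e.g. the
scanner id field lookup:

    newSelect("ID_FIELD_NAME")
        .from("DUPLICATESCANNER")
        .where("DUPLICATESCANNER.FILTER_NAME", pFilterName)
        .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
        .cell();

In addition, the scan pattern placeholders now carry a small field config
instead of a bare field name. BuildEntityFieldConfigValueRays parses each
placeholder as JSON and applies the optional "exclude", "length" and
"emptyall" settings before the value is substituted into the pattern.
A minimal sketch of that preparation, assuming the stored config uses
JSON quoting so that JSON.parse accepts it (the concrete field and value
are only illustrative):

    //placeholder content as taken from DUPLICATESCANNER.SCAN_PATTERN
    let fieldConfig = JSON.parse('{"entityfield": "FIRSTNAME", "length": 3}');
    let value = "Peter";
    if (fieldConfig.length)
        value = value.substr(0, parseInt(fieldConfig.length)) + "*"; //=> "Pet*"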
---
 process/DuplicateScanner_lib/process.js | 203 +++++++++++++-----------
 1 file changed, 111 insertions(+), 92 deletions(-)

diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 1ec852b8ea..4d090a9a62 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -36,8 +36,8 @@ DuplicateScannerUtils.loadFilters = function(pFilterName, pTargetEntity)
                     .from("DUPLICATESCANNERPREFILTERCONFIG")
                     .join("DUPLICATESCANNER", "DUPLICATESCANNER.ID = DUPLICATESCANNERPREFILTERCONFIG.DUPLICATESCANNER_ID")
                     .where("DUPLICATESCANNER.FILTER_NAME", pFilterName)
-                    .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pFilterName);
-                    
+                    .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity);
+
     return query.table();
 }
 
@@ -301,22 +301,24 @@ DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEnti
     let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
     let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
-    let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromPattern(indexPattern);
+    let entityFieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern);
     let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity);
-
-    //Add the id field to ensure that it's on the first position
-    entityFields = [entityIdField].concat(entityFields);
-
+    
     let alreadyIdentifiedIds = [];
+    
+    let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromFieldConfigs(entityFieldConfigs);
 
+    entityFields.push(entityIdField);
+    
     let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFields, 0, pRecordsBlockSize);
 
     let currentRecordIndex = pRecordsBlockSize;
     while(targetRecords.length > 0)
     {
+        
         foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pTargetEntity, targetRecords,
-            entityFields, resultFields, useExternalWebservice,
-            pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds, indexPattern);
+            entityFieldConfigs, resultFields, useExternalWebservice,
+            pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds, indexPattern, entityIdField);
 
         alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
 
@@ -332,34 +334,33 @@ DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEnti
 }
 
 DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
-    pEntityFields, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds, pIndexPattern)
+    pEntityFieldConfigs, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds, pIndexPattern, pEntityIdField)
 {
     let foundDuplicateIds = [];
 
     //If the contact id loading query has no results, stop.
     //No ids should be deleted if an error has been made in this query.
     if(pTargetRecordsData.length <= 0)
-        return undefined;
+        return;
 
     //First it gets checked if the current id has already been identified. If that's the case it'll continue with the next.
     //Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
     var duplicatesToInsertQueries = [];
     for (b = 0; b < pTargetRecordsData.length; b++)
     {
-        let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(pEntityFields, pTargetRecordsData[b]);
+        let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldConfigValueRays(pEntityFieldConfigs, pTargetRecordsData[b]);
 
         //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic
-        let idField = entityFieldValuesRay[0][0];
-        let idValue = entityFieldValuesRay[0][1];
+        let idValue = pTargetRecordsData[b][pEntityIdField];
 
         //If the current Id has already been identified, continue
-        if(pAlreadyIdentifiedIds.indexOf(pTargetRecordsData[b][idField]) > -1)
+        if(pAlreadyIdentifiedIds.indexOf(pTargetRecordsData[b][pEntityIdField]) > -1)
         {
             continue;
         }
 
         let foundDuplicates = _DuplicateScannerUtils._scanForDuplicates(pTargetEntity,
-            entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern)
+            entityFieldValuesRay, pResultFields, pEntityIdField, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern)
 
         if(foundDuplicates == null || foundDuplicates.length == 0)
         {
@@ -375,7 +376,7 @@ DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
         pAlreadyIdentifiedIds = pAlreadyIdentifiedIds.concat(foundDuplicateIds);
 
         //The duplicates list contains only the found duplicates to the original id, therefore it get's added manually
-        foundDuplicateIds.push(pTargetRecordsData[b][idField]);
+        foundDuplicateIds.push(pTargetRecordsData[b][pEntityIdField]);
 
         let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds, pTargetEntity)
         duplicatesToInsertQueries = duplicatesToInsertQueries.concat(insertQueriesRay);
@@ -385,10 +386,10 @@ DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
     return foundDuplicateIds;
 }
 
-/*
+/* 
  * Searches for a cluster which contains the duplicates specified by the parameterized array. <br />
  * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned.
- *
+ * 
  * @param {String} pNewRecordId The id of the record which was used to scan for duplicates
  * @param {String[]} pDuplicateIds Duplicate ids used to search for a cluster containing them
  * @param {String} pTargetEntity Entity which has been configured
@@ -397,17 +398,17 @@ DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
 DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pDuplicateIds, pTargetEntity)
 {
     let duplicateIds = [];
-    //Run thru every duplicate result and read out the id.
+    //Run thru every duplicate result and read out the id. 
     //Do it now to have a simple array on all usages lateron.
     for (let i = 0; i < pDuplicateIds.length; i++)
     {
         let duplicateContactId = pDuplicateIds[i][indexsearch.FIELD_ID];
         duplicateIds.push(duplicateContactId);
     }
-
+    
     let clusterId = DuplicateScannerUtils.GetClusterWithIdenticalDuplicates(duplicateIds);
-
-    //If no cluster has beend found, create a new one with all found duplicateIds,
+    
+    //If no cluster has been found, create a new one with all found duplicateIds,
     //otherwise add the id to the existing cluster
     let idRayToInsert = [];
     if(clusterId == undefined || clusterId == null || clusterId == "")
@@ -417,16 +418,16 @@ DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pDuplicateIds
     }
     else
         idRayToInsert.push(pNewRecordId);
-
+    
     insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(idRayToInsert, pTargetEntity, clusterId)
 
     return db.inserts(insertQueriesRay);
 }
 
-/*
+/* 
  * Searches for a cluster which contains the duplicates specified by the parameterized array. <br />
  * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned.
- *
+ * 
  * @param {String[]} pDuplicateIds Duplicate ids which should be in the same cluster
  * @returns {String} Id of the cluster which contains all given duplicate ids or ""
  */
@@ -436,12 +437,12 @@ DuplicateScannerUtils.GetClusterWithIdenticalDuplicates = function(pDuplicateIds
 
     if(pDuplicateIds.length < 1)
         return RESULT_NO_CLUSTER_FOUND;
-
+    
     let clusterIdSelect = newSelect("distinct CLUSTERID")
                                 .from("DUPLICATECLUSTERS")
                                 .where();
-
-    for (let i = 0; i < pDuplicateIds.length; i++)
+    
+    for (let i = 0; i < pDuplicateIds.length; i++) 
     {
         clusterIdSelect.and("DUPLICATECLUSTERS.CLUSTERID", newSelect("CLUSTERID").from("DUPLICATECLUSTERS").where("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateIds[i]),
                                         SqlBuilder.IN());
@@ -451,16 +452,16 @@ DuplicateScannerUtils.GetClusterWithIdenticalDuplicates = function(pDuplicateIds
 
     if(foundClusterId == null || foundClusterId == "")
         return RESULT_NO_CLUSTER_FOUND;
-
+    
     let duplicatesInCluster = newSelect("DUPLICATEID")
                                             .from("DUPLICATECLUSTERS")
                                             .where("DUPLICATECLUSTERS.CLUSTERID", foundClusterId)
                                             .arrayColumn();
 
-    /*
+    /* 
      * A cluster has been searched which contains all duplicate ids as specified via parameter.
      * There's the possibility that this cluster contains even more duplicates than specified via the parameter.
-     * In this case, the cluster and the parameterized duplicateids are not identical
+     * In this case, the cluster and the parameterized duplicateids are not identical 
      * which means a new cluster has to be created.
      */
     if(pDuplicateIds.length != duplicatesInCluster.length)
@@ -524,18 +525,18 @@ DuplicateScannerUtils.LoadResultFields = function(pFilterName, pTargetEntity)
  * let targetEntity = "Person_entity";
  * let valuesToCheck = {};
  * var entityModel = project.getEntityStructure(targetEntity);
- *
+ * 
  * //Read the values of all available entity fields and write the fieldname/value combination
  * //as key/value pairs into an object. This is used to trigger the scan for duplicates
  * let fieldValue = "";
  * let entityFields = [];
- * for (fieldname in entityModel.fields)
- * {
- *     field = entityModel.fields[fieldname];
+ * for (fieldname in entityModel.fields) 
+ * { 
+ *     field = entityModel.fields[fieldname]; 
  *     if(field.fieldType == project.ENTITYFIELDTYPE_FIELD)
  *     {
  *         fieldValue = vars.get("$field." + field.name);
- *
+ *         
  *         if(fieldValue != null && fieldValue != "")
  *         {
  *             valuesToCheck[field.name] = fieldValue;
@@ -571,22 +572,18 @@ DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, p
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
     let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
-
+    
     let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
-    let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromPattern(indexPattern);
+    let entityFieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern);  
     let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity);
-
-    //Add the id field to ensure that it's on the first position
-    entityFields = [entityIdField].concat(entityFields);
-
-    let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(entityFields, pValuesToCheck);
-
+    
+    let entityFieldConfigValuesRay = DuplicateScannerUtils.BuildEntityFieldConfigValueRays(entityFieldConfigs, pValuesToCheck);
+   
    //The first field in this Array must always be the configured id field.
-    let idField = entityFieldValuesRay[0][0];
-    let idValue = entityFieldValuesRay[0][1];
+    let idValue = pValuesToCheck[entityIdField];
 
     return _DuplicateScannerUtils._scanForDuplicates(pTargetEntity,
-        entityFieldValuesRay, resultFields, idField, idValue,
+        entityFieldConfigValuesRay, resultFields, entityIdField, idValue,
         pFormatValuesConsumeWebserviceCallback, useExternalWebservice, indexPattern)
 }
 
@@ -621,8 +618,9 @@ DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEnti
 }
 
 /*
- * Merges the source person into the target person.
- * This
+ *
+ * Merges the source person into the target person. 
+ * This 
  * - replaces the source's with the target's contactid in a predefined set of tables.
  * - resets the standard communications of the source contact and keeps the ones of the target.
  * - updates participants of campaigns and removes obsolet ones(which would be duplicates)
@@ -714,11 +712,11 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
 }
 
 /*
- * Creates an array of arrays containing the entity field name paired with it's value.
+ * Creates an array of arrays containing the entity field config paired with its value.
  *
  * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration()
  * @param {{"key", "value"}} pTargetRecordData One record containing the values for the configured fields. It's in the format of {"key(=EntityFieldName", "Value"}
- * @return {[[]]} An array of arrays containing the entity field name an its value. [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"]]
+ * @return {[[]]} An array of arrays containing the entity field config and its value. [["{entityfield: FIRSTNAME}", "PETER"]]
  * @example
  * pDuplicateFieldsConfig
  * ["CONTACTID", true, false]
@@ -727,12 +725,12 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
  * pTargetRecordData
  * ["d786045c-8b21-4f22-b6d9-72be9f61c04d", "PETER"]
  *
- * => [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"], ["FIRSTNAME", "PETER"]]
+ * => [["{entityfield: FIRSTNAME}", "PETER"]]
  */
-DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsConfig, pTargetRecordData)
+DuplicateScannerUtils.BuildEntityFieldConfigValueRays = function(pDuplicateFieldsConfig, pTargetRecordData)
 {
     let INDEX_CONFIG_ENTITY_FIELD = 0;
-    let entityFieldValuesRay = [];
+    let entityFieldConfigValuesRay = [];
     /*
     * Based on the parameterized filter field names and the values loaded via the query,
     * an array which contains records in the style of ["FilterFieldName", "FilterFieldValueFromQuery"] gets created.
@@ -740,15 +738,32 @@ DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsC
     */
     for (a = 0; a < pDuplicateFieldsConfig.length; a++)
     {
-        let entityField = pDuplicateFieldsConfig[a];
+        let fieldConfig = JSON.parse("{" + pDuplicateFieldsConfig[a] + "}");
+        let entityField = fieldConfig.entityfield;
         let entityFieldValue = pTargetRecordData[entityField];
-
         if(entityFieldValue == null)
             entityFieldValue = "";
+        
+        let exclude = fieldConfig.exclude;
+        if ( exclude )
+        {   
+            for(let i = 0; i < exclude.length; i++)
+                entityFieldValue = entityFieldValue.replace(new RegExp(exclude[i], "gi"), "");     
+        }
+        
+        //If a length is configured, truncate the value and append a wildcard
+        let valuelength = fieldConfig.length;
+        if ( valuelength )
+            entityFieldValue = entityFieldValue.substr(0, parseInt(valuelength)) + "*";
+        
+        let emptyall = fieldConfig.emptyall; 
+        if ( emptyall && entityFieldValue == "")
+            entityFieldValue = "*";
+        
         entityFieldValue = entityFieldValue.trim();
-        entityFieldValuesRay.push([entityField, entityFieldValue]);
+        entityFieldConfigValuesRay.push([pDuplicateFieldsConfig[a], entityFieldValue]);
     }
-    return entityFieldValuesRay.length > 0 ? entityFieldValuesRay : [["", ""]];
+    return entityFieldConfigValuesRay.length > 0 ? entityFieldConfigValuesRay : [["", ""]];
 }
 
 DuplicateScannerUtils.GetBlockSize = function()
@@ -767,6 +782,18 @@ var INDEX_TABLE_NAME = 0;
 var INDEX_COLUMN_NAME = 1;
 var INDEX_CONDITION = 2;
 
+_DuplicateScannerUtils._loadEntityFieldsFromFieldConfigs = function(pEntityFieldConfigs)
+{
+    let fieldNames = [];
+    for (i = 0; i < pEntityFieldConfigs.length; i++) 
+    {
+        let fieldConfig = JSON.parse("{" + pEntityFieldConfigs[i] + "}");
+        let entityField = fieldConfig.entityfield;
+        fieldNames.push(entityField);
+    }
+    return fieldNames;
+}
+
 _DuplicateScannerUtils._buildUpdateResetStandardCommunications = function(pSourceContactId)
 {
     return [["COMMUNICATION", ["ISSTANDARD"], null, ["0"], newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId).build()]];
@@ -775,7 +802,7 @@ _DuplicateScannerUtils._buildUpdateResetStandardCommunications = function(pSourc
 /*
  * Gets the Pattern for the scanner
  * A pattern usually contains placeholders in the style of "{entityFieldName]"
- *
+ * 
  * @param {String} pScannerName Name of the filter to use
  * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
  * @returns {String} Scan pattern as string
@@ -783,35 +810,30 @@ _DuplicateScannerUtils._buildUpdateResetStandardCommunications = function(pSourc
 _DuplicateScannerUtils._loadIndexPattern = function(pScannerName, pTargetEntity)
 {
     let scanPattern = newSelect("SCAN_PATTERN")
-                .from("DUPLICATESCANNER")
-                .where("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-                .and("DUPLICATESCANNER.FILTER_NAME", pScannerName)
-                .cell();
+                            .from("DUPLICATESCANNER")
+                            .where("DUPLICATESCANNER.FILTER_NAME", pScannerName)
+                            .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+                            .cell();
     scanPattern = scanPattern.trim();
-
     return scanPattern;
 }
 
-_DuplicateScannerUtils._loadEntityFieldsFromPattern = function(indexPattern)
+_DuplicateScannerUtils._loadEntityFieldConfigsFromPattern = function(indexPattern)
 {
     return indexPattern.match(/[^{}]+(?=\})/g);
 }
 
-_DuplicateScannerUtils._replacePlaceholderForValuesInPattern = function(pIndexPattern, pEntityFieldValueRays, pDefaultValue)
+_DuplicateScannerUtils._replacePlaceholderForValuesInPattern = function(pIndexPattern, pEntityFieldValueRays)
 {
     let INDEX_ENTITY_FIELD_NAME = 0;
     let INDEX_ENTITY_FIELD_VALUE = 1;
 
     let placeholder = "";
     let fieldValue = "";
-    for (let i = 0; i < pEntityFieldValueRays.length; i++)
+    for (let i = 0; i < pEntityFieldValueRays.length; i++) 
     {
         placeholder = "{" + pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_NAME] + "}";
-        fieldValue = pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_VALUE];
-
-        if(pDefaultValue != null && (fieldValue == null || fieldValue == ""))
-            fieldValue = pDefaultValue;
-
+        fieldValue = pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_VALUE];        
         pIndexPattern = pIndexPattern.replace(placeholder, fieldValue);
     }
     return pIndexPattern;
@@ -819,37 +841,36 @@ _DuplicateScannerUtils._replacePlaceholderForValuesInPattern = function(pIndexPa
 
 _DuplicateScannerUtils._loadEntityIdField = function(pFilterName, pTargetEntity)
 {
-     let loadEntityIdField = newSelect("ID_FIELD_NAME")
+    return newSelect("ID_FIELD_NAME")
                 .from("DUPLICATESCANNER")
-                .where("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-                .and("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+                .where("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+                .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
                 .cell();
-    return loadEntityIdField;
 }
 
 /*
  * @see DuplicateScannerUtils.ScanForDuplicates for the documentation
  */
-_DuplicateScannerUtils._scanForDuplicates = function(pTargetEntity, pEntityFieldNameValueRays,
+_DuplicateScannerUtils._scanForDuplicates = function(pTargetEntity, pEntityFieldConfigValuesRay,
 pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern)
 {
     //No filterfields/indexpattern => No indexsearch
-    if(pEntityFieldNameValueRays.length < 1 || pIndexPattern == null || pIndexPattern == "")
+    if(pEntityFieldConfigValuesRay.length < 1 || pIndexPattern == null || pIndexPattern == "")
         return null;
-
+    
     let possibleDuplicates = [];
     let ignoreSourceRecordPattern = _DuplicateScannerUtils._getIgnoreSourceRecordPattern(pRecordIdFieldToIgnore, pRecordIdValueToIgnore);
 
-    let indexPatternWithValues = _DuplicateScannerUtils._replacePlaceholderForValuesInPattern(pIndexPattern, pEntityFieldNameValueRays, "*");
+    let indexPatternWithValues = _DuplicateScannerUtils._replacePlaceholderForValuesInPattern(pIndexPattern, pEntityFieldConfigValuesRay);
     indexPatternWithValues = ignoreSourceRecordPattern + indexPatternWithValues + ")";
 
     possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, indexPatternWithValues, pResultFields, 100);
-
+    
     if(possibleDuplicates == null)
         return null;
 
     possibleDuplicates = possibleDuplicates[indexsearch.HITS];
-
+    
     if(pUseExternalWebservice && pFormatValuesConsumeWebserviceCallback != null)
         possibleDuplicates = pFormatValuesConsumeWebserviceCallback.apply(this, [possibleDuplicates]);
 
@@ -876,7 +897,7 @@ _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetE
 /*
  * Runs the indexsearch based on the given parameters.
  * If the "pEntityFieldValueRays" is empty, no search will be executed.
- *
+ * 
  * @param {String} pTargetEntity Entity which has been configured
  * @param {String} pIndexPatternWithValues The pattern used to search. Has to contain the values already.
  * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
@@ -888,15 +909,13 @@ _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pIndexPatternW
     //The indexPattern can't be null because it is required to run the search.
     if(pIndexPatternWithValues == null || pIndexPatternWithValues == "")
         return null;
-    
     let indexQuery = indexsearch.createIndexQuery()
                                 .setPattern(pIndexPatternWithValues)
                                 .setEntities([pTargetEntity])
-                                //.setDefaultOperator(indexsearch.OPERATOR_AND)
+                                // .setDefaultOperator(indexsearch.OPERATOR_AND)
                                 //.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME", "Person_entity.CONTACTID")
                                 //.setRows(pResultSetRows);
     indexQuery = _DuplicateScannerUtils._setResultFields(indexQuery, pResultFields);
-
     return indexsearch.searchIndex(indexQuery);
 }
 
@@ -1013,17 +1032,17 @@ _DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery = functio
 
 _DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery = function (pTableName, pContactIdColumn, pAssignableIdColumn, pSourceContactId, pTargetContactId, updateStatements)
 {
-
-//DELETE FROM CAMPAIGNPARTICIPANT
-// WHERE ( CAMPAIGN_ID in (select ab.CAMPAIGN_ID from (select CAMPAIGN_ID, CONTACT_ID from CAMPAIGNPARTICIPANT) ab where ab.CONTACT_ID = '64a51ec3-e75d-4415-8aa2-a00a1e9be0b0') and CAMPAIGN_ID = '51960918-3b24-4bac-8f1c-3892bf210f6d')
+    
+//DELETE FROM CAMPAIGNPARTICIPANT 
+// WHERE ( CAMPAIGN_ID in (select ab.CAMPAIGN_ID from (select CAMPAIGN_ID, CONTACT_ID from CAMPAIGNPARTICIPANT) ab where ab.CONTACT_ID = '64a51ec3-e75d-4415-8aa2-a00a1e9be0b0') and CAMPAIGN_ID = '51960918-3b24-4bac-8f1c-3892bf210f6d') 
 
     var selectAssignableIdsOfTargetContactQuery = newSelect(pAssignableIdColumn)
                                                         .from(pTableName)
                                                         .where([pTableName, pContactIdColumn], pTargetContactId);
-                                                        
+
     let deleteCondition = newWhere([pTableName, pAssignableIdColumn], selectAssignableIdsOfTargetContactQuery, SqlBuilder.IN())
                                 .and([pTableName, pAssignableIdColumn], pSourceContactId)
-        
+
     let recordsToDelete = [];
     recordsToDelete.push([pTableName, deleteCondition.build()]);
     return recordsToDelete;
-- 
GitLab