From ed144c8e70726d734aae26a097ab461826f4b215 Mon Sep 17 00:00:00 2001
From: "d.buechler" <d.buechler@adito.de>
Date: Tue, 17 Sep 2019 09:18:43 +0200
Subject: [PATCH] The prefilter JSON is now correctly created and
 returned in case of a successful search (results found within threshold)

---
 .../testduplicatescanner/onActionProcess.js   |  2 +-
 process/DuplicateScanner_lib/process.js       | 27 +++++++++++--------
 2 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index 86369d8f6be..ae46a45da3f 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -27,7 +27,7 @@ var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from
                             + " join PERSON on PERSONID = PERSON_ID";
 var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"];
 
-var filterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"], ["FIRSTNAME", "Narkus"], ["LASTNAME", "Bltinger"]];
+var filterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"], ["FIRSTNAME", "Narkus"], ["LASTNAME", "Bltinger"], ["GENDER", "M"]];
 
 //DuplicateScannerUtils.ScanForDuplicatesIndex = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
 //pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index e0b7817a485..a4438a20c21 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -414,7 +414,7 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
 
     configuredFilters = [ignoredRecordFilter].concat(configuredFilters);
     
-    logging.log("configuredFilters -> " + configuredFilters);
+    logging.log("Found filters -> " + configuredFilters);
     
     let preFilter = DuplicateScannerUtils._applyPreFilterIndex(pTargetEntity, configuredFilters, pFilterFieldValueRays);
     
@@ -471,14 +471,14 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
             filterValuesObject[pFilterFieldValueRays[a][0]] = pFilterFieldValueRays[a][1];
         }
 
-        logging.log("filterValuesObject -> " + JSON.stringify(filterValuesObject));
+        logging.log("filterValuesObject zum füllen des jsons -> " + JSON.stringify(filterValuesObject));
         /*
          * Insert the values into the current filter. Has to be here so that only the new filter
          * and therefore the combinedFilter incrementally gets filled and not always everything multiple times.
          */
-        logging.log("filter vor insert-> " + JSON.stringify(filter));
+        logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
         filter.filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.filter.childs, filterValuesObject, countCharsOfValueToUse);
-        logging.log("gefüllter filter prefilter index-> " + JSON.stringify(filter));
+        logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
         
         
         
@@ -486,22 +486,27 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
             combinedFilter = filter.filter;
         else
         {
-            logging.log("aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
-            
+            logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
+            logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
             //Extend the current combined filter with the next filter condition to further refine the results
             //It seems to always contain one child element at the root
-            combinedFilter.childs.push(filter.filter.childs[0]);
+            //combinedFilter.childs.push(filter.filter.childs);
+            let newCombinedFilterChilds = combinedFilter.childs.concat(filter.filter.childs);
+            combinedFilter.childs = newCombinedFilterChilds;
         }
         
-        logging.log("combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
+        logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
         
 //        let completeFilter = JSON.parse(filter);
 //        completeFilter.filter = combinedFilter;
-        logging.log("completeFilter -> " + JSON.stringify(filter));
+        
+        filter.filter = combinedFilter;
+        logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
+        
         //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
         //Only load "indexsearch.FIELD_ID" and a resultSet size of 1
         let totalHits = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [], 
-        [indexsearch.FIELD_ID], ["Person_entity.CONTACTID"], 1)[indexsearch.TOTALHITS];
+        [indexsearch.FIELD_ID], ["Person_entity.FIRSTNAME"], 1)[indexsearch.TOTALHITS];
         
         logging.log("totalHits -> " + totalHits);
         
@@ -526,7 +531,7 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
             
             
             logging.log("Im return valider Filter  -> " + combinedFilter);
-            return combinedFilter;
+            return JSON.stringify(filter);
         }
     }
     logging.log("zu viele rows und keine filter mehr -> ");
-- 
GitLab