From 35101183a925166f9d01c534bc548f37dd51a1ef Mon Sep 17 00:00:00 2001
From: "d.buechler" <d.buechler@adito.de>
Date: Thu, 19 Sep 2019 10:42:35 +0200
Subject: [PATCH] The result fields are now loaded and used in the duplicate
 search. Refactored toward a cleaner code structure; added more checks and
 case handling to increase stability.
</Subject>

---
 .../recordcontainer/contentProcess.js         |   1 -
 process/DuplicateScanner_lib/process.js       | 138 +++++++++++++-----
 2 files changed, 102 insertions(+), 37 deletions(-)

diff --git a/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js b/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js
index 4e268d83ca..3cd631af89 100644
--- a/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js
+++ b/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js
@@ -33,7 +33,6 @@ if(selectedId)
 }
 else
 {
-    logging.log("was nun -> ");
     if(targetEntity == "Person_entity")
     {
 
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index b7dc11c264..2d0d2e4928 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -346,17 +346,19 @@ pQueryTargetRecords, pDuplicateFieldsConfig)
      * First it gets checked if the current id has already been identified. If that's the case it'll continue with the next.
      * Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
      */
+    let resultFields = DuplicateScannerUtils._loadResultFields(pFilterName, pTargetEntity);
+    logging.log("configured resultFields -> " + resultFields);
     var duplicatesToInsertQueries = [];
     for (b = 0; b < targetRecordsData.length; b++) 
     {
         logging.log("b -> " + b);
         logging.log("Neuer Record -> " + targetRecordsData[b]);
-//        logging.log("indexOf(targetRecordsData[b] -> " + alreadyIdentifiedIds.indexOf(targetRecordsData[b]));
+        //logging.log("indexOf(targetRecordsData[b] -> " + alreadyIdentifiedIds.indexOf(targetRecordsData[b]));
         
         //If the current Id has already been identified, continue
         if(alreadyIdentifiedIds.indexOf(targetRecordsData[b][0]) > -1)
             continue;
-//        logging.log("contactid noch nicht bearbeitet -> " + targetRecordsData[b][0]);
+        //logging.log("contactid noch nicht bearbeitet -> " + targetRecordsData[b][0]);
         
         
         let entityFieldValuesRay = DuplicateScannerUtils._buildEntityFieldNameValueRays(pDuplicateFieldsConfig, targetRecordsData[b]);
@@ -369,12 +371,12 @@ pQueryTargetRecords, pDuplicateFieldsConfig)
         logging.log("idValue -> " + idValue);
         
         let foundDuplicates = DuplicateScannerUtils.ScanForDuplicatesIndex(pFilterName, pTargetEntity,
-                                entityFieldValuesRay, [/*TODO Hier kommen die Resultfields rein*/], idField, idValue)
+                                entityFieldValuesRay, resultFields, idField, idValue)
         
-//        logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
-logging.log("foundDuplicates -> " + foundDuplicates);
-            logging.log("foundDuplicates[indexsearch.TOTALHITS] -> " + foundDuplicates[indexsearch.TOTALHITS]);
-        if(foundDuplicates[indexsearch.TOTALHITS] == 0)
+        //        logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
+        logging.log("foundDuplicates -> " + foundDuplicates);
+        logging.log("foundDuplicates[indexsearch.TOTALHITS] -> " + foundDuplicates[indexsearch.TOTALHITS]);
+        if(foundDuplicates == null || foundDuplicates[indexsearch.TOTALHITS] == 0)
             continue;
         
  
@@ -457,6 +459,25 @@ DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration = function(pFilterNa
     return db.table(duplicateIndexFieldConfigurations);
 }
 
+DuplicateScannerUtils._loadResultFields = function(pFilterName, pTargetEntity)
+{
+//    select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic
+//    join DUPLICATESCANNER ds on ds.ID = dsic.DUPLICATESCANNER_ID
+//    where ds.FILTER_NAME = 'PersonDuplicates'
+//    and ds.ENTITY_TO_SCAN_NAME = 'Person_entity'
+//    order by dsic.IS_ID_FIELD desc
+
+    let duplicateResultFields = SqlCondition.begin()
+        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+        .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID"
+        , "1=2");
+    
+    logging.log("duplicateResultFields condition-> " + duplicateResultFields);
+    return db.array(db.COLUMN, duplicateResultFields);
+}
+
+
 DuplicateScannerUtils.ScanForDuplicatesIndex = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
 pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
 {
@@ -473,7 +494,7 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
     logging.log("preFilter welcher Elemente im erlaubten bereich ausgibt -> " + preFilter);
     
     if(preFilter == null)
-        return [];
+        return null;
     
     let useExternalWebservice = false;//todo
     
@@ -490,7 +511,7 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
         
         //(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultIndexFields, pResultFields, pResultSetRows)
         //run actual index duplicate search
-        possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, [/*indexsearch.FIELD_ID*/], ["Person_entity.FIRSTNAME"], 100);
+        possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100);
         logging.log("possibleDuplicates -> " + possibleDuplicates);
     }
     
@@ -558,11 +579,11 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
         //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
         //Only load "indexsearch.FIELD_ID" and a resultSet size of 1
         let searchResult = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [], 
-        [/*indexsearch.FIELD_ID*/], ["Person_entity.FIRSTNAME"], 1);
+        ["Person_entity.FIRSTNAME"], 1);
         logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
         logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS][0]);
         
-        if(searchResult[indexsearch.HITS].length < 80)//todo entfernen?!
+        if(searchResult[indexsearch.TOTALHITS] < 80)//todo entfernen?!
         {
             for (let i = 0; i < searchResult[indexsearch.HITS].length; i++) 
             {
@@ -594,14 +615,7 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
         }
         else
         {
-            //we're in the treshold, return the data
-//            let resultFields = []
-//            let resultRecords = _callIndexSearch(pTargetEntity, combinedFilter, resultFields, 1)[indexsearch.TOTALHITS];
-            
-            //loadRowsConfig = loadRowsConfig.fields(pTargetEntityResultFields);
-            //let resultRows = entities.getRows(loadRowsConfig);
-            
-            
+            //we're in the threshold, return the valid filter. The filter gets used later on.
             logging.log("Im return valider Filter  -> " + JSON.stringify(filter));
             return JSON.stringify(filter);
         }
@@ -610,29 +624,80 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
     return null;
 }
 
-DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultIndexFields, pResultFields, pResultSetRows)
+DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultFields, pResultSetRows)
 {
-    //The index to get the fields value for USE_FOR_SEARCH
-    //Structure of this array is [ENTITY_FIELD, FIELD_VALUE, USE_FOR_SEARCH]
-    let INDEX_CONFIG_USE_FOR_SEARCH = 2;
-    
     let parsedFilterAsPatternTerm = indexsearch.buildQueryFromSearchCondition(pPreFilterJson);
     logging.log("pTargetEntity -> " + pTargetEntity);
-    //logging.log("pResultIndexFields -> " + pResultIndexFields);
     logging.log("pResultFields -> " + pResultFields);
     logging.log("pResultSetRows -> " + pResultSetRows);
     let indexQuery = indexsearch.createIndexQuery()
                                 .setPattern(parsedFilterAsPatternTerm)
                                 .setEntities([pTargetEntity])
-                                //.addResultIndexFields(pResultIndexFields)
-                                .addResultFields(pResultFields)
                                 //.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME", "Person_entity.CONTACTID")
                                 .setRows(pResultSetRows);
 
-        logging.log("pEntityFieldValueRays.length -> " + pEntityFieldValueRays.length);
+    indexQuery = DuplicateScannerUtils._setResultFields(indexQuery, pResultFields);
+    
+    let filterPatternConfig = DuplicateScannerUtils._buildFilterPatternConfig(pEntityFieldValueRays, pTargetEntity);
+    if(filterPatternConfig != null)
+    {
+        let filterPatternString = indexsearch.buildPatternString(filterPatternConfig);
+        indexQuery = indexQuery.addFilter(filterPatternString);
+        logging.log("real filter PatternString -> " + filterPatternString);
+    }
+    
+    logging.log("parsedFilterAsPatternTerm -> " + parsedFilterAsPatternTerm);
+    
+    
+    return indexsearch.searchIndex(indexQuery);
+}
+
+DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields)
+{
+    logging.log("pResultFields.length -> " + pResultFields.length);
+    let resultIndexFields = [];
+    let resultFields = [];
+    for (let i = 0; i < pResultFields.length; i++) 
+    {
+        if(pResultFields[i] == indexsearch.FIELD_ID
+        || pResultFields[i] == indexsearch.FIELD_TITLE
+        || pResultFields[i] == indexsearch.FIELD_TYPE
+        || pResultFields[i] == indexsearch.FIELD_DESCRIPTION)
+        {
+            resultIndexFields.push(pResultFields[i]);
+        }
+        else
+            resultFields.push(pResultFields[i]);
+    }
+    
+//    if(resultIndexFields.length == 0 && resultFields.length == 0)
+//        pIndexQuery = pIndexQuery.addResultIndexFields([indexsearch.FIELD_ID]);
+//    else 
+//    {
+        if(resultIndexFields.length > 0)
+            pIndexQuery = pIndexQuery.addResultIndexFields(resultIndexFields);
+        
+        if(resultFields.length > 0)
+            pIndexQuery = pIndexQuery.addResultFields(resultFields);
+//    }
+    
+    logging.log("resultIndexFields -> " + resultIndexFields);
+    logging.log("resultFields -> " + resultFields);
+    logging.log("pIndexQuery -> " + pIndexQuery);
+    return pIndexQuery;
+}
+
+DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRays, pTargetEntity)
+{
+    //The index to get the fields value for USE_FOR_SEARCH
+    //Structure of this array is [ENTITY_FIELD, FIELD_VALUE, USE_FOR_SEARCH]
+    let INDEX_CONFIG_USE_FOR_SEARCH = 2;
+    let filterPatternConfig = null;
+    
+    logging.log("pEntityFieldValueRays.length -> " + pEntityFieldValueRays.length);
     if(pEntityFieldValueRays.length > 0)
     {
-        let filterPatternConfig = indexsearch.createPatternConfig();
+        filterPatternConfig = indexsearch.createPatternConfig();
         for (let i = 0; i < pEntityFieldValueRays.length; i++) 
         {
             if(pEntityFieldValueRays[i][INDEX_CONFIG_USE_FOR_SEARCH] == 0)
@@ -652,14 +717,8 @@ DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson,
                                         
             filterPatternConfig.and(filterTerm);
         }
-        logging.log("hallo -> ");
-        let filterPatternString = indexsearch.buildPatternString(filterPatternConfig);
-        
-        logging.log("parsedFilterAsPatternTerm -> " + parsedFilterAsPatternTerm);
-        logging.log("filterPatternString -> " + filterPatternString);
-        indexQuery.addFilter(filterPatternString);
     }
-    return indexsearch.searchIndex(indexQuery);;
+    return filterPatternConfig;
 }
 
 
@@ -674,6 +733,13 @@ DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson,
 
 
 
+
+
+
+
+
+
+
 
 
 
-- 
GitLab