From e26e9f343be3ca8498c0c9ca9e485656e32fd46b Mon Sep 17 00:00:00 2001
From: "d.buechler" <d.buechler@adito.de>
Date: Fri, 20 Sep 2019 16:59:24 +0200
Subject: [PATCH] Refactorings Documentation Example code to show the usage of
 the webservice callback

---
 .../testduplicatescanner/onActionProcess.js   |  27 +++-
 process/DuplicateScanner_lib/process.js       | 133 +++++++++++++-----
 2 files changed, 118 insertions(+), 42 deletions(-)

diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index 90b0bca7931..f3fc39d0f52 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -186,13 +186,15 @@ import("JditoFilter_lib");
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
 
-
-let duplicateFieldsConfig = DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration(filterName, targetEntity);
+let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
 
 logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
+logging.log("resultFields -> " + resultFields);
 
 let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
 logging.log("querySelectFields -> " + querySelectFields);
+
 let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
                             + " join PERSON on PERSONID = PERSON_ID"
                             + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";
@@ -200,17 +202,28 @@ let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
 logging.log("Löschen von PERSON Dubletten -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
-let formatToJsonAndCallWs = function(pPossibleDuplicatesRay) {
-    logging.log("pPossibleDuplicatesRay  -> " + pPossibleDuplicatesRay);
+let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) {
     logging.log(" bin in functioin -> " + pPossibleDuplicatesRay.length);
-    if(pPossibleDuplicatesRay.length > 0)
-        logging.log("DemoIdFeld -> " + pPossibleDuplicatesRay[0][indexsearch.FIELD_ID]);
+    
+    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
+    
+    for (let i = 0; i < pPossibleDuplicatesRay.length; i++) 
+    {
+        for (let b = 0; b < resultFields.length; b++) 
+        {
+            let entityFieldName = resultFields[b];
+            let indexFieldName = indexResultFields[entityFieldName];
+            logging.log("entityFieldName -> " + entityFieldName);
+            logging.log("indexFieldName -> " + indexFieldName);
+            logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+        }
+    }
     logging.log("wieder draußen -> ");
     return pPossibleDuplicatesRay;
 };
 
 logging.log("Neu berechnen von PERSON Dubletten -> ");
 DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
-duplicateFieldsConfig, formatToJsonAndCallWs);
+duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);
 
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index f4d3df464d1..689d29526a6 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -204,9 +204,15 @@ DuplicateScannerUtils.GetCachedDuplicatesForId = function(pClusterRecordId)
  * The old clusters have to be deleted manually beforehand using "DeleteDuplicateClustersByTargetEntity".
  * If there have already been ignored relations between duplicate records, it's advised to call "RefreshUnrelatedDuplicateRelations" after the recreation of the duplicates cache.
  * 
+ * Attention!
+ * If it is configured to use the external webservice callback, the values returned by the callback have to be in the same format as those in its parameter.
+ * 
  * @param {String} pFilterName Name of the filter to use
  * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
  * @param {String} pQueryTargetRecords Query which holds the values that are being used as configured in the filter.
+ * @param {String[]} pResultFields The entity fields whose values are loaded for each possible duplicate
+ * @param {function} pFormatValuesConsumeWebserviceCallback Callback which receives the possible duplicates,
+ * calls the external webservice and returns the values in the same format as its parameter
  * Important: The first element has to be the id field!
  * @param {String[]} pDuplicateFieldsConfig The configuration of the fields and their usage. @see DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration
  *
@@ -229,7 +235,7 @@ DuplicateScannerUtils.GetCachedDuplicatesForId = function(pClusterRecordId)
  * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);<br />
  */
 DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
-pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction)
+pQueryTargetRecords, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWebserviceCallback)
 {   
     
     logging.log("in RebuildDuplicatesCache -> ");
@@ -248,8 +254,7 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction)
      * First it gets checked if the current id has already been identified. If that's the case it'll continue with the next.
      * Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
      */
-    let resultFields = _DuplicateScannerUtils._loadResultFields(pFilterName, pTargetEntity);
-    logging.log("configured resultFields -> " + resultFields);
+    logging.log("configured pResultFields -> " + pResultFields);
     var duplicatesToInsertQueries = [];
     for (b = 0; b < targetRecordsData.length; b++) 
     {
@@ -273,14 +278,14 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction)
         logging.log("idValue -> " + idValue);
         
         let foundDuplicates = DuplicateScannerUtils.ScanForDuplicates(pFilterName, pTargetEntity,
-                                entityFieldValuesRay, resultFields, idField, idValue, pCallExternalWebserviceFunction, useExternalWebservice)
+                                entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
         
         //        logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
         logging.log("foundDuplicates -> " + foundDuplicates);
-        logging.log("foundDuplicates.length -> " + foundDuplicates.length);
+        
         if(foundDuplicates == null || foundDuplicates.length == 0)
             continue;
-        
+        logging.log("foundDuplicates.length -> " + foundDuplicates.length);
  
         //Insert all found duplicate ids into an cache array because those ids don't have to be checked again lateron.
         let foundDuplicateIds = [];
@@ -305,11 +310,43 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction)
     return db.inserts(duplicatesToInsertQueries);
 }
 
-//Array
-//[DB_FELD, ENTITY_FELD, IS_ID, USE_FOR_SEARCH]
-//["CONTACTID", "CONTACTID", true, false]
-//["FIRSTNAME", "FIRSTNAME", false, true]
-DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration = function(pFilterName, pTargetEntity)
+/* 
+ * Loads the configured resultfields as array
+ * 
+ *  @param {String} pFilterName Name of the filter
+ *  @param {String} pTargetEntity Entity which has been configured
+ *  @returns {String[]} Resultfields as array 
+ */
+DuplicateScannerUtils.LoadResultFields = function(pFilterName, pTargetEntity)
+{
+//    Example of the generated statement:
+//    select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc
+//    join DUPLICATESCANNER ds on ds.ID = dsrfc.DUPLICATESCANNER_ID
+//    where ds.FILTER_NAME = 'PersonDuplicates'
+//    and ds.ENTITY_TO_SCAN_NAME = 'Person_entity'
+
+    let duplicateResultFields = SqlCondition.begin()
+        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+        .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID"
+        , "1=2");
+    
+    logging.log("duplicateResultFields condition-> " + duplicateResultFields);
+    return db.array(db.COLUMN, duplicateResultFields);
+}
+
+/* 
+ * Loads the fields and their configuration. 
+ * One field record is in the following format: 
+ * [DB_FELD, ENTITY_FELD, IS_ID, USE_FOR_SEARCH]
+ * Example:
+ * ["FIRSTNAME", "FIRSTNAME", false, true]
+ * 
+ *  @param {String} pFilterName Name of the filter
+ *  @param {String} pTargetEntity Entity which has been configured
+ *  @returns {String[][]} An array of arrays in the format described above
+ */
+DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTargetEntity)
 {
 //    select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic
 //    join DUPLICATESCANNER ds on ds.ID = dsic.DUPLICATESCANNER_ID
@@ -327,8 +364,27 @@ DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration = function(pFilterNa
     return db.table(duplicateIndexFieldConfigurations);
 }
 
+/* 
+ * Scans for duplicates based on the configured prefilters and the pFilterFieldValueRays.
+ * First the prefilters get applied one after another until the count of the returned data is in the allowed threshold.
+ * Then, the duplicate search using the index starts. All fields which have been configured will be used here.
+ * If the usage of an external webservice has been activated, the result will then be given to the pFormatValuesConsumeWebserviceCallback via parameter.
+ * To access the values it is advised to iterate over the parameter like an array and access each value by key, which is the index field name. The entity
+ * field names can be converted using DuplicateScannerUtils.TranslateEntityToIndexFields.
+ * 
+ * Attention!
+ * If it's a single ScanForDuplicates call, it doesn't matter what the callback returns because no more modifications follow
+ * before the data is returned.
+ * If it's called from RebuildDuplicatesCache, the returned values have to be in the same format as the callback's parameter.
+ * 
+ * @param {String} pFilterName Name of the filter to use
+ * @param {String[][]} pFilterFieldValueRays Pairs of [entityFieldName, value] used to scan for duplicates
+ * @returns {String[]} The possible duplicates, or the callback's return value if the external webservice is used
+ * @example
+ *  
+ */
 DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
-pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCallExternalWebserviceFunction, pUseExternalWebservice)
+pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice)
 {
     let possibleDuplicates = [];
     let ignoredRecordFilter = _DuplicateScannerUtils._getIgnoreRecordFilter(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity);
@@ -348,8 +404,8 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCall
     possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100);
     possibleDuplicates = possibleDuplicates[indexsearch.HITS];
     
-    if(pUseExternalWebservice && possibleDuplicates.length > 0 && pCallExternalWebserviceFunction != null)
-        possibleDuplicates = pCallExternalWebserviceFunction.apply(this, [possibleDuplicates]);
+    if(pUseExternalWebservice && possibleDuplicates.length > 0 && pFormatValuesConsumeWebserviceCallback != null)
+        possibleDuplicates = pFormatValuesConsumeWebserviceCallback.apply(this, [possibleDuplicates]);
 
     logging.log("pTargetEntity -> " + pTargetEntity);
     logging.log("preFilter -> " + preFilter);
@@ -360,6 +416,12 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCall
     return possibleDuplicates;
 }
 
+/* 
+ * Concatenates the fields, separated by commas, so that the result can be used in a sql select.
+ * 
+ *  @param {String[]} pIndexFieldsConfig Array of Names
+ *  @returns {String} String in the style of "Value1, Value2, Value3"
+ */
 DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig = function(pIndexFieldsConfig)
 {
     let querySelectFields = "";
@@ -374,12 +436,30 @@ DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig = function(pIndexField
 }
 
 
+/* 
+ * Executes an indexsearch.lookupIndexField for each entity field in the given array
+ * and returns the result as a map-like object.
+ *  @param {String} pEntityName Name of the entity
+ *  @param {String[]} pEntityFields Array of the entity's fields to translate to index fields
+ *  @returns Map-like object where (key = entity field) and (value = index field)
+ *  @example
+ *  let entityResultFields = ["LASTNAME"];
+ *  let entityIndexFields = DuplicateScannerUtils.TranslateEntityToIndexFields("Person_entity", entityResultFields);
+ *  
+ *  logging.log(entityIndexFields["LASTNAME"]);//=> "LASTNAME_value"
+ */
 DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields)
 {
     let entityIndexFields = {};
     for (let i = 0; i < pEntityFields.length; i++) 
     {
-         let indexField = indexsearch.lookupIndexField(pEntityName, pEntityFields[i]);
+        let entityFieldName = pEntityFields[i];
+
+        //todo remove if api has been fixed
+        if(entityFieldName.startsWith(pEntityName))
+            entityFieldName = entityFieldName.replace(pEntityName + ".", "");
+
+         let indexField = indexsearch.lookupIndexField(pEntityName, entityFieldName);
          entityIndexFields[pEntityFields[i]] = indexField;
     }
     return entityIndexFields;
@@ -497,24 +577,6 @@ _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetE
     return (isUseWebservice == 0) ? false : true;
 }
 
-_DuplicateScannerUtils._loadResultFields = function(pFilterName, pTargetEntity)
-{
-//    select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic
-//    join DUPLICATESCANNER ds on ds.ID = dsic.DUPLICATESCANNER_ID
-//    where ds.FILTER_NAME = 'PersonDuplicates'
-//    and ds.ENTITY_TO_SCAN_NAME = 'Person_entity'
-//    order by dsic.IS_ID_FIELD desc
-
-    let duplicateResultFields = SqlCondition.begin()
-        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
-        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
-        .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID"
-        , "1=2");
-    
-    logging.log("duplicateResultFields condition-> " + duplicateResultFields);
-    return db.array(db.COLUMN, duplicateResultFields);
-}
-
 /* 
  * Creates an array of arrays containing the entity field name paired with it's value.
  * 
@@ -676,6 +738,9 @@ _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson
 
     indexQuery = _DuplicateScannerUtils._setResultFields(indexQuery, pResultFields);
     
+    //indexQuery = indexQuery.addResultFields(["Person_entity.FIRSTNAME", "Person_entity.LASTNAME"]);
+    //indexQuery = indexQuery.addResultFields(["FIRSTNAME", "LASTNAME"]);
+    
     let filterPatternConfig = _DuplicateScannerUtils._buildFilterPatternConfig(pEntityFieldValueRays, pTargetEntity);
     if(filterPatternConfig != null)
     {
@@ -683,10 +748,8 @@ _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson
         indexQuery = indexQuery.addFilter(filterPatternString);
         logging.log("real filter PatternString -> " + filterPatternString);
     }
-    
     logging.log("parsedFilterAsPatternTerm -> " + parsedFilterAsPatternTerm);
     
-    
     return indexsearch.searchIndex(indexQuery);
 }
 
-- 
GitLab