From 8ef0c538bcaea7c6b32b2c682a9d13ae63f64d1b Mon Sep 17 00:00:00 2001
From: "d.buechler" <d.buechler@adito.de>
Date: Tue, 24 Sep 2019 10:44:23 +0200
Subject: [PATCH] New actions to start rebuilding the duplicate caches for
 Person and Organisation have been created

Removed unnecessary log outputs.

Fixed wrong behaviour: if a valid prefilter existed but the filter pattern fields
contained no values, the index search was triggered nonetheless, and the records
found based on the prefilter alone were wrongly marked as duplicates.

Code examples have been added to important functions such as ScanForDuplicates
and RebuildDuplicatesCache.
---
 .../DuplicateScanner_entity.aod               |  14 ++
 .../onActionProcess.js                        |  48 ++++
 .../testduplicatescanner/onActionProcess.js   | 138 ++++++++----
 .../DuplicateScannerFilter_view.aod           |   1 +
 process/DuplicateScanner_lib/process.js       | 208 ++++++++++++------
 5 files changed, 297 insertions(+), 112 deletions(-)
 create mode 100644 entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js

diff --git a/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod b/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod
index 42f0b6d52f..5dfcfb00c6 100644
--- a/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod
+++ b/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod
@@ -101,6 +101,20 @@
       </entityParameter>
     </children>
   </entityConsumer>
+  <entityActionGroup>
+    <name>RunActionGroup</name>
+    <children>
+      <entityActionField>
+        <name>RebuildPersonDuplicatesCache</name>
+        <title>Rebuild Person duplicates cache</title>
+        <onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js</onActionProcess>
+      </entityActionField>
+      <entityActionField>
+        <name>RebuildOrganisationDuplicatesCache</name>
+        <title>Rebuild Organisation duplicates cache</title>
+      </entityActionField>
+    </children>
+  </entityActionGroup>
 </entityFields>
 <recordContainers>
   <dbRecordContainer>
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
new file mode 100644
index 0000000000..15809830cd
--- /dev/null
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -0,0 +1,48 @@
+import("system.logging");
+import("DuplicateScanner_lib");
+
+var filterName = "PersonDuplicates";
+var targetEntity = "Person_entity";
+
+let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
+
+logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
+logging.log("resultFields -> " + resultFields);
+
+let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
+logging.log("querySelectFields -> " + querySelectFields);
+
+let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
+ + " join PERSON on PERSONID = PERSON_ID"
+ + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";
+
+logging.log("Deleting PERSON duplicates -> ");
+DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
+
+let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
+{
+    logging.log("inside callback -> ");
+    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
+
+    //Run through every duplicate result and read out the result fields
+    for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
+    {
+        for (let b = 0; b < resultFields.length; b++)
+        {
+            let entityFieldName = resultFields[b];
+            let indexFieldName = indexResultFields[entityFieldName];
+            //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+            //format values
+        }
+    }
+    //call webservice
+    //reformat results to same structure as before
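+    //The steps above could look roughly like this (hypothetical sketch;
+    //"callDuplicatesWebservice" stands for a project-specific webservice helper
+    //and is not part of DuplicateScanner_lib):
+    //let payload = JSON.stringify(pPossibleDuplicatesRay);
+    //let response = callDuplicatesWebservice(payload);
+    //pPossibleDuplicatesRay = JSON.parse(response);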
callback -> "); + let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields) + + //Run thru every duplicate result an read out the resultfields + for (let i = 0; i < pPossibleDuplicatesRay.length; i++) + { + for (let b = 0; b < resultFields.length; b++) + { + let entityFieldName = resultFields[b]; + let indexFieldName = indexResultFields[entityFieldName]; + //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]); + //format values + } + } + //call webservice + //reformat results to same structure as before + return pPossibleDuplicatesRay; +}; + +logging.log("Neu berechnen von PERSON Dubletten -> "); +DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData, +duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback); + +DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity); \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js index f3fc39d0f5..24c4a7bd54 100644 --- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js +++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js @@ -181,49 +181,97 @@ import("JditoFilter_lib"); //DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity); -//############################################################################## +//####################################Rebuild person duplicates########################################## + +//var filterName = "PersonDuplicates"; +//var targetEntity = "Person_entity"; +// +//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity); +//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity); +// +//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig); +//logging.log("resultFields -> " + resultFields); +// +//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig); +//logging.log("querySelectFields -> " + querySelectFields); +// +//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" +// + " join PERSON on PERSONID = PERSON_ID" +// + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"; +// +//logging.log("Löschen von PERSON Dubletten -> "); +//DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity); +// +//let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) +//{ +// let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields) +// +// //Run thru every duplicate result an read out the resultfields +// for (let i = 0; i < pPossibleDuplicatesRay.length; i++) +// { +// for (let b = 0; b < resultFields.length; b++) +// { +// let entityFieldName = resultFields[b]; +// let indexFieldName = indexResultFields[entityFieldName]; +// //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]); +// //format values +// } +// } +// //call webservice +// //reformat results to same structure as before +// return pPossibleDuplicatesRay; +//}; +// +//logging.log("Neu berechnen von PERSON Dubletten -> "); +//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData, 
+//duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback); +// +//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity); + + + + + +//##################################single scanForDuplicates############################################ + -var filterName = "PersonDuplicates"; -var targetEntity = "Person_entity"; - -let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity); -let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity); - -logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig); -logging.log("resultFields -> " + resultFields); - -let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig); -logging.log("querySelectFields -> " + querySelectFields); - -let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" - + " join PERSON on PERSONID = PERSON_ID" - + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"; - -logging.log("Löschen von PERSON Dubletten -> "); -DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity); - -let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) { - logging.log(" bin in functioin -> " + pPossibleDuplicatesRay.length); - - let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields) - - for (let i = 0; i < pPossibleDuplicatesRay.length; i++) - { - for (let b = 0; b < resultFields.length; b++) - { - let entityFieldName = resultFields[b]; - let indexFieldName = indexResultFields[entityFieldName]; - logging.log("entityFieldName -> " + entityFieldName); - logging.log("indexFieldName -> " + indexFieldName); - logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]); - } - } - logging.log("wieder draußen -> "); - return pPossibleDuplicatesRay; -}; - -logging.log("Neu berechnen von PERSON Dubletten -> "); -DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData, -duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback); - -DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity); \ No newline at end of file +//var filterName = "PersonDuplicates"; +//var targetEntity = "Person_entity"; +//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity); +//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity); +//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig); +// +//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" +// + " join PERSON on PERSONID = PERSON_ID" +// + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID" +// + " where Condition for the record to be checked"; +//let targetRecordsData = db.table(queryPersonFieldData); +// +//let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]); +////The first field in this Array must always be the configured id field. 
+//let idField = entityFieldValuesRay[0][0];
+//let idValue = entityFieldValuesRay[0][1];
+//
+//let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
+//{
+//    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
+//
+//    //Run through every duplicate result and read out the result fields
+//    for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
+//    {
+//        for (let b = 0; b < resultFields.length; b++)
+//        {
+//            let entityFieldName = resultFields[b];
+//            let indexFieldName = indexResultFields[entityFieldName];
+//            //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+//            //format values
+//        }
+//    }
+//    //call webservice
+//    //reformat results to same structure as before
+//    return pPossibleDuplicatesRay;
+//};
+//
+////The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
+//DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
+//idField, idValue, formatToJsonAndCallWsCallback);
\ No newline at end of file
diff --git a/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod b/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod
index cd44bd1be4..fcfd2a13c4 100644
--- a/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod
+++ b/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod
@@ -11,6 +11,7 @@
   <tableViewTemplate>
     <name>Filters</name>
     <favoriteActionGroup2>TestActionGroup</favoriteActionGroup2>
+    <favoriteActionGroup3>RunActionGroup</favoriteActionGroup3>
     <entityField>#ENTITY</entityField>
     <isCreatable v="false" />
     <isEditable v="false" />
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index 773330a743..8ac974b868 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -236,19 +236,19 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
 }
 
 /*
- * Recreates the cached duplicate clusters based on the configured filters.
- * The old clusters have to be deleted manually beforehand using "DeleteDuplicateClustersByTargetEntity".
- * If there have already been ignored relations between duplicate records, it's advised to call "RefreshUnrelatedDuplicateRelations" after the recreation of the duplicates cache.
- * Please check the documentation of the params on how to get the infos required.
- *
- * If the usage of an external webservice has been activated, the results will be narrowed down by the prefilter and
- * will then be given to the pFormatValuesConsumeWebserviceCallback via parameter.
- * To access the values it is advised to run thru the parameter like an array and access its value by key which is the index field name. The entity
- * field names can be converted using DuplicateScannerUtils.TranslateEntityToIndexFields. For further infos see the example section below.
- *
- * Attention!
- * If it is configured to use the external webservice callback the values have to be in the same format as they are in the parameter of the callback.
- *
+ * Recreates the cached duplicate clusters based on the configured filters.<br />
+ * The old clusters have to be deleted manually beforehand using "DeleteDuplicateClustersByTargetEntity".<br />
+ * If ignored relations between duplicate records already exist, it's advised to call "RefreshUnrelatedDuplicateRelations" after the recreation of the duplicates cache.<br />
+ * Please check the documentation of the params on how to obtain the required information.<br />
+ * <br />
+ * If the usage of an external webservice has been activated, the results will be narrowed down by the prefilter and<br />
+ * will then be passed to the pFormatValuesConsumeWebserviceCallback as a parameter.<br />
+ * To access the values, iterate over the parameter like an array and access each value by its key, which is the index field name. The entity<br />
+ * field names can be converted using DuplicateScannerUtils.TranslateEntityToIndexFields. For further information see the example section below.<br />
+ * <br />
+ * Attention!<br />
+ * If the scanner is configured to use the external webservice callback, the values have to be returned in the same format as they arrive in the callback's parameter.<br />
+ * <br />
 * @param {String} pFilterName Name of the filter to use
 * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
 * @param {String} pQueryTargetRecords Query which holds the values that are being used as configured in the filter.
@@ -260,20 +260,41 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
 * @return {Int} Count of duplicate clusters created
 *
 * @example
- * var filterName = "PersonDuplicates";<br />
- * var targetEntity = "Person_entity";<br />
- * todo
- * queryPersonContactIds = "select CONTACTID, ORGANISATION.\"NAME\" from ORGANISATION"<br />
- * + " join CONTACT on CONTACT.CONTACTID = ORGANISATION.ORGANISATIONID"<br />
- * + " where CONTACTID != '0'";<br />
- * tmpFieldsInFilterRay = ["CONTACTID", "NAME"];<br />
- * <br />
- * DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)<br />
- * <br />
- * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,<br />
- * tmpFieldsInFilterRay, resultFieldsIdFieldName);<br />
- * <br />
- * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);<br />
+ * var filterName = "PersonDuplicates";
+ * var targetEntity = "Person_entity";
+ *
+ * let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+ * let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
+ * let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
+ * let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
+ * + " join PERSON on PERSONID = PERSON_ID"
+ * + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";
+ *
+ * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
+ * {
+ *     let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
+ *
+ *     //Run through every duplicate result and read out the result fields
+ *     for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
+ *     {
+ *         for (let b = 0; b < resultFields.length; b++)
+ *         {
+ *             let entityFieldName = resultFields[b];
+ *             let indexFieldName = indexResultFields[entityFieldName];
+ *             //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+ *             //format values
+ *         }
+ *     }
+ *     //call webservice
+ *     //reformat results to same structure as before
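+ *     //A rough sketch of those two steps (hypothetical; "callDuplicatesWebservice"
+ *     //stands for a project-specific webservice helper and is not part of this lib):
+ *     //let response = callDuplicatesWebservice(JSON.stringify(pPossibleDuplicatesRay));
+ *     //pPossibleDuplicatesRay = JSON.parse(response);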
+ *     return pPossibleDuplicatesRay;
+ * };
+ *
+ * DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
+ * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
+ * duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);
+ *
+ * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
 */
DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
pQueryTargetRecords, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsumeWebserviceCallback)
@@ -296,37 +317,38 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pResultFields, pFormatValuesConsume
     var duplicatesToInsertQueries = [];
     for (b = 0; b < targetRecordsData.length; b++)
     {
-        logging.log("b -> " + b);
-        logging.log("Neuer Record -> " + targetRecordsData[b]);
+        logging.log("Next record in RebuildDuplicatesCache -> " + b);
+//        logging.log("New record -> " + targetRecordsData[b]);
 
         //If the current Id has already been identified, continue
         if(alreadyIdentifiedIds.indexOf(targetRecordsData[b][0]) > -1)
             continue;
 
         let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(pDuplicateFieldsConfig, targetRecordsData[b]);
-        logging.log("entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
+        logging.log("Values of the record entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
 
         //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic
         let idField = entityFieldValuesRay[0][0];
         let idValue = entityFieldValuesRay[0][1];
-        logging.log("idField -> " + idField);
-        logging.log("idValue -> " + idValue);
+//        logging.log("idField -> " + idField);
+//        logging.log("idValue -> " + idValue);
 
         let foundDuplicates = _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
 entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
 
-        logging.log("foundDuplicates -> " + foundDuplicates);
-
         if(foundDuplicates == null || foundDuplicates.length == 0)
+        {
+            logging.log("No records found, continue -> ");
             continue;
-        logging.log("foundDuplicates.length -> " + foundDuplicates.length);
+        }
+        logging.log("foundDuplicates.length after _scanForDuplicates -> " + foundDuplicates.length);
 
         //Insert all found duplicate ids into a cache array because those ids don't have to be checked again later on.
         let foundDuplicateIds = [];
         for (let i = 0; i < foundDuplicates.length; i++)
         {
             let localId = foundDuplicates[i][indexsearch.FIELD_ID];
-            logging.log("localId der gefundenen Dublette-> " + localId);
+            logging.log("foundDuplicates[i] -> " + foundDuplicates[i]);
             foundDuplicateIds.push(localId);
         }
 
@@ -409,8 +431,48 @@ DuplicateScannerUtils.LoadIndexFieldsConfiguration = function(pFilterName, pTarg
 * Whether the function is called is based on the configuration of the current scanner
 * @returns {[["key", "value"]]} Array of Key-Value-Pairs based on the configured resultfields, if an external webservice was used
 * the structure is defined by the parameterized function "pFormatValuesConsumeWebserviceCallback"
+ *
 * @example
- * todo
+ * var filterName = "PersonDuplicates";
+ * var targetEntity = "Person_entity";
+ * let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
+ * let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
+ * let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
+ *
+ * let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
+ * + " join PERSON on PERSONID = PERSON_ID"
+ * + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"
+ * + " where Condition for the record to be checked";
+ * let targetRecordsData = db.table(queryPersonFieldData);
+ *
+ * let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]);
+ * //The first field in this Array must always be the configured id field.
+ * let idField = entityFieldValuesRay[0][0];
+ * let idValue = entityFieldValuesRay[0][1];
+ *
+ * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
+ * {
+ *     let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
+ *
+ *     //Run through every duplicate result and read out the result fields
+ *     for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
+ *     {
+ *         for (let b = 0; b < resultFields.length; b++)
+ *         {
+ *             let entityFieldName = resultFields[b];
+ *             let indexFieldName = indexResultFields[entityFieldName];
+ *             //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
+ *             //format values
+ *         }
+ *     }
+ *     //call webservice
+ *     //reformat results to same structure as before
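+ *     //(see the example at RebuildDuplicatesCache for a sketch of a possible webservice call)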
+ *     return pPossibleDuplicatesRay;
+ * };
+ *
+ * //The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
+ * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
+ * idField, idValue, formatToJsonAndCallWsCallback);
 */
DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback)
@@ -579,8 +641,8 @@ DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsC
 */
    for (a = 0; a < pDuplicateFieldsConfig.length; a++)
    {
-        logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
-        logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
+//        logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
+//        logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
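+        //Each pushed entry has the structure [entityFieldName, fieldValue, useForIndexSearch],
+        //e.g. (hypothetical values): ["FIRSTNAME", "Max", true]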
         entityFieldValuesRay.push([pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD], pTargetRecordData[a],
 pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]])
     }
 
@@ -621,16 +683,20 @@ pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesCons
         return null;
 
     possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pResultFields, 100);
+
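+    //_callIndexSearch returns null if fields are configured for the filter pattern but none of
+    //them carries a value; without this guard the prefilter-only results would wrongly count as duplicates.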
+    if(possibleDuplicates == null)
+        return null;
+
     possibleDuplicates = possibleDuplicates[indexsearch.HITS];
 
     if(pUseExternalWebservice && possibleDuplicates.length > 0 && pFormatValuesConsumeWebserviceCallback != null)
         possibleDuplicates = pFormatValuesConsumeWebserviceCallback.apply(this, [possibleDuplicates]);
 
-    logging.log("pTargetEntity -> " + pTargetEntity);
-    logging.log("preFilter -> " + preFilter);
-    logging.log("pFilterFieldValueRays -> " + pFilterFieldValueRays);
-    logging.log("pRecordIdFieldToIgnore -> " + pRecordIdFieldToIgnore);
-    logging.log("possibleDuplicates -> " + possibleDuplicates);
+//    logging.log("pTargetEntity -> " + pTargetEntity);
+//    logging.log("preFilter -> " + preFilter);
+//    logging.log("pFilterFieldValueRays -> " + pFilterFieldValueRays);
+//    logging.log("pRecordIdFieldToIgnore -> " + pRecordIdFieldToIgnore);
+//    logging.log("possibleDuplicates -> " + possibleDuplicates);
 
     return possibleDuplicates;
 }
 
@@ -650,7 +716,6 @@ _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetE
                             .buildSql("select EXTERNAL_SERVICE_USAGE_ALLOWED from DUPLICATESCANNER"
                             , "1=2");
 
-    logging.log("scannerUseExternalWebserviceQuery -> " + scannerUseExternalWebserviceQuery);
     let isUseWebservice = db.cell(scannerUseExternalWebserviceQuery);
     return (isUseWebservice == 0) ? false : true;
 }
@@ -699,14 +764,14 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
     */
     logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
     filter.filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.filter.childs, filterValuesObject, countCharsOfValueToUse);
-    logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
-
+    //logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
+
     if(i == 0)
         combinedFilter = filter.filter;
     else
     {
-        logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
-        logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
+        //logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
+        //logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
         //Extend the current combined filter with the next filter condition to further refine the results
         //It seems to always contain one child element at the root
         //combinedFilter.childs.push(filter.filter.childs);
         combinedFilter.childs = newCombinedFilterChilds;
     }
 
-    logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
+    //logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
     filter.filter = combinedFilter;
 
     logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
 
     let searchResult = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [], [], 1);//todo use again after this has been fixed!! insert the local id after fix
     logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
-    logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS][0]);
-
+
     if(searchResult[indexsearch.TOTALHITS] < 80)//todo entfernen?!
     {
         for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
@@ -758,7 +822,6 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
     else
     {
         //we're in the threshold, return the valid filter. The filter gets used later on.
-        logging.log("Im return valider Filter -> " + JSON.stringify(filter));
         return JSON.stringify(filter);
     }
 }
@@ -774,7 +837,7 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
 * @param {String} pTargetEntity Entity which has been configured
 * @param {String} pPreFilterJson The prefilters
 * @param {String[[]]} pEntityFieldValueRays Array of Arrays containing the name of a used field and its value.
- * Use "DuplicateScannerUtils.BuildEntityFieldNameValueRays". The fieldnames and values have to be in the same order
+ * Use "DuplicateScannerUtils.BuildEntityFieldNameValueRays". The fieldnames and values have to be in the same order. Must not be null; pass an empty array instead.
 * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
 * @param {String} pResultSetRows todo
 * @returns {[["key", "value"]]} Array of Key-Value-Pairs based on the configured pResultFields
 */
_DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultFields, pResultSetRows)
{
    let parsedFilterAsPatternTerm = indexsearch.buildQueryFromSearchCondition(pPreFilterJson);
-    logging.log("pTargetEntity -> " + pTargetEntity);
+    //logging.log("pTargetEntity -> " + pTargetEntity);
     logging.log("pResultFields -> " + pResultFields);
-    logging.log("pResultSetRows -> " + pResultSetRows);
+    //logging.log("pResultSetRows -> " + pResultSetRows);
     let indexQuery = indexsearch.createIndexQuery()
                                 .setPattern(parsedFilterAsPatternTerm)
                                 .setEntities([pTargetEntity])
 }
 logging.log("parsedFilterAsPatternTerm -> " + parsedFilterAsPatternTerm);
 
-    return indexsearch.searchIndex(indexQuery);
+    if(filterPatternConfig == null && pEntityFieldValueRays.length > 0)
+    {
+        logging.log("Filter pattern is null although pEntityFieldValueRays exist -> the fields should be used for the search but contain no values");
+        return null;
+    }
+    else
+    {
+        logging.log("Starting index search -> ");
+        return indexsearch.searchIndex(indexQuery);
+    }
 }
 
 /*
@@ -819,7 +891,6 @@ _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson
 */
_DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields)
{
-    logging.log("pResultFields.length -> " + pResultFields.length);
     let resultIndexFields = [];
     let resultFields = [];
     for (let i = 0; i < pResultFields.length; i++)
@@ -843,12 +914,12 @@ _DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields)
         pIndexQuery = pIndexQuery.addResultIndexFields(resultIndexFields);
 
     if(resultFields.length > 0)
+    {
         pIndexQuery = pIndexQuery.addResultFields(resultFields);
+    }
 //    }
-    logging.log("resultIndexFields -> " + resultIndexFields);
     logging.log("resultFields -> " + resultFields);
-    logging.log("pIndexQuery -> " + pIndexQuery);
     return pIndexQuery;
 }
 
@@ -892,7 +963,7 @@ _DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRay
         filterPatternConfig.and(filterTerm);
     }
 }
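+    //Return null instead of a possibly empty pattern so callers can detect that no filter pattern field carried a value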
-    return filterPatternConfig;
+    return (filterPatternConfig == null || filterPatternConfig.isEmpty()) ? null : filterPatternConfig;
 }
 
 /*
@@ -1037,13 +1108,11 @@ _DuplicateScannerUtils._buildStatement = function(pTableinfos, pSourceContactId,
 }
 
 /*
- * Contains all Tables and their fields which may contain the contact id to be replaced
+ * Contains all tables and their fields which may contain the contact id to be replaced, for the data alias
 *
- * @param {String}
- * @param {String[]}
- * @returns {String}
+ * @returns {String[[]]} Array in the format [TableName, ContactIdColumnName, AdditionalCondition]
 */
-_DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias = function(pSourceContactId, pTargetContactId)
+_DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias = function()
 {
     var tableInfos = new Array();
     tableInfos.push(["AB_APPOINTMENTLINK", "OBJECT_ROWID", ""]);
@@ -1082,6 +1151,11 @@ _DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias = function(pSourceC
     return tableInfos;
 }
 
+/*
+ * Contains all tables and their fields which may contain the contact id to be replaced, for the system alias
+ *
+ * @returns {String[[]]} Array in the format [TableName, ContactIdColumnName, AdditionalCondition]
+ */
 _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias = function(pSourceContactId, pTargetContactId)
 {
     var tableInfos = new Array();
@@ -1129,7 +1203,7 @@ _DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, p
     if(fieldValue == null)
     {
         logging.show("Duplicate Scan: Requested value for field " + fieldName + " not present in the provided valueslist");
-        continue;
+        return pJsonRootNode;
     }
 
     if(_DuplicateScannerUtils._isNotNullAndANumber(pCountCharsOfValueToUse)
-- 
GitLab