Skip to content
Snippets Groups Projects
Commit 400d8c71 authored by David Büchler's avatar David Büchler
Browse files

The possibility to integrate a webservice call and the necessary formatting of...

The possibility to integrate a webservice call and the necessary formatting of the values has been developed. The function RebuildDuplicatesCache now expects a function as a parameter in which an external developer can implement the steps to format the values and call the desired webservice. There is no native webservice call implementation because the form of the data sent to a webservice always differs, and the standard implementation cannot know how communication with a particular webservice has to take place.
The customized function gets an array which holds values based on the configured result fields. To access a value, the key — which is its index field name — ought to be used. The result fields, which are known at this point, can be converted to their respective index field name counterparts using DuplicateScannerUtils.TranslateEntityToIndexFields, which returns a map-like object (access via key).

New function DuplicateScannerUtils.TranslateEntityToIndexFields
parent 2a8771bb
No related branches found
No related tags found
No related merge requests found
......@@ -199,8 +199,17 @@ let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
logging.log("Löschen von PERSON Dubletten -> ");
DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
// Callback handed to RebuildDuplicatesCache: the place where an external
// developer formats the duplicate candidates and calls the desired
// webservice. This demo implementation only logs the received data and
// hands the array back unchanged.
let formatToJsonAndCallWs = function(pPossibleDuplicatesRay) {
    logging.log("pPossibleDuplicatesRay -> " + pPossibleDuplicatesRay);
    logging.log(" bin in functioin -> " + pPossibleDuplicatesRay.length);

    // Demo access: read the index field id of the first duplicate candidate
    if(pPossibleDuplicatesRay.length > 0)
    {
        logging.log("DemoIdFeld -> " + pPossibleDuplicatesRay[0][indexsearch.FIELD_ID]);
    }

    logging.log("wieder draußen -> ");
    return pPossibleDuplicatesRay;
};
logging.log("Neu berechnen von PERSON Dubletten -> ");
DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
duplicateFieldsConfig);
duplicateFieldsConfig, formatToJsonAndCallWs);
DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
......@@ -229,7 +229,7 @@ DuplicateScannerUtils.GetCachedDuplicatesForId = function(pClusterRecordId)
* DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);<br />
*/
DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
pQueryTargetRecords, pDuplicateFieldsConfig)
pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction)
{
logging.log("in RebuildDuplicatesCache -> ");
......@@ -271,22 +271,20 @@ pQueryTargetRecords, pDuplicateFieldsConfig)
logging.log("idValue -> " + idValue);
let foundDuplicates = DuplicateScannerUtils.ScanForDuplicates(pFilterName, pTargetEntity,
entityFieldValuesRay, resultFields, idField, idValue)
entityFieldValuesRay, resultFields, idField, idValue, pCallExternalWebserviceFunction)
// logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
logging.log("foundDuplicates -> " + foundDuplicates);
logging.log("foundDuplicates[indexsearch.TOTALHITS] -> " + foundDuplicates[indexsearch.TOTALHITS]);
if(foundDuplicates == null || foundDuplicates[indexsearch.TOTALHITS] == 0)
logging.log("foundDuplicates.length -> " + foundDuplicates.length);
if(foundDuplicates == null || foundDuplicates.length == 0)
continue;
//Insert all found duplicate ids into an cache array because those ids don't have to be checked again lateron.
let foundDuplicateIds = [];
for (let i = 0; i < foundDuplicates[indexsearch.HITS].length; i++)
for (let i = 0; i < foundDuplicates.length; i++)
{
// logging.log("i -> " + i);
// logging.log("foundDuplicates[pRecordIdFieldToIgnore] -> " + foundDuplicates[i][pRecordIdFieldToIgnore]);
let localId = foundDuplicates[indexsearch.HITS][i][indexsearch.FIELD_ID];
let localId = foundDuplicates[i][indexsearch.FIELD_ID];
logging.log("localId der gefundenen Dublette-> " + localId);
foundDuplicateIds.push(localId);
}
......@@ -383,7 +381,7 @@ DuplicateScannerUtils._loadResultFields = function(pFilterName, pTargetEntity)
DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCallExternalWebserviceFunction)
{
let possibleDuplicates = [];
let ignoredRecordFilter = _DuplicateScannerUtils._getIgnoreRecordFilter(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity);
......@@ -404,7 +402,11 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
if(useExternalWebservice)
{
possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100);
possibleDuplicates = possibleDuplicates[indexsearch.HITS];
if(possibleDuplicates.length > 0)
possibleDuplicates = pCallExternalWebserviceFunction.apply(this, [possibleDuplicates]);
}
else
{
......@@ -416,9 +418,9 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
//(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultIndexFields, pResultFields, pResultSetRows)
//run actual index duplicate search
possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100);
logging.log("possibleDuplicates -> " + possibleDuplicates);
possibleDuplicates = possibleDuplicates[indexsearch.HITS];
}
logging.log("possibleDuplicates -> " + possibleDuplicates);
return possibleDuplicates;
}
......@@ -487,7 +489,7 @@ DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountChar
//Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
//Only load "indexsearch.FIELD_ID" and a resultSet size of 1
let searchResult = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [],
["Person_entity.FIRSTNAME"], 1);
[], 1);//todo use again after this has been fixed!! insert the local id after fix
logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS][0]);
......@@ -643,7 +645,16 @@ DuplicateScannerUtils.BuildSelectFieldsFromFieldConfig = function(pIndexFieldsCo
}
/**
 * Translates the given entity field names into their index field
 * counterparts by looking each one up via indexsearch.lookupIndexField.
 *
 * @param {String} pEntityName name of the entity the fields belong to
 * @param {String[]} pEntityFields entity field names to translate
 * @returns {Object} map-like object: entity field name -> index field
 */
DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields)
{
    let entityIndexFields = {};

    for (let entityField of pEntityFields)
    {
        entityIndexFields[entityField] = indexsearch.lookupIndexField(pEntityName, entityField);
    }

    return entityIndexFields;
}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment