Commit ed144c8e authored by David Büchler

The prefilter JSON is now correctly created and returned in case of a successful search (found results within the threshold).
parent ae388769
@@ -27,7 +27,7 @@ var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from CONTACT
+ " join PERSON on PERSONID = PERSON_ID";
var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"];
var filterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"], ["FIRSTNAME", "Narkus"], ["LASTNAME", "Bltinger"]];
var filterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"], ["FIRSTNAME", "Narkus"], ["LASTNAME", "Bltinger"], ["GENDER", "M"]];
//DuplicateScannerUtils.ScanForDuplicatesIndex = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
//pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
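For context, a minimal sketch of how the test values from this hunk could be handed to the scan, assuming the commented-out signature of ScanForDuplicatesIndex above is accurate; the filter name, the result fields and the logging of the return value are placeholder assumptions and not part of this commit:

//Sketch only: exercises ScanForDuplicatesIndex with the test values from this hunk.
//"PersonDuplicates", the result fields and the ignored record id are assumed values.
var scanFilterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"],
                                ["FIRSTNAME", "Narkus"],
                                ["LASTNAME", "Bltinger"],
                                ["GENDER", "M"]];

var duplicateResults = DuplicateScannerUtils.ScanForDuplicatesIndex("PersonDuplicates",
    "Person_entity",
    scanFilterFieldValueRays,
    ["Person_entity.FIRSTNAME", "Person_entity.LASTNAME"],
    "CONTACTID",
    "29271db0-4253-46c9-b7c2-5e25376b9d19");

//What the scan returns is not visible in this hunk, so this logging line is illustrative only
logging.log("duplicateResults -> " + JSON.stringify(duplicateResults));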
@@ -414,7 +414,7 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
configuredFilters = [ignoredRecordFilter].concat(configuredFilters);
logging.log("configuredFilters -> " + configuredFilters);
logging.log("Found filters -> " + configuredFilters);
let preFilter = DuplicateScannerUtils._applyPreFilterIndex(pTargetEntity, configuredFilters, pFilterFieldValueRays);
@@ -471,14 +471,14 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
filterValuesObject[pFilterFieldValueRays[a][0]] = pFilterFieldValueRays[a][1];
}
logging.log("filterValuesObject -> " + JSON.stringify(filterValuesObject));
logging.log("filterValuesObject zum füllen des jsons -> " + JSON.stringify(filterValuesObject));
/*
* Insert the values into the current filter. Has to be here so that only the new filter
* and therefore the combinedFilter incrementally gets filled and not always everything multiple times.
*/
logging.log("filter vor insert-> " + JSON.stringify(filter));
logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
filter.filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.filter.childs, filterValuesObject, countCharsOfValueToUse);
logging.log("gefüllter filter prefilter index-> " + JSON.stringify(filter));
logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
@@ -486,22 +486,27 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
combinedFilter = filter.filter;
else
{
logging.log("aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
//Extend the current combined filter with the next filter condition to further refine the results
//It seems to always contain one child element at the root
combinedFilter.childs.push(filter.filter.childs[0]);
//combinedFilter.childs.push(filter.filter.childs);
let newCombinedFilterChilds = combinedFilter.childs.concat(filter.filter.childs);
combinedFilter.childs = newCombinedFilterChilds;
}
logging.log("combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
// let completeFilter = JSON.parse(filter);
// completeFilter.filter = combinedFilter;
logging.log("completeFilter -> " + JSON.stringify(filter));
filter.filter = combinedFilter;
logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
//Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
//Only load "indexsearch.FIELD_ID" and a resultSet size of 1
let totalHits = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [],
[indexsearch.FIELD_ID], ["Person_entity.CONTACTID"], 1)[indexsearch.TOTALHITS];
[indexsearch.FIELD_ID], ["Person_entity.FIRSTNAME"], 1)[indexsearch.TOTALHITS];
logging.log("totalHits -> " + totalHits);
@@ -526,7 +531,7 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
logging.log("Im return valider Filter -> " + combinedFilter);
return combinedFilter;
return JSON.stringify(filter);
}
}
logging.log("zu viele rows und keine filter mehr -> ");
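Finally, a minimal sketch of the return behaviour this commit fixes: once the hit count is acceptable, the refined conditions are written back into filter.filter and the serialized JSON is returned instead of the bare combinedFilter object. The wrapper function and maxResultsThreshold are assumed names; only the returned values mirror the diff:

//Sketch of the return path inside _applyPreFilterIndex; maxResultsThreshold and the
//function wrapper are assumptions, only the two outcomes mirror the diff above
function _buildPrefilterResultSketch(filter, combinedFilter, totalHits, maxResultsThreshold)
{
    if (totalHits <= maxResultsThreshold)
    {
        //Successful search: hand back the complete prefilter as JSON
        filter.filter = combinedFilter;
        return JSON.stringify(filter);
    }

    //Too many rows and no further filters left to refine the search
    logging.log("too many rows and no more filters -> ");
    return null;
}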