Commit f1f26778 authored by David Büchler

Interim state of duplicate detection

The configured filters that are passed in are now populated with values. Support for filter.childs of type == "group" and type == "row".
parent 654fb153
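For orientation: the filter templates returned by _loadFilters are JSON structures whose childs are either of type "row" (a single field condition) or type "group" (a nested list of further childs). A minimal sketch of such a template, with purely illustrative field names, might look like the following; ScanForDuplicates fills the empty row values from the passed field/value map before the filter is applied:

{
  "filter": {
    "type": "group",
    "childs": [
      { "type": "row", "name": "LASTNAME", "value": "" },
      { "type": "group", "childs": [
          { "type": "row", "name": "FIRSTNAME", "value": "" }
      ]}
    ]
  }
}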
import("system.logging");
import("DuplicateScanner_lib");
var duplicates = DuplicateScannerUtils.ScanForDuplicates("FilterName", "Person_entity", "", ["PERSONID", "LASTNAME"]);
logging.log("duplicates -> " + duplicates);
\ No newline at end of file
var filterName = "FilterName";
var targetEntity = "Person_entity";
var values = {FIRSTNAME: "Anja", LASTNAME: "Lindner", GENDER: "f"};
var resultFields = ["PERSONID", "LASTNAME", "FIRSTNAME"];
var duplicates = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, values, resultFields);
\ No newline at end of file
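Note: the keys of the values object above (FIRSTNAME, LASTNAME, GENDER) are expected to match the name attributes of the configured filter rows, because _insertValuesInFilterTemplate resolves each row's value from the passed field/value map by the row's name.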
@@ -7,6 +7,7 @@
<layout>
<boxLayout>
<name>layout</name>
<direction>VERTICAL</direction>
</boxLayout>
</layout>
<children>
@@ -15,10 +15,8 @@ DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity,
let configuredFilters = _DuplicateScannerUtils._loadFilters(pFilterName, pTargetEntity);
logging.log("configuredFilters filter -> " + configuredFilters);
logging.log("configuredFilters 0 -> " + configuredFilters[0]);
let possibleDuplicates = _DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pTargetEntityResultFields);
let possibleDuplicates = _DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pTargetEntityResultFields, pFilterValues);
// if(_DuplicateScannerUtils._isExternalScanServiceAvailable())
// {
@@ -31,11 +29,10 @@ DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity,
function _DuplicateScannerUtils() {}
_DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pTargetEntityResultFields)
_DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pTargetEntityResultFields, pFilterValues)
{
var maxResultsTreshold = 4;
var combinedFilter = {};
logging.log("pFilterCountCharactersToUseRay.length -> " + pFilterCountCharactersToUseRay.length);
for (var i = 0; i < pFilterCountCharactersToUseRay.length; i++)
{
@@ -48,22 +45,25 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
filter = JSON.parse(filter).filter;
logging.log("filter -> " + JSON.stringify(filter));
logging.log("filter.childs -> " + JSON.stringify(filter.childs));
logging.log("filter.childs count -> " + filter.childs.length);
/*
 * Insert the values into the current filter. This has to happen here so that only the new
 * filter is filled and the combinedFilter therefore grows incrementally, instead of
 * everything being re-filled multiple times.
 */
filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.childs, pFilterValues, countCharsOfValueToUse);
logging.log("gefüllter filter -> " + JSON.stringify(filter.childs));
if(i == 0)
combinedFilter = filter;
else
{
//Extend the current combined filter with the next filter condition to further refine the results
//It seems to always contain one child element at the root
logging.log("current combinedFilter -> " + JSON.stringify(combinedFilter));
combinedFilter.childs.push(filter.childs[0]);
logging.log("appending filter.childs to combinedFilter -> " + JSON.stringify(filter.childs));
}
logging.log("combinedFilter -> " + JSON.stringify(combinedFilter));
//todo insert value into filter
logging.log("combinedFilter + gefüllter filter -> " + JSON.stringify(combinedFilter));
let loadRowsConfig = entities.createConfigForLoadingRows()
.entity(pTargetEntity)
@@ -94,6 +94,8 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
return resultRows;
}
}
logging.log("zu viele rows und keine filter mehr -> ");
return [];
}
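//Rough outline of the pre-filter (the parts elided between the hunks are assumed):
//1. Fill the current filter's row values, truncated to the configured number of characters.
//2. Load rows for the combined filter; if the result count is within maxResultsTreshold,
//   return those rows as duplicate candidates.
//3. Otherwise AND the next filter's root child into combinedFilter and try again.
//4. If every filter has been applied and the result set is still too large, return [].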
_DuplicateScannerUtils._isExternalScanServiceAvailable = function()
@@ -115,6 +117,56 @@ _DuplicateScannerUtils._loadFilters = function(pFilterName, pTargetEntity)
+ " where FILTER_NAME = '" + pFilterName + "'"
+ " and ENTITY_TO_SCAN_NAME = '" + pTargetEntity + "'";
logging.log("_loadFilters query -> " + query);
// logging.log("_loadFilters query -> " + query);
return db.table(query);
}
_DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, pEntityFieldAndValueMap, pCountCharsOfValueToUse)
{
for(var filterChildNode in pJsonRootNode)
{
var currentNode = pJsonRootNode[filterChildNode];
logging.log("currentNode -> " + JSON.stringify(currentNode));
if(currentNode.type == "row")
{
let fieldName = currentNode.name;
let fieldValue = pEntityFieldAndValueMap[fieldName];
pCountCharsOfValueToUse = parseInt(pCountCharsOfValueToUse, 10);
// logging.log("fieldName -> " + fieldName);
// logging.log("fieldValue -> " + fieldValue);
// logging.log("fieldValue.length -> " + fieldValue.length);
// logging.log("pCountCharsOfValueToUse -> " + pCountCharsOfValueToUse);
if(_DuplicateScannerUtils._isValueLongerThanCharsToUse(fieldValue.length, pCountCharsOfValueToUse))
{
fieldValue = fieldValue.substring(0, pCountCharsOfValueToUse);
logging.log("fieldValue geschnitten -> " + fieldValue);
}
pJsonRootNode[filterChildNode].value = fieldValue;
}
else
{
//currentNode.type == "group"
logging.log("type == group -> ");
let populatedChildNodes = _DuplicateScannerUtils._insertValuesInFilterTemplate(currentNode.childs, pEntityFieldAndValueMap, pCountCharsOfValueToUse);
logging.log("populatedChildNodes -> " + JSON.stringify(populatedChildNodes));
pJsonRootNode[filterChildNode].childs = populatedChildNodes;
}
}
return pJsonRootNode;
}
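//Illustrative example (values are assumptions): with pEntityFieldAndValueMap = {LASTNAME: "Lindner"}
//and pCountCharsOfValueToUse = 3, a child node {type: "row", name: "LASTNAME", value: ""} comes back
//as {type: "row", name: "LASTNAME", value: "Lin"}; "group" nodes are descended into recursively so
//that nested rows are populated the same way.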
/*
 * Returns whether or not a value should be shortened via substring
 *
 * @return True if pCountCharsOfValueToUse is a number, greater than 0 and smaller than the value's length, otherwise false
 */
_DuplicateScannerUtils._isValueLongerThanCharsToUse = function(pValueLength, pCountCharsOfValueToUse)
{
return !isNaN(pCountCharsOfValueToUse)
&& pCountCharsOfValueToUse > 0
&& pValueLength > pCountCharsOfValueToUse;
}
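//Example: _isValueLongerThanCharsToUse("Lindner".length, 3) is true; a non-numeric or
//non-positive pCountCharsOfValueToUse yields false, so the caller leaves the value untouched.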
\ No newline at end of file