Commit 0b439503 authored by David Büchler

The Duplicates entity was extended with additional parameters. It is now possible to pass the ID...

The Duplicates entity was extended with additional parameters. It is now possible to pass the ID of a record, which is then excluded by means of an additional filter. This makes it possible to exclude the record currently being searched for from the result list.
Additional error handling was added.
A bug was fixed in the validation that checks whether a filter name/entity combination already exists: validation also ran while editing an existing record, which prevented saving.
parent 9c5d619a
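How the exclusion works, in short: the record ID passed in via the new parameter is turned into an additional pre-filter condition that is prepended to the configured filters, so the record being edited can never match itself. A minimal sketch of that filter, assuming illustrative entity, field, and ID values (the JSON shape follows _getIgnoreRecordFilter in the diff below):

// Illustrative values; in practice they come from $param.recordIdToIgnore_param
// and the ID field of the target entity.
var targetEntity = "Person_entity";
var idField = "PERSONID";
var idValue = "0a611832-9476-481e-bde5-af3c3a98f1b4";

// A NOT_EQUAL condition on the record's own ID, in the filter JSON format used below:
var ignoreFilterJson = JSON.stringify({
    "entity": targetEntity,
    "filter": {"type": "group", "operator": "AND", "childs": [
        {"type": "row", "name": idField, "operator": "NOT_EQUAL",
         "value": idValue, "key": "", "contenttype": "TEXT"}
    ]}
});

// The scanner then prepends it to the configured filters:
// configuredFilters = [[ignoreFilterJson, null, null]].concat(configuredFilters);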
import("system.logging");
import("system.translate"); import("system.translate");
import("system.result"); import("system.result");
import("system.db"); import("system.db");
...@@ -11,6 +12,7 @@ import("system.vars"); ...@@ -11,6 +12,7 @@ import("system.vars");
var targetEntity = vars.get("$field.ENTITY_TO_SCAN_NAME"); var targetEntity = vars.get("$field.ENTITY_TO_SCAN_NAME");
var currentFilterName = vars.get("$field.FILTER_NAME"); var currentFilterName = vars.get("$field.FILTER_NAME");
var currentId = vars.get("$field.UID");
var messageText = "The combination of filter name and target entity is already in use"; var messageText = "The combination of filter name and target entity is already in use";
if(targetEntity != "") if(targetEntity != "")
...@@ -19,6 +21,9 @@ if(targetEntity != "") ...@@ -19,6 +21,9 @@ if(targetEntity != "")
+ " where ENTITY_TO_SCAN_NAME = '" + targetEntity + "'" + " where ENTITY_TO_SCAN_NAME = '" + targetEntity + "'"
+ " and FILTER_NAME = '" + currentFilterName + "'"; + " and FILTER_NAME = '" + currentFilterName + "'";
if(currentId != "")
query += " and ID != '" + currentId + "'";
var occurrences = parseInt(db.cell(query), 10); var occurrences = parseInt(db.cell(query), 10);
if(occurrences > 0) if(occurrences > 0)
......
@@ -64,6 +64,11 @@
     <name>maxReturnValueCount</name>
     <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/maxreturnvaluecount/valueProcess.js</valueProcess>
   </entityField>
+  <entityParameter>
+    <name>recordIdToIgnore_param</name>
+    <expose v="true" />
+    <mandatory v="true" />
+  </entityParameter>
 </entityFields>
 <recordContainers>
   <jDitoRecordContainer>
@@ -74,6 +79,9 @@
       <jDitoRecordFieldMapping>
         <name>UID.value</name>
       </jDitoRecordFieldMapping>
+      <jDitoRecordFieldMapping>
+        <name>targetEntity.value</name>
+      </jDitoRecordFieldMapping>
       <jDitoRecordFieldMapping>
         <name>VALUE1.value</name>
       </jDitoRecordFieldMapping>
@@ -83,9 +91,6 @@
       <jDitoRecordFieldMapping>
         <name>VALUE3.value</name>
       </jDitoRecordFieldMapping>
-      <jDitoRecordFieldMapping>
-        <name>targetEntity.value</name>
-      </jDitoRecordFieldMapping>
     </recordFieldMappings>
   </jDitoRecordContainer>
 </recordContainers>
...
import("system.result");
result.string("5");
\ No newline at end of file
@@ -9,13 +9,14 @@ var values = JSON.parse(vars.get("$param.valuesToScan_param"));
 var resultFields = JSON.parse(vars.get("$param.resultFields_param"));
 var resultFieldsIdFieldName = vars.get("$param.resultFieldsIdFieldName_param");
 var maxRecorValues = parseInt(vars.get("$field.maxReturnValueCount"), 10);
+var recordIdToIgnore = vars.get("$param.recordIdToIgnore_param");
 logging.log("filterName -> " + filterName);
 logging.log("targetEntity -> " + targetEntity);
 logging.log("values -> " + values);
 logging.log("resultFields -> " + resultFields);
-var duplicates = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, values, resultFields);
+var duplicates = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, values, resultFields, resultFieldsIdFieldName, recordIdToIgnore);
 logging.log("duplicates -> " + JSON.stringify(duplicates));
 //[{"FIRSTNAME":"Markus","LASTNAME":"Altinger","PERSONID":"0a611832-9476-481e-bde5-af3c3a98f1b4"},
@@ -24,8 +25,7 @@ var returnRay = [];
 logging.log("duplicates.length -> " + duplicates.length);
 for (i = 0; i < duplicates.length; i++)
 {
-    logging.log("i -> " + i);
-    let newRecord = _compileSingleRecord(duplicates[i], resultFieldsIdFieldName, maxRecorValues);
+    let newRecord = _compileSingleRecord(duplicates[i], resultFieldsIdFieldName, maxRecorValues, targetEntity);
     logging.log("newRecord -> " + newRecord);
     returnRay.push(newRecord);
@@ -33,12 +33,13 @@ for (i = 0; i < duplicates.length; i++)
 result.object(returnRay);
-function _compileSingleRecord(pDuplicate, pIdFieldName, maxRecordValues)
+function _compileSingleRecord(pDuplicate, pIdFieldName, maxRecordValues, pTargetEntity)
 {
     let newRecord = [];
     let recordId = pDuplicate[pIdFieldName];
     newRecord.push(recordId);
+    newRecord.push(pTargetEntity);
     let recordCount = 0;
@@ -64,7 +65,10 @@ function _compileSingleRecord(pDuplicate, pIdFieldName, maxRecordValues)
     logging.log("newRecord.length -> " + newRecord.length);
     logging.log("maxRecordValues -> " + maxRecordValues);
     //If there are less elements than required, fill the record with empty strings
+    //because a record of a recordContainer always has to have the correct length, defined by the "recordFieldMappings"
     if(newRecord.length < maxRecordValues)
     {
         let elementsToFill = maxRecordValues - newRecord.length;
...
import("system.vars");
import("system.result");
result.string(vars.get("$field.PERSONID"));
\ No newline at end of file
@@ -14,5 +14,6 @@ import("system.vars");
 var firstname = vars.get("$field.FIRSTNAME");
 var lastname = vars.get("$field.LASTNAME");
 let gender = vars.get("$field.GENDER");
+let recordId = vars.get("$field.PERSONID");
-result.object({FIRSTNAME: firstname, LASTNAME: lastname, GENDER: gender});
+result.object({FIRSTNAME: firstname, LASTNAME: lastname, GENDER: gender, PERSONID: recordId});
\ No newline at end of file
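For context: with the PERSONID entry added above, the values object handed to the duplicate scan (presumably via $param.valuesToScan_param) now carries the record's own ID alongside the fields being compared, e.g. {FIRSTNAME: "Markus", LASTNAME: "Altinger", GENDER: "m", PERSONID: "0a611832-9476-481e-bde5-af3c3a98f1b4"} (values illustrative, taken from the sample comment above).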
@@ -12,11 +12,12 @@ import("system.entities");
  */
 function DuplicateScannerUtils() {}
-DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterValues, pTargetEntityResultFields)
+DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterValues, pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
 {
+    let ignoredRecordFilter = _DuplicateScannerUtils._getIgnoreRecordFilter(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity);
     let configuredFilters = _DuplicateScannerUtils._loadFilters(pFilterName, pTargetEntity);
-    logging.log("configuredFilters filter -> " + configuredFilters);
+    configuredFilters = [ignoredRecordFilter].concat(configuredFilters);
     let possibleDuplicates = _DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pTargetEntityResultFields, pFilterValues);
@@ -40,6 +41,17 @@ var INDEX_FILTER_CONDITION = 0;
 var INDEX_COUNT_CHARS_TO_USE = 1;
 var INDEX_MAX_RESULTS_THRESHOLD = 2;
+_DuplicateScannerUtils._getIgnoreRecordFilter = function(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity)
+{
+    let ignoreFilterJson = JSON.stringify({"entity":pTargetEntity,"filter":{"type":"group","operator":"AND","childs":[{"type":"row","name":pRecordIdFieldToIgnore,"operator":"NOT_EQUAL","value":pRecordIdValueToIgnore,"key":"","contenttype":"TEXT"}]}});
+    return [ignoreFilterJson, null, null];
+}
+/*
+ * The pre-filter is used to narrow down the records to be searched by the duplicate scan service.
+ * It loads the target entity and uses filters to achieve this.
+ */
 _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pTargetEntityResultFields, pFilterValues)
 {
     var combinedFilter = {};
@@ -52,10 +64,11 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
     if(filter == null || filter == "")
         continue;
-    logging.log("complete filter -> " + filter);
     filter = JSON.parse(filter).filter;
-    logging.log("filter -> " + JSON.stringify(filter));
+    logging.log("countCharsOfValueToUse -> " + countCharsOfValueToUse);
+    logging.log("maxResultsThreshold -> " + maxResultsThreshold);
     /*
      * Insert the values into the current filter. Has to be here so that only the new filter
      * and therefore the combinedFilter incrementally gets filled and not always everything multiple times.
@@ -161,13 +174,20 @@ _DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, p
     let fieldValue = pEntitiyFieldAndValueMap[fieldName];
     pCountCharsOfValueToUse = parseInt(pCountCharsOfValueToUse, 10);
-    logging.log("pEntitiyFieldAndValueMap -> " + JSON.stringify(pEntitiyFieldAndValueMap));
-    logging.log("fieldName -> " + fieldName);
-    logging.log("fieldValue -> " + fieldValue);
-    logging.log("fieldValue.length -> " + fieldValue.length);
-    logging.log("pCountCharsOfValueToUse -> " + pCountCharsOfValueToUse);
-    if(_DuplicateScannerUtils._isValueLongerThanCharsToUse(fieldValue.length, pCountCharsOfValueToUse))
+    if(fieldValue == null)
+    {
+        logging.log("Duplicate Scan: Requested value for field " + fieldName + " not present in the provided values list");
+        continue;
+    }
+    // logging.log("pEntitiyFieldAndValueMap -> " + JSON.stringify(pEntitiyFieldAndValueMap));
+    // logging.log("fieldName -> " + fieldName);
+    // logging.log("fieldValue -> " + fieldValue);
+    // logging.log("fieldValue.length -> " + fieldValue.length);
+    // logging.log("pCountCharsOfValueToUse -> " + pCountCharsOfValueToUse);
+    if(_DuplicateScannerUtils._isNotNullAndANumber(pCountCharsOfValueToUse)
+        && _DuplicateScannerUtils._isValueLongerThanCharsToUse(fieldValue.length, pCountCharsOfValueToUse))
     {
         fieldValue = fieldValue.substring(0, pCountCharsOfValueToUse);
         logging.log("fieldValue truncated -> " + fieldValue);
@@ -197,4 +217,9 @@ _DuplicateScannerUtils._isValueLongerThanCharsToUse = function(pValueLength, pCo
     return !isNaN(pCountCharsOfValueToUse)
         && pCountCharsOfValueToUse > 0
         && pValueLength > pCountCharsOfValueToUse;
+}
+_DuplicateScannerUtils._isNotNullAndANumber = function(pCountCharsOfValueToUse)
+{
+    return pCountCharsOfValueToUse != null && !isNaN(pCountCharsOfValueToUse);
 }
\ No newline at end of file
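Taken together, a caller now supplies both the name of the ID field and the ID value to exclude. A hedged usage sketch; the signature matches the diff above, while the import alias and all concrete values are illustrative:

import("system.logging");
import("DuplicateScannerUtils_lib"); // assumed library file name

var filterName = "PersonDuplicates";  // illustrative filter name
var targetEntity = "Person_entity";   // illustrative target entity
var values = {FIRSTNAME: "Markus", LASTNAME: "Altinger", GENDER: "m",
    PERSONID: "0a611832-9476-481e-bde5-af3c3a98f1b4"};
var resultFields = ["FIRSTNAME", "LASTNAME", "PERSONID"];

// The record whose PERSONID equals the last argument is excluded from the hits,
// so a record that is being edited no longer reports itself as its own duplicate.
var duplicates = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity,
    values, resultFields, "PERSONID", values["PERSONID"]);
logging.log("duplicates -> " + JSON.stringify(duplicates));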