diff --git a/entity/DuplicateScan_entity/entityfields/filter_name/onValidation.js b/entity/DuplicateScan_entity/entityfields/filter_name/onValidation.js
index 597bb8faa5793a446bb110a7fc349a8d4895bf11..c239d4068de963afbb068c8c617d589c21beda26 100644
--- a/entity/DuplicateScan_entity/entityfields/filter_name/onValidation.js
+++ b/entity/DuplicateScan_entity/entityfields/filter_name/onValidation.js
@@ -1,3 +1,4 @@
+import("system.logging");
 import("system.translate");
 import("system.result");
 import("system.db");
@@ -11,6 +12,7 @@ import("system.vars");
 
 var targetEntity = vars.get("$field.ENTITY_TO_SCAN_NAME");
 var currentFilterName = vars.get("$field.FILTER_NAME");
+var currentId = vars.get("$field.UID");
 var messageText = "The combination of filter name and target entity is already in use";
 
 if(targetEntity != "")
@@ -19,6 +21,9 @@ if(targetEntity != "")
         + " where ENTITY_TO_SCAN_NAME = '" + targetEntity + "'"
         + " and FILTER_NAME = '" + currentFilterName + "'";
 
+    if(currentId != "")
+        query += " and ID != '" + currentId + "'";
+
     var occurrences = parseInt(db.cell(query), 10);
 
     if(occurrences > 0)
diff --git a/entity/Duplicates_entity/Duplicates_entity.aod b/entity/Duplicates_entity/Duplicates_entity.aod
index 44e31cb3e5ab4d0218a513b85aa2966a4497dacd..c0a6d7fe20f6dd0caced42a0d3e84175246b5272 100644
--- a/entity/Duplicates_entity/Duplicates_entity.aod
+++ b/entity/Duplicates_entity/Duplicates_entity.aod
@@ -64,6 +64,11 @@
       <name>maxReturnValueCount</name>
       <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/maxreturnvaluecount/valueProcess.js</valueProcess>
     </entityField>
+    <entityParameter>
+      <name>recordIdToIgnore_param</name>
+      <expose v="true" />
+      <mandatory v="true" />
+    </entityParameter>
   </entityFields>
   <recordContainers>
     <jDitoRecordContainer>
@@ -74,6 +79,9 @@
         <jDitoRecordFieldMapping>
           <name>UID.value</name>
         </jDitoRecordFieldMapping>
+        <jDitoRecordFieldMapping>
+          <name>targetEntity.value</name>
+        </jDitoRecordFieldMapping>
         <jDitoRecordFieldMapping>
           <name>VALUE1.value</name>
         </jDitoRecordFieldMapping>
@@ -83,9 +91,6 @@
         <jDitoRecordFieldMapping>
           <name>VALUE3.value</name>
         </jDitoRecordFieldMapping>
-        <jDitoRecordFieldMapping>
-          <name>targetEntity.value</name>
-        </jDitoRecordFieldMapping>
       </recordFieldMappings>
     </jDitoRecordContainer>
   </recordContainers>
diff --git a/entity/Duplicates_entity/entityfields/maxreturnvaluecount/valueProcess.js b/entity/Duplicates_entity/entityfields/maxreturnvaluecount/valueProcess.js
new file mode 100644
index 0000000000000000000000000000000000000000..3c533d0fc5b71a57123ae0d97996f7f920411319
--- /dev/null
+++ b/entity/Duplicates_entity/entityfields/maxreturnvaluecount/valueProcess.js
@@ -0,0 +1,2 @@
+import("system.result");
+result.string("5");
\ No newline at end of file
diff --git a/entity/Duplicates_entity/recordcontainers/jditorecordcontainer/contentProcess.js b/entity/Duplicates_entity/recordcontainers/jditorecordcontainer/contentProcess.js
index 2dade6b9ba8d8f9746dc8f09f73286ea628dce29..f11e641ac15873f08289b4ea3cac23dc397ee48c 100644
--- a/entity/Duplicates_entity/recordcontainers/jditorecordcontainer/contentProcess.js
+++ b/entity/Duplicates_entity/recordcontainers/jditorecordcontainer/contentProcess.js
@@ -9,13 +9,14 @@ var values = JSON.parse(vars.get("$param.valuesToScan_param"));
 var resultFields = JSON.parse(vars.get("$param.resultFields_param"));
 var resultFieldsIdFieldName = vars.get("$param.resultFieldsIdFieldName_param");
 var maxRecorValues = parseInt(vars.get("$field.maxReturnValueCount"), 10);
+var recordIdToIgnore = vars.get("$param.recordIdToIgnore_param");
vars.get("$param.recordIdToIgnore_param"); logging.log("filterName -> " + filterName); logging.log("targetEntity -> " + targetEntity); logging.log("values -> " + values); logging.log("resultFields -> " + resultFields); -var duplicates = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, values, resultFields); +var duplicates = DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, values, resultFields, resultFieldsIdFieldName, recordIdToIgnore); logging.log("duplicates -> " + JSON.stringify(duplicates)); //[{"FIRSTNAME":"Markus","LASTNAME":"Altinger","PERSONID":"0a611832-9476-481e-bde5-af3c3a98f1b4"}, @@ -24,8 +25,7 @@ var returnRay = []; logging.log("duplicates.length -> " + duplicates.length); for (i = 0; i < duplicates.length; i++) { - logging.log("i -> " + i); - let newRecord = _compileSingleRecord(duplicates[i], resultFieldsIdFieldName, maxRecorValues); + let newRecord = _compileSingleRecord(duplicates[i], resultFieldsIdFieldName, maxRecorValues, targetEntity); logging.log("newRecord -> " + newRecord); returnRay.push(newRecord); @@ -33,12 +33,13 @@ for (i = 0; i < duplicates.length; i++) result.object(returnRay); -function _compileSingleRecord(pDuplicate, pIdFieldName, maxRecordValues) +function _compileSingleRecord(pDuplicate, pIdFieldName, maxRecordValues, pTargetEntity) { let newRecord = []; let recordId = pDuplicate[pIdFieldName]; newRecord.push(recordId); + newRecord.push(pTargetEntity); let recordCount = 0; @@ -64,7 +65,10 @@ function _compileSingleRecord(pDuplicate, pIdFieldName, maxRecordValues) logging.log("newRecord.length -> " + newRecord.length); logging.log("maxRecordValues -> " + maxRecordValues); + + //If there are less elements than required, fill the record with empty strings + //because a recor dof a recordContainer always has to have the correct length defined by the "recordFieldMappings if(newRecord.length < maxRecordValues) { let elementsToFill = maxRecordValues - newRecord.length; diff --git a/entity/Person_entity/entityfields/personduplicates/children/recordidtoignore_param/valueProcess.js b/entity/Person_entity/entityfields/personduplicates/children/recordidtoignore_param/valueProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..0e4d59044e0e7deeb430491c96d6ef3b3c0e9c04 --- /dev/null +++ b/entity/Person_entity/entityfields/personduplicates/children/recordidtoignore_param/valueProcess.js @@ -0,0 +1,3 @@ +import("system.vars"); +import("system.result"); +result.string(vars.get("$field.PERSONID")); \ No newline at end of file diff --git a/entity/Person_entity/entityfields/personduplicates/children/valuestoscan_param/valueProcess.js b/entity/Person_entity/entityfields/personduplicates/children/valuestoscan_param/valueProcess.js index d0b8233bee305acf279697f7ed22a3f956bcb1ec..1e01a572a98104737bba1f2dae71fb293de8230c 100644 --- a/entity/Person_entity/entityfields/personduplicates/children/valuestoscan_param/valueProcess.js +++ b/entity/Person_entity/entityfields/personduplicates/children/valuestoscan_param/valueProcess.js @@ -14,5 +14,6 @@ import("system.vars"); var firstname = vars.get("$field.FIRSTNAME"); var lastname = vars.get("$field.LASTNAME"); let gender = vars.get("$field.GENDER"); +let recordId = vars.get("$field.PERSONID"); -result.object({FIRSTNAME: firstname, LASTNAME: lastname, GENDER: gender}); \ No newline at end of file +result.object({FIRSTNAME: firstname, LASTNAME: lastname, GENDER: gender, PERSONID: recordId}); \ No newline at end of file diff --git a/process/DuplicateScanner_lib/process.js 
index bae646450a653d5860bf749835d871125dce02a5..0a2af60fd9cf8c4b0c0c40d6fc14538cfd315343 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -12,11 +12,12 @@ import("system.entities");
  */
 function DuplicateScannerUtils() {}
 
-DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterValues, pTargetEntityResultFields)
+DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterValues, pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
 {
+    let ignoredRecordFilter = _DuplicateScannerUtils._getIgnoreRecordFilter(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity);
     let configuredFilters = _DuplicateScannerUtils._loadFilters(pFilterName, pTargetEntity);
-
-    logging.log("configuredFilters filter -> " + configuredFilters);
+
+    configuredFilters = [ignoredRecordFilter].concat(configuredFilters);
 
     let possibleDuplicates = _DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pTargetEntityResultFields, pFilterValues);
 
@@ -40,6 +41,17 @@ var INDEX_FILTER_CONDITION = 0;
 var INDEX_COUNT_CHARS_TO_USE = 1;
 var INDEX_MAX_RESULTS_THRESHOLD = 2;
 
+_DuplicateScannerUtils._getIgnoreRecordFilter = function(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity)
+{
+    let ignoreFilterJson = JSON.stringify({"entity":pTargetEntity,"filter":{"type":"group","operator":"AND","childs":[{"type":"row","name":pRecordIdFieldToIgnore,"operator":"NOT_EQUAL","value":pRecordIdValueToIgnore,"key":"","contenttype":"TEXT"}]}});
+
+    return [ignoreFilterJson, null, null];
+}
+
+/*
+ * The pre-filter is used to narrow down the records to be searched by the duplicate scan service.
+ * It loads the target entity and uses filters to achieve this.
+ */
 _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pTargetEntityResultFields, pFilterValues)
 {
     var combinedFilter = {};
@@ -52,10 +64,11 @@ _DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCha
 
         if(filter == null || filter == "")
             continue;
-
+        logging.log("complete filter -> " + filter);
         filter = JSON.parse(filter).filter;
 
-        logging.log("filter -> " + JSON.stringify(filter));
+        logging.log("countCharsOfValueToUse -> " + countCharsOfValueToUse);
+        logging.log("maxResultsThreshold -> " + maxResultsThreshold);
         /*
         * Insert the values into the current filter. Has to be here so that only the new filter
         * and therefore the combinedFilter incrementally gets filled and not always everything multiple times.
@@ -161,13 +174,20 @@ _DuplicateScannerUtils._insertValuesInFilterTemplate = function(pJsonRootNode, p
         let fieldValue = pEntitiyFieldAndValueMap[fieldName];
         pCountCharsOfValueToUse = parseInt(pCountCharsOfValueToUse, 10);
 
-        logging.log("pEntitiyFieldAndValueMap -> " + JSON.stringify(pEntitiyFieldAndValueMap));
-        logging.log("fieldName -> " + fieldName);
-        logging.log("fieldValue -> " + fieldValue);
-        logging.log("fieldValue.length -> " + fieldValue.length);
-        logging.log("pCountCharsOfValueToUse -> " + pCountCharsOfValueToUse);
+        if(fieldValue == null)
+        {
+            logging.log("Duplicate Scan: Requested value for field " + fieldName + " is not present in the provided values list");
+            continue;
+        }
 
-        if(_DuplicateScannerUtils._isValueLongerThanCharsToUse(fieldValue.length, pCountCharsOfValueToUse))
+//        logging.log("pEntitiyFieldAndValueMap -> " + JSON.stringify(pEntitiyFieldAndValueMap));
+//        logging.log("fieldName -> " + fieldName);
+//        logging.log("fieldValue -> " + fieldValue);
+//        logging.log("fieldValue.length -> " + fieldValue.length);
+//        logging.log("pCountCharsOfValueToUse -> " + pCountCharsOfValueToUse);
+
+        if(_DuplicateScannerUtils._isNotNullAndANumber(pCountCharsOfValueToUse)
+            && _DuplicateScannerUtils._isValueLongerThanCharsToUse(fieldValue.length, pCountCharsOfValueToUse))
         {
             fieldValue = fieldValue.substring(0, pCountCharsOfValueToUse);
             logging.log("fieldValue geschnitten -> " + fieldValue);
@@ -197,4 +217,9 @@ _DuplicateScannerUtils._isValueLongerThanCharsToUse = function(pValueLength, pCo
     return !isNaN(pCountCharsOfValueToUse) &&
             pCountCharsOfValueToUse > 0 &&
             pValueLength > pCountCharsOfValueToUse;
+}
+
+_DuplicateScannerUtils._isNotNullAndANumber = function(pCountCharsOfValueToUse)
+{
+    return pCountCharsOfValueToUse != null && !isNaN(pCountCharsOfValueToUse);
 }
\ No newline at end of file
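
Reviewer note: below is a minimal call-site sketch of the extended ScanForDuplicates signature introduced by this patch, in JDito-style JavaScript. The entity name and field names are taken from the Person_entity parameters above; the filter name "ExamplePersonFilter" and the lib import name are assumptions for illustration only, not values from the patch.

// Usage sketch (not part of the patch): a caller passes the new
// pRecordIdFieldToIgnore / pRecordIdValueToIgnore pair so the record
// currently being edited is excluded from its own duplicate result.
import("system.vars");
import("system.logging");
import("DuplicateScanner_lib"); // assumed import name for the lib

var valuesToScan = {
    FIRSTNAME: vars.get("$field.FIRSTNAME"),
    LASTNAME: vars.get("$field.LASTNAME"),
    PERSONID: vars.get("$field.PERSONID")
};

// "ExamplePersonFilter" is an assumed filter name. Internally,
// _getIgnoreRecordFilter turns the last two arguments into a NOT_EQUAL
// condition that is prepended to the configured pre-filters as the tuple
// [filterJson, null, null] (INDEX_FILTER_CONDITION, INDEX_COUNT_CHARS_TO_USE,
// INDEX_MAX_RESULTS_THRESHOLD).
var duplicates = DuplicateScannerUtils.ScanForDuplicates(
    "ExamplePersonFilter",
    "Person_entity",
    valuesToScan,
    ["FIRSTNAME", "LASTNAME", "PERSONID"],
    "PERSONID",
    vars.get("$field.PERSONID"));

logging.log("duplicates -> " + JSON.stringify(duplicates));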