Skip to content
Snippets Groups Projects
Commit 8ef0c538 authored by David Büchler's avatar David Büchler
Browse files

New actions to start the rebuilding of the caches for Person and Organisation have been created

Removed unnecessary log outputs
Fixed wrong behaviour: if a valid prefilter existed but the filter-pattern fields contained no value, the index search was still triggered. The records found on the basis of the prefilter alone were then wrongly classified as duplicates.
Code examples have been added to the important functions like ScanForDuplicates or RebuildDuplicatesCache
parent 8e870182
No related branches found
No related tags found
No related merge requests found
......@@ -101,6 +101,20 @@
</entityParameter>
</children>
</entityConsumer>
<entityActionGroup>
<name>RunActionGroup</name>
<children>
<entityActionField>
<name>RebuildPersonDuplicatesCache</name>
<title>Rebuild Person duplicates cache</title>
<onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js</onActionProcess>
</entityActionField>
<entityActionField>
<name>RebuildOrganisationDuplicatesCache</name>
<title>Rebuild Organisation duplicates cache</title>
<!-- NOTE(review): no onActionProcess is set here, unlike the Person sibling action above —
     this action currently does nothing when triggered. Confirm whether
     .../runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
     still needs to be registered. -->
</entityActionField>
</children>
</entityActionGroup>
</entityFields>
<recordContainers>
<dbRecordContainer>
......
import("system.logging");
import("DuplicateScanner_lib");

/*
 * Rebuilds the duplicates cache for the Person entity.
 *
 * Flow:
 *  1) Load the configured index fields and result fields for the "PersonDuplicates" filter.
 *  2) Build the SQL select over all configured fields (ADDRESS is left-joined because
 *     a person may have no address record).
 *  3) Delete the existing duplicate clusters for the target entity.
 *  4) Recalculate the cache; the callback is invoked per block of possible duplicates
 *     and may reformat/post-process the results (e.g. call an external webservice).
 *  5) Refresh the relations of duplicates that are no longer part of any cluster.
 */
var filterName = "PersonDuplicates";
var targetEntity = "Person_entity";

let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);

let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);

let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
                            + " join PERSON on PERSONID = PERSON_ID"
                            + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";

DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);

/*
 * Callback run for every array of possible duplicates found during the rebuild.
 * Translates entity field names to their index field names so each result value can be
 * read out and formatted; must return the (possibly reformatted) array in the same
 * structure it received.
 */
let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
{
    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields);

    //Run through every duplicate result and read out the result fields
    for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
    {
        for (let b = 0; b < resultFields.length; b++)
        {
            let entityFieldName = resultFields[b];
            let indexFieldName = indexResultFields[entityFieldName];
            //value access: pPossibleDuplicatesRay[i][indexFieldName]
            //format values
        }
    }
    //call webservice
    //reformat results to same structure as before
    return pPossibleDuplicatesRay;
};

DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);

DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
......@@ -181,49 +181,97 @@ import("JditoFilter_lib");
//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
//##############################################################################
//####################################Rebuild person duplicates##########################################
//Example: full rebuild of the Person duplicates cache (delete clusters, recalculate, refresh relations).
//var filterName = "PersonDuplicates";
//var targetEntity = "Person_entity";
//
//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
//
//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
//logging.log("resultFields -> " + resultFields);
//
//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
//logging.log("querySelectFields -> " + querySelectFields);
//
//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
// + " join PERSON on PERSONID = PERSON_ID"
// + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";
//
//logging.log("Löschen von PERSON Dubletten -> ");
//DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
//
//let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
//{
// let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
//
// //Run through every duplicate result and read out the result fields
// for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
// {
// for (let b = 0; b < resultFields.length; b++)
// {
// let entityFieldName = resultFields[b];
// let indexFieldName = indexResultFields[entityFieldName];
// //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
// //format values
// }
// }
// //call webservice
// //reformat results to same structure as before
// return pPossibleDuplicatesRay;
//};
//
//logging.log("Neu berechnen von PERSON Dubletten -> ");
//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
//duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);
//
//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
//##################################single scanForDuplicates############################################
/*
 * Rebuilds the Person duplicates cache (test/driver section).
 * Mirrors the RebuildPersonDuplicatesCache action: load the filter configuration,
 * build the data query, drop the existing clusters, recalculate them, and refresh
 * the relations of duplicates no longer belonging to any cluster.
 */
var filterName = "PersonDuplicates";
var targetEntity = "Person_entity";

let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);

let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);

//ADDRESS is left-joined because a person may have no address record
let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
                            + " join PERSON on PERSONID = PERSON_ID"
                            + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";

DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);

/*
 * Callback run for every array of possible duplicates found during the rebuild.
 * Translates entity field names to index field names so each result value can be
 * read out; must return the array in the same structure it received.
 */
let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) {
    let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields);

    //Run through every duplicate result and read out the result fields
    for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
    {
        for (let b = 0; b < resultFields.length; b++)
        {
            let entityFieldName = resultFields[b];
            let indexFieldName = indexResultFields[entityFieldName];
            //value access: pPossibleDuplicatesRay[i][indexFieldName]
        }
    }
    return pPossibleDuplicatesRay;
};

DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);

DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
//Example: scan for duplicates of one single record (see section marker above).
//var filterName = "PersonDuplicates";
//var targetEntity = "Person_entity";
//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
//
//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
// + " join PERSON on PERSONID = PERSON_ID"
// + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"
// + " where Condition for the record to be checked";
//let targetRecordsData = db.table(queryPersonFieldData);
//
//let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordsData[0]);
////The first field in this Array must always be the configured id field.
//let idField = entityFieldValuesRay[0][0];
//let idValue = entityFieldValuesRay[0][1];
//
//let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
//{
// let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields)
//
// //Run through every duplicate result and read out the result fields
// for (let i = 0; i < pPossibleDuplicatesRay.length; i++)
// {
// for (let b = 0; b < resultFields.length; b++)
// {
// let entityFieldName = resultFields[b];
// let indexFieldName = indexResultFields[entityFieldName];
// //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]);
// //format values
// }
// }
// //call webservice
// //reformat results to same structure as before
// return pPossibleDuplicatesRay;
//};
//
////The result values can be accessed as seen above in "formatToJsonAndCallWsCallback"
//DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, entityFieldValuesRay, resultFields,
//idField, idValue, formatToJsonAndCallWsCallback);
\ No newline at end of file
......@@ -11,6 +11,7 @@
<tableViewTemplate>
<name>Filters</name>
<favoriteActionGroup2>TestActionGroup</favoriteActionGroup2>
<favoriteActionGroup3>RunActionGroup</favoriteActionGroup3>
<entityField>#ENTITY</entityField>
<isCreatable v="false" />
<isEditable v="false" />
......
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment