Commit bf8c1ff4 authored by Dominik Lechner

Changes in Actions for rebuilding Duplicates

parent 87ec3b10
@@ -3,18 +3,21 @@ import("DuplicateScanner_lib");
var filterName = "OrganisationDuplicates";
var targetEntity = "Organisation_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
+var recordBlockSize = 500;
let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
//logging.log("resultFields -> " + resultFields);
logging.log(filterName + ": duplicateFieldsConfig -> " + duplicateFieldsConfig);
logging.log(filterName + ": resultFields -> " + resultFields);
logging.log("Delete all " + targetEntity + " duplicates");
logging.log(filterName + ": Löschen von ORGANISATION Dubletten -> ");
DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
logging.log("Recalculate all " + targetEntity + " duplicates");
logging.log(filterName + ": Neu berechnen von ORGANISATION Dubletten -> ");
DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
duplicateFieldsConfig, resultFields, null);
-DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
+logging.log(filterName + ": Refreshing unrelated ORGANISATION duplicate relations -> ");
+DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
+logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file
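
Both server processes touched by this commit run the same sequence: delete the existing duplicate clusters for the target entity, rebuild the duplicates cache from the configured index fields and result fields, then refresh the unrelated-duplicate relations. The PersonDuplicates process, shown after this sketch, repeats the same steps for Person_entity. As an illustration only, a hypothetical helper could wrap that shared sequence; the sketch assumes the DuplicateScannerUtils calls behave exactly as they are used in the diff, and rebuildDuplicatesFor is an invented name, not part of the commit.

import("system.logging");
import("DuplicateScanner_lib");

// Hypothetical helper (illustration only): the rebuild sequence shared by both scanners.
function rebuildDuplicatesFor(pFilterName, pTargetEntity, pRecordBlockSize)
{
	let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
	let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);

	logging.log(pFilterName + ": deleting existing duplicate clusters");
	DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(pTargetEntity);

	logging.log(pFilterName + ": rebuilding duplicates cache");
	DuplicateScannerUtils.RebuildDuplicatesCache(pFilterName, pTargetEntity, pRecordBlockSize,
		duplicateFieldsConfig, resultFields, null);

	logging.log(pFilterName + ": refreshing unrelated duplicate relations");
	DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(pTargetEntity);
}

// Possible usage, matching the two scanners configured in this commit:
rebuildDuplicatesFor("OrganisationDuplicates", "Organisation_entity", 500);
rebuildDuplicatesFor("PersonDuplicates", "Person_entity", 500);
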
import("system.util");
import("Employee_lib");
import("system.process");
import("system.logging");
import("DuplicateScanner_lib");
//let processParameters = {
// filterName: "PersonDuplicates",
// targetEntity: "Person_entity" //process.execute is only able to handle strings
//}
//let userId = EmployeeUtils.getCurrentUserId();
//
//try
//{
// let processId = "manualrun_rebuild_duplicatecache_" + util.getNewUUID();
// process.executeTimer(processId, "RebuildDuplicatesCache_serverProcess", 0, true, false, process.TIMERTYPE_SERVER_RUN, userId, false, process.THREADPRIORITY_LOW)
// process.stopTimer(processId);
//}
//catch (exception)
//{
// logging.log(" exception-> " + exception);
//}
var filterName = "PersonDuplicates";
var targetEntity = "Person_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity);
+var recordBlockSize = 500;
let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
//logging.log("resultFields -> " + resultFields);
logging.log(filterName + ": duplicateFieldsConfig -> " + duplicateFieldsConfig);
logging.log(filterName + ": resultFields -> " + resultFields);
logging.log("Delete all " + targetEntity + " duplicates");
logging.log(filterName + ": Löschen von PERSON Dubletten -> ");
DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
logging.log("Recalculate all " + targetEntity + " duplicates");
logging.log("Neu berechnen von PERSON Dubletten -> ");
DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
duplicateFieldsConfig, resultFields, null);
logging.log(filterName + ": Refresh Unrelated Duplicates von ORGANISATION Dubletten -> ");
DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file
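
The commented-out block at the top of the Person process shows how a rebuild could also be triggered on demand by starting RebuildDuplicatesCache_serverProcess as a one-shot server timer. The sketch below only tidies that snippet; it assumes the process.executeTimer signature used in the comment is correct and is not part of this commit.

import("system.util");
import("Employee_lib");
import("system.process");
import("system.logging");

// Sketch based on the commented-out block above: run the rebuild process once,
// as a low-priority server-side timer, on behalf of the current user.
let userId = EmployeeUtils.getCurrentUserId();
try
{
	let processId = "manualrun_rebuild_duplicatecache_" + util.getNewUUID();
	process.executeTimer(processId, "RebuildDuplicatesCache_serverProcess", 0, true, false,
		process.TIMERTYPE_SERVER_RUN, userId, false, process.THREADPRIORITY_LOW);
	process.stopTimer(processId);
}
catch (exception)
{
	logging.log("exception -> " + exception);
}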