From f64f213b429c91a9dd7f1fa1ac1c8713dbdca778 Mon Sep 17 00:00:00 2001
From: Sebastian Listl <s.listl@adito.de>
Date: Tue, 29 Sep 2020 12:03:58 +0200
Subject: [PATCH] DuplicateScanner_lib changes to follow coding guidelines

---
 .../onActionProcess.js                        |   4 +-
 .../onActionProcess.js                        |   4 +-
 .../testduplicatescanner/onActionProcess.js   |  10 +-
 .../recordcontainers/db/onDBInsert.js         |   2 +-
 process/DuplicateScanner_lib/process.js       | 104 +++++++-----------
 process/Leadimport_lib/process.js             |   4 +-
 .../process.js                                |   6 +-
 .../process.js                                |   4 +-
 8 files changed, 59 insertions(+), 79 deletions(-)

diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
index c2cbeb99e9..bbe3de9139 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
@@ -4,13 +4,13 @@ import("DuplicateScanner_lib");
 
 var filterName = "OrganisationDuplicates";
 var targetEntity = "Organisation_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
+var recordBlockSize = DuplicateScannerUtils.getBlockSize();
 
 logging.log(filterName + ": Delete duplicates -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log(filterName + ": Recalculate duplicates -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
+DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 logging.log(filterName + ": Refresh unrelated duplicates -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
index b678496914..4b3d2c9bc6 100644
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
@@ -4,13 +4,13 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
+var recordBlockSize = DuplicateScannerUtils.getBlockSize();
 
 logging.log(filterName + ": Delete duplicates -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log(filterName + ": Recalculate duplicates -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
+DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 logging.log(filterName + ": Refresh unrelated duplicates -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
index 17f12757ab..47a4d2b39f 100644
--- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
+++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js
@@ -165,7 +165,7 @@ import("JditoFilter_lib");
 //DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity("Person_entity");
 //
 //logging.log("Neu berechnen von PERSON Dubletten -> ");
-//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
+//DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
 //tmpFieldsInFilterRay, resultFieldsIdFieldName);
 //
 //DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
@@ -185,7 +185,7 @@ import("JditoFilter_lib");
 //DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity)
 //
 //logging.log("Neu berechnen von ORGANISATION Dubletten -> ");
-//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
+//DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
 //tmpFieldsInFilterRay, resultFieldsIdFieldName);
 //
 //DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
@@ -197,7 +197,7 @@ import("JditoFilter_lib");
 //var targetEntity = "Person_entity";
 //
 //let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
-//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
+//let resultFields = DuplicateScannerUtils.getResultFields(filterName, targetEntity);
 //
 //logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
 //logging.log("resultFields -> " + resultFields);
@@ -233,7 +233,7 @@ import("JditoFilter_lib");
 //};
 //
 //logging.log("Neu berechnen von PERSON Dubletten -> ");
-//DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
+//DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData,
 //duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback);
 //
 //DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
@@ -329,7 +329,7 @@ import("JditoFilter_lib");
 //
 //var filterName = "PersonDuplicates";
 //var targetEntity = "Person_entity";
-//DuplicateScannerUtils.LoadEntityRecords(targetEntity, entityFieldsToLoad, 0, 50);
+//DuplicateScannerUtils.getEntityRecords(targetEntity, entityFieldsToLoad, 0, 50);
 
 //Beispiel 1:
 //Feld mit verknüpftem Consumer
diff --git a/entity/Person_entity/recordcontainers/db/onDBInsert.js b/entity/Person_entity/recordcontainers/db/onDBInsert.js
index b0ba83b532..8a497d507e 100644
--- a/entity/Person_entity/recordcontainers/db/onDBInsert.js
+++ b/entity/Person_entity/recordcontainers/db/onDBInsert.js
@@ -42,7 +42,7 @@ WorkflowSignalSender.inserted();
 ////If we found duplicates, cache them
 //if(scanResults != null && scanResults.length > 0)
 //{
-// let insertCount = DuplicateScannerUtils.CacheNewScanResults(contactId, scanResults, targetEntity);
+// let insertCount = DuplicateScannerUtils.cacheNewScanResults(contactId, scanResults, targetEntity);
 // logging.log("insertCount -> " + insertCount);
 //
 // let userId = EmployeeUtils.getCurrentUserId();
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index b8df3fa9c7..23be5bb109 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -50,20 +50,15 @@ DuplicateScannerUtils.loadFilters = function(pFilterName, pTargetEntity)
  */
 DuplicateScannerUtils.DeleteCachedDuplicate = function(pDuplicateId)
 {
-
-
-    var query = newSelect("count(ID), CLUSTERID")
+    var [countDuplicatesInClusterWithoutParameterId, clusterId] = newSelect("count(ID), CLUSTERID")
         .from("DUPLICATECLUSTERS")
         .where("DUPLICATECLUSTERS.CLUSTERID", newSelect("CLUSTERID")
             .from("DUPLICATECLUSTERS")
             .where("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId).build(), SqlBuilder.IN())
         .and("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId, SqlBuilder.NOT_EQUAL())
-        .groupBy("CLUSTERID");
-
-    var countAndClusterId = query.arrayRow();
-    let countDuplicatesInClusterWithoutParameterId = countAndClusterId[0];
-    let clusterId = countAndClusterId[1];
+        .groupBy("CLUSTERID")
+        .arrayRow();
 
     //If only one duplicate would be remaining,
     //the whole cluster has to be deleted because there are no more duplicates.
@@ -71,16 +66,12 @@ DuplicateScannerUtils.DeleteCachedDuplicate = function(pDuplicateId)
     //therefore check for smaller/equals 1
     if(countDuplicatesInClusterWithoutParameterId <= 1)
     {
-        let deleteStatements = [];
-        deleteStatements.push(["DUPLICATECLUSTERS", newWhere("DUPLICATECLUSTERS.CLUSTERID", clusterId).build()]);
-        deleteStatements.push(["UNRELATEDDUPLICATES", newWhere("UNRELATEDDUPLICATES.CLUSTERID", clusterId).build()]);
-
-        db.deletes(deleteStatements);
+        newWhere("DUPLICATECLUSTERS.CLUSTERID", clusterId).deleteData();
+        newWhere("UNRELATEDDUPLICATES.CLUSTERID", clusterId).deleteData();
     }
     else
     {
-        newWhereIfSet("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId)
-            .deleteData(true, "DUPLICATECLUSTERS");
+        newWhereIfSet("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId).deleteData();
 
         //Delete all records where this duplicateId is mentioned
         DuplicateScannerUtils.DeleteAllUnrelatedDuplicateRelations(pDuplicateId);
@@ -96,8 +87,7 @@
  */
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity = function(pTargetEntity)
 {
-    return newWhereIfSet("DUPLICATECLUSTERS.TARGET_ENTITY", pTargetEntity)
-        .deleteData(true, "DUPLICATECLUSTERS");
+    return newWhereIfSet("DUPLICATECLUSTERS.TARGET_ENTITY", pTargetEntity).deleteData();
 }
 
 /*
@@ -114,29 +104,20 @@ DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations = function(pTargetEntit
     /*
      * Update all records with the current valid clusterId where the same duplicateId combination exists
      */
-    let INDEX_NEW_CLUSTERID = 0;
-    let INDEX_OLD_CLUSTERID = 1;
-
-    let query = newSelect("dc1.CLUSTERID, ud.CLUSTERID")
+    let clusterIdChanges = newSelect("dc1.CLUSTERID, ud.CLUSTERID")
         .from("UNRELATEDDUPLICATES", "ud")
         .join("DUPLICATECLUSTERS", "dc1.DUPLICATEID = ud.SOURCEDUPLICATEID", "dc1")
         .join("DUPLICATECLUSTERS", "dc2.DUPLICATEID = ud.UNRELATEDDUPLICATEID", "dc2")
-        .where(["DUPLICATECLUSTERS", "TARGET_ENTITY", "dc1"], pTargetEntity);
-
-    let newIdOldIdRay = query.table();
-    let updateStatements = [];
+        .where(["DUPLICATECLUSTERS", "TARGET_ENTITY", "dc1"], pTargetEntity)
+        .table();
 
-    //Build update statements to set new clusterIds
-    for (let i = 0; i < newIdOldIdRay.length; i++)
+    let updateStatements = clusterIdChanges.map(function ([newClusterId, oldClusterId])
     {
-        let newClusterId = newIdOldIdRay[i][INDEX_NEW_CLUSTERID];
-        let oldClusterId = newIdOldIdRay[i][INDEX_OLD_CLUSTERID];
-        let updateColumns = ["CLUSTERID"];
-
-        let updateStatement = ["UNRELATEDDUPLICATES", updateColumns, null, [newClusterId], newWhere("UNRELATEDDUPLICATES.CLUSTERID", oldClusterId).build()];
-
-        updateStatements.push(updateStatement);
-    }
+        return newWhere("UNRELATEDDUPLICATES.CLUSTERID", oldClusterId).buildUpdateStatement({ "CLUSTERID": newClusterId }, "UNRELATEDDUPLICATES");
+    });
 
     db.updates(updateStatements);
 
     /*
@@ -144,7 +125,7 @@ DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations = function(pTargetEntit
      * All records with a nonexistend clusterid can now be deleted because they haven't been detected as a duplicate any more.
      */
     newWhere("UNRELATEDDUPLICATES.CLUSTERID", newSelect("dc1.CLUSTERID").from("DUPLICATECLUSTERS", "dc1"), SqlBuilder.NOT_IN())
-        .deleteData(true, "UNRELATEDDUPLICATES");
+        .deleteData();
 }
 
 /*
@@ -159,11 +140,11 @@ DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations = function(pTargetEntit
  */
 DuplicateScannerUtils.CreateUnrelatedDuplicateRelation = function(pSourceContactId, pUnrelatedContactId, pClusterId)
 {
-    let newUid = util.getNewUUID();
-    let columns = ["ID", "SOURCEDUPLICATEID", "UNRELATEDDUPLICATEID", "CLUSTERID"];
-    let values = [newUid, pSourceContactId, pUnrelatedContactId, pClusterId];
-
-    return db.insertData("UNRELATEDDUPLICATES", columns, null, values);
+    return new SqlBuilder().insertFields({
+        "UNRELATEDDUPLICATEID": pUnrelatedContactId,
+        "SOURCEDUPLICATEID": pSourceContactId,
+        "CLUSTERID": pClusterId
+    }, "UNRELATEDDUPLICATES", "ID");
 }
 
 /*
@@ -191,7 +172,7 @@ DuplicateScannerUtils.DeleteUnrelatedDuplicateRelation = function(pSourceDuplica
 {
     return newWhere("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pSourceDuplicateId)
         .and("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pUnrelatedDuplicateId)
-        .deleteData(true, "UNRELATEDDUPLICATES");
+        .deleteData();
 }
 
 /*
@@ -269,7 +250,7 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  * @example
  * var filterName = "PersonDuplicates";
  * var targetEntity = "Person_entity";
- * var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
+ * var recordBlockSize = DuplicateScannerUtils.getBlockSize();
  *
  * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay)
  * {
@@ -292,15 +273,15 @@ DuplicateScannerUtils.GetCachedDuplicatesForClusterId = function(pClusterId)
  *
 * DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 *
- * DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, pRecordsBlockSize, formatToJsonAndCallWsCallback);
+ * DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, pRecordsBlockSize, formatToJsonAndCallWsCallback);
 *
 * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
 */
-DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEntity,
+DuplicateScannerUtils.rebuildDuplicatesCache = function(pFilterName, pTargetEntity,
 pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
-    let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
+    let resultFields = DuplicateScannerUtils.getResultFields(filterName, targetEntity);
     let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
     let entityFieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern);
 
@@ -312,13 +293,13 @@ DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEnti
 
     entityFields.push(entityIdField);
 
-    let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFields, 0, pRecordsBlockSize);
+    let targetRecords = DuplicateScannerUtils.getEntityRecords(pTargetEntity, entityFields, 0, pRecordsBlockSize);
     let currentRecordIndex = pRecordsBlockSize;
 
     while(targetRecords.length > 0)
     {
-        foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pTargetEntity, targetRecords,
+        foundDuplicateIds = DuplicateScannerUtils.scanRecords(pTargetEntity, targetRecords,
         entityFieldConfigs, resultFields, useExternalWebservice, pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds, indexPattern, entityIdField);
@@ -329,14 +310,14 @@
         {
             break;
         }
-        targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFields,
+        targetRecords = DuplicateScannerUtils.getEntityRecords(pTargetEntity, entityFields,
         currentRecordIndex, pRecordsBlockSize);
         currentRecordIndex += pRecordsBlockSize;
     }
 }
 
-DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
+DuplicateScannerUtils.scanRecords = function(pTargetEntity, pTargetRecordsData,
 pEntityFieldConfigs, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds, pIndexPattern, pEntityIdField)
 {
     let foundDuplicateIds = [];
@@ -398,7 +379,7 @@ DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
  * @param {String} pTargetEntity Entity which has been configured
  * @returns {String} A clusterid if a matching cluster has been found, otherwise ""
  */
-DuplicateScannerUtils.CacheNewScanResults = function(pNewRecordId, pDuplicateIds, pTargetEntity)
+DuplicateScannerUtils.cacheNewScanResults = function(pNewRecordId, pDuplicateIds, pTargetEntity)
 {
     let duplicateIds = [];
     //Run thru every duplicate result and read out the id.
@@ -409,7 +390,7 @@
         duplicateIds.push(duplicateContactId);
     }
 
-    let clusterId = DuplicateScannerUtils.GetClusterWithIdenticalDuplicates(duplicateIds);
+    let clusterId = DuplicateScannerUtils.getClusterWithIdenticalDuplicates(duplicateIds);
 
     //If no cluster has beend found, create a new one with all found duplicateIds,
     //otherwise add the id to the existing cluster
@@ -434,7 +415,7 @@
  * @param {String[]} pDuplicateIds Duplicate ids which should be in the same cluster
  * @returns {String} Id of the cluster which contains all given duplicate ids or ""
  */
-DuplicateScannerUtils.GetClusterWithIdenticalDuplicates = function(pDuplicateIds)
+DuplicateScannerUtils.getClusterWithIdenticalDuplicates = function(pDuplicateIds)
 {
     let RESULT_NO_CLUSTER_FOUND = "";
 
@@ -473,7 +454,7 @@
     return foundClusterId;
 }
 
-DuplicateScannerUtils.LoadEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad)
+DuplicateScannerUtils.getEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad)
 {
     let getRowsConfig = entities.createConfigForLoadingRows()
         .entity(pTargetEntity)
@@ -489,7 +470,7 @@
  * @param {String} pTargetEntity Entity which has been configured
  * @returns {String[]} Resultfields as array
  */
-DuplicateScannerUtils.LoadResultFields = function(pFilterName, pTargetEntity)
+DuplicateScannerUtils.getResultFields = function(pFilterName, pTargetEntity)
 {
     return newSelect("dsrfc.ENTITY_FIELD_NAME")
         .from("DUPLICATESCANNERRESULTFIELDCONFIG dsrfc")
@@ -570,11 +551,10 @@ DuplicateScannerUtils.LoadResultFields = function(pFilterName, pTargetEntity)
  * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, valuesToCheck,
  * formatToJsonAndCallWsCallback);
  */
-DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pValuesToCheck,
-    pFormatValuesConsumeWebserviceCallback)
+DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pValuesToCheck, pFormatValuesConsumeWebserviceCallback)
 {
     let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
-    let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
+    let resultFields = DuplicateScannerUtils.getResultFields(pFilterName, pTargetEntity);
     let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
     let entityFieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern);
@@ -792,12 +772,12 @@ DuplicateScannerUtils.BuildEntityFieldConfigValueRays = function(pDuplicateField
     return entityFieldConfigValuesRay.length > 0 ? entityFieldConfigValuesRay : [["", ""]];
 }
 
-DuplicateScannerUtils.GetBlockSize = function()
+DuplicateScannerUtils.getBlockSize = function()
 {
     return project.getPreferenceValue("custom.duplicates.dataBlockSize", "5000");
 }
 
-DuplicateScannerUtils.GetEntityFieldsFromConfig = function(pFilterName, pTargetEntity)
+DuplicateScannerUtils.getEntityFieldsFromConfig = function(pFilterName, pTargetEntity)
 {
     let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
     if(indexPattern == null || indexPattern == "")
@@ -1027,7 +1007,7 @@ _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetE
  * @param {String} pTargetEntity Entity which has been configured
  * @param {String} pIndexPatternWithValues The pattern used to search. Has to contain the values already.
  * @param {String} pIdFilter The filter pattern used to exclude the current record from the result.
- * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
+ * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.getResultFields"
  * @param {String} pResultSetRows todo
  * @returns {[["key", "value"]] || null} Array of Key-Value-Pairs based on the configured pResultFields, if no pattern exists null
  */
@@ -1057,7 +1037,7 @@ _DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pIndexPatternW
  * (indexsearch.FIELD_ID, indexsearch.FIELD_TITLE, indexsearch.FIELD_TYPE, indexsearch.FIELD_DESCRIPTION)
  *
  * @param {IndexQuery} pIndexQuery An indexquery created with "indexsearch.createIndexQuery()"
- * @param {String[]} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
+ * @param {String[]} pResultFields The result field config. Use "DuplicateScannerUtils.getResultFields"
  * @returns {IndexQuery} IndexQuery with the resultfields set
  */
 _DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields)
diff --git a/process/Leadimport_lib/process.js b/process/Leadimport_lib/process.js
index 573bbca214..d5b64428bd 100644
--- a/process/Leadimport_lib/process.js
+++ b/process/Leadimport_lib/process.js
@@ -889,7 +889,7 @@ LeadImportUtils.scanLeadDups = function(pAllContactData)
         let scanResultsOrg = DuplicateScannerUtils.ScanForDuplicates("OrganisationDuplicates", "Organisation_entity", orgObj, null);
         if (scanResultsOrg != null && scanResultsOrg.length > 0)
         {
-            let insertCountOrg = DuplicateScannerUtils.CacheNewScanResults(orgObj["CONTACTID"], scanResultsOrg, "Organisation_entity");
+            let insertCountOrg = DuplicateScannerUtils.cacheNewScanResults(orgObj["CONTACTID"], scanResultsOrg, "Organisation_entity");
             dupOrg = true;
         }
     }
@@ -898,7 +898,7 @@ LeadImportUtils.scanLeadDups = function(pAllContactData)
         let scanResultsPers = DuplicateScannerUtils.ScanForDuplicates("PersonDuplicates", "Person_entity", persObj, null);
         if (scanResultsPers != null && scanResultsPers.length > 0)
         {
-            let insertCountPers = DuplicateScannerUtils.CacheNewScanResults(persObj["CONTACTID"], scanResultsPers, "Person_entity");
+            let insertCountPers = DuplicateScannerUtils.cacheNewScanResults(persObj["CONTACTID"], scanResultsPers, "Person_entity");
             dupPers = true;
         }
     }
diff --git a/process/RebuildAllDuplicateCaches_serverProcess/process.js b/process/RebuildAllDuplicateCaches_serverProcess/process.js
index 900db39e93..ceb9f3560f 100644
--- a/process/RebuildAllDuplicateCaches_serverProcess/process.js
+++ b/process/RebuildAllDuplicateCaches_serverProcess/process.js
@@ -24,13 +24,13 @@ import("DuplicateScanner_lib");
 
 var filterName = "PersonDuplicates";
 var targetEntity = "Person_entity";
-var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
+var recordBlockSize = DuplicateScannerUtils.getBlockSize();
 
 logging.log(filterName + ": Delete duplicates -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log(filterName + ": Recalculate duplicates -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
+DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 logging.log(filterName + ": Refresh unrelated duplicates -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
@@ -47,7 +47,7 @@ logging.log(filterName + ": Delete duplicates -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log(filterName + ": Recalculate duplicates -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
+DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 logging.log(filterName + ": Refresh unrelated duplicates -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
diff --git a/process/RebuildDuplicatesCache_serverProcess/process.js b/process/RebuildDuplicatesCache_serverProcess/process.js
index 4a932ce9b3..ed2a577e4b 100644
--- a/process/RebuildDuplicatesCache_serverProcess/process.js
+++ b/process/RebuildDuplicatesCache_serverProcess/process.js
@@ -25,13 +25,13 @@ import("DuplicateScanner_lib");
 
 var filterName = vars.get("$local.filterName");
 var targetEntity = vars.get("$local.targetEntity");
-var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
+var recordBlockSize = DuplicateScannerUtils.getBlockSize();
 
 logging.log(filterName + ": Delete duplicates -> ");
 DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
 
 logging.log(filterName + ": Recalculate duplicates -> ");
-DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
+DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
 
 logging.log(filterName + ": Refresh unrelated duplicates -> ");
 DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
-- 
GitLab