From 0af4c1db904a5765d2cd91106fdf0863cb2ef560 Mon Sep 17 00:00:00 2001 From: "d.buechler" <d.buechler@adito.de> Date: Fri, 20 Sep 2019 11:28:59 +0200 Subject: [PATCH] Moved several functions from Duplicate...Utils to _Duplicate...Utils If a field which is configured to be used in the indexsearch contains no value, no pattern will be added @see _buildFilterPatternConfig --- .../testduplicatescanner/onActionProcess.js | 7 +- process/DuplicateScanner_lib/process.js | 422 +++++++++--------- 2 files changed, 211 insertions(+), 218 deletions(-) diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js index 41c8545687d..90b0bca7931 100644 --- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js +++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js @@ -191,10 +191,11 @@ let duplicateFieldsConfig = DuplicateScannerUtils.LoadDuplicateIndexFieldsConfig logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig); -let querySelectFields = DuplicateScannerUtils.BuildSelectFieldsFromFieldConfig(duplicateFieldsConfig); - +let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig); +logging.log("querySelectFields -> " + querySelectFields); let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" - + " join PERSON on PERSONID = PERSON_ID"; + + " join PERSON on PERSONID = PERSON_ID" + + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"; logging.log("Löschen von PERSON Dubletten -> "); DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity); diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js index 7b7760625cc..f4d3df464d1 100644 --- a/process/DuplicateScanner_lib/process.js +++ b/process/DuplicateScanner_lib/process.js @@ -235,7 +235,7 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction) logging.log("in RebuildDuplicatesCache -> "); let alreadyIdentifiedIds = []; let targetRecordsData = db.table(pQueryTargetRecords); - let useExternalWebservice = DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity); + let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity); logging.log("useExternalWebservice -> " + useExternalWebservice); logging.log("targetRecordsData -> " + JSON.stringify(targetRecordsData)); @@ -248,7 +248,7 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction) * First it gets checked if the current id has already been identified. If that's the case it'll continue with the next. 
* Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started */ - let resultFields = DuplicateScannerUtils._loadResultFields(pFilterName, pTargetEntity); + let resultFields = _DuplicateScannerUtils._loadResultFields(pFilterName, pTargetEntity); logging.log("configured resultFields -> " + resultFields); var duplicatesToInsertQueries = []; for (b = 0; b < targetRecordsData.length; b++) @@ -263,7 +263,7 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction) //logging.log("contactid noch nicht bearbeitet -> " + targetRecordsData[b][0]); - let entityFieldValuesRay = DuplicateScannerUtils._buildEntityFieldNameValueRays(pDuplicateFieldsConfig, targetRecordsData[b]); + let entityFieldValuesRay = _DuplicateScannerUtils._buildEntityFieldNameValueRays(pDuplicateFieldsConfig, targetRecordsData[b]); logging.log("entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay)); //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic @@ -305,42 +305,6 @@ pQueryTargetRecords, pDuplicateFieldsConfig, pCallExternalWebserviceFunction) return db.inserts(duplicatesToInsertQueries); } -/* - * Creates an array of arrays containing the entity field name paired with it's value. - * - * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [DB_FIELD, ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration() - * @param {[]} pTargetRecordData One record containing the values for the configured fields. Has to be in the same order as the fields in the first parameter - * @return {[[]]} An array of arrays containing the entity field name an its value. [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"]] - * @example - * pDuplicateFieldsConfig - * ["CONTACTID", "CONTACTID", true, false] - * ["FIRSTNAME", "FIRSTNAME", false, true] - * - * pTargetRecordData - * ["d786045c-8b21-4f22-b6d9-72be9f61c04d", "PETER"] - * - * => [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"], ["FIRSTNAME", "PETER"]] - */ -DuplicateScannerUtils._buildEntityFieldNameValueRays = function(pDuplicateFieldsConfig, pTargetRecordData) -{ - let INDEX_CONFIG_ENTITY_FIELD = 1; - let INDEX_CONFIG_USE_FOR_SEARCH = 3; - let entityFieldValuesRay = []; - /* - * Based on the parameterized filter field names and the values loaded via the query, - * an array which contains records in the style of ["FilterFieldName", "FilterFieldValueFromQuery"] gets created. - * This is mandatory to run the scan for this record. 
- */ - for (a = 0; a < pDuplicateFieldsConfig.length; a++) - { - logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]); - logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]); - - entityFieldValuesRay.push([pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD], pTargetRecordData[a], pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]]) - } - return entityFieldValuesRay; -} - //Array //[DB_FELD, ENTITY_FELD, IS_ID, USE_FOR_SEARCH] //["CONTACTID", "CONTACTID", true, false] @@ -363,37 +327,6 @@ DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration = function(pFilterNa return db.table(duplicateIndexFieldConfigurations); } -DuplicateScannerUtils._loadResultFields = function(pFilterName, pTargetEntity) -{ -// select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic -// join DUPLICATESCANNER ds on ds.ID = dsic.DUPLICATESCANNER_ID -// where ds.FILTER_NAME = 'PersonDuplicates' -// and ds.ENTITY_TO_SCAN_NAME = 'Person_entity' -// order by dsic.IS_ID_FIELD desc - - let duplicateResultFields = SqlCondition.begin() - .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName) - .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) - .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID" - , "1=2"); - - logging.log("duplicateResultFields condition-> " + duplicateResultFields); - return db.array(db.COLUMN, duplicateResultFields); -} - -DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetEntity) -{ - let scannerUseExternalWebserviceQuery = SqlCondition.begin() - .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName) - .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) - .buildSql("select EXTERNAL_SERVICE_USAGE_ALLOWED from DUPLICATESCANNER" - , "1=2"); - - logging.log("scannerUseExternalWebserviceQuery -> " + scannerUseExternalWebserviceQuery); - let isUseWebservice = db.cell(scannerUseExternalWebserviceQuery); - return (isUseWebservice == 0) ? 
false : true; -} - DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays, pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCallExternalWebserviceFunction, pUseExternalWebservice) { @@ -405,14 +338,14 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCall logging.log("Found filters -> " + configuredFilters); - let preFilter = DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pFilterFieldValueRays); + let preFilter = _DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pFilterFieldValueRays); logging.log("preFilter welcher Elemente im erlaubten bereich ausgibt -> " + preFilter); if(preFilter == null) return null; - possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100); + possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pTargetEntityResultFields, 100); possibleDuplicates = possibleDuplicates[indexsearch.HITS]; if(pUseExternalWebservice && possibleDuplicates.length > 0 && pCallExternalWebserviceFunction != null) @@ -427,13 +360,205 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pCall return possibleDuplicates; } +DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig = function(pIndexFieldsConfig) +{ + let querySelectFields = ""; + for (let i = 0; i < pIndexFieldsConfig.length; i++) + { + querySelectFields += pIndexFieldsConfig[i][0]; + + if(i < pIndexFieldsConfig.length - 1) + querySelectFields += ", "; + } + return querySelectFields; +} + + +DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields) +{ + let entityIndexFields = {}; + for (let i = 0; i < pEntityFields.length; i++) + { + let indexField = indexsearch.lookupIndexField(pEntityName, pEntityFields[i]); + entityIndexFields[pEntityFields[i]] = indexField; + } + return entityIndexFields; +} + + + + + + + + + + + + + + + + + + + +DuplicateScannerUtils.MergePerson = function(pSourceContactId, pTargetContactId) +{ + let updateStatementsCurrentAlias = []; + let updateStatementsSystemAlias = []; + let deleteStatements = []; + + var sourcePersonId = db.cell("select PERSON_ID from CONTACT where CONTACTID = '" + pSourceContactId + "'"); + var tableInfosCurrentAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias(); + var tableInfosSystemAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias(); + + updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosCurrentAlias, pSourceContactId, pTargetContactId)); + updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); + + updateStatementsSystemAlias = updateStatementsSystemAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosSystemAlias, pSourceContactId, pTargetContactId)); + + deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); + deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeletePersonAndContactQuery(sourcePersonId, pSourceContactId)); + deleteStatements = 
deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); + + logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias)); + logging.log("deleteStatements -> " + JSON.stringify(deleteStatements)); + + let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias); + let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS"); + let deletedRows = db.deletes(deleteStatements) + + logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias); + logging.log("deletedRows -> " + deletedRows); + + DuplicateScannerUtils.DeleteCachedDuplicate(pSourceContactId); + + return (affectedRowsCurrentAlias > 0 && deletedRows >= 2); +} + +DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetContactId) +{ + let updateStatements = []; + let deleteStatements = []; + + let querySourceOrganisationId = SqlCondition.begin() + .and("CONTACTID = '" + pSourceContactId + "'") + .buildSql("select ORGANISATION_ID from CONTACT"); + var sourceOrganisationId = db.cell(querySourceOrganisationId); + + var tableInfosCurrentAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias(); + var tableInfosSystemAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias(); + + updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosCurrentAlias, pSourceContactId, pTargetContactId)); + updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); + + updateStatementsSystemAlias = updateStatementsSystemAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosSystemAlias, pSourceContactId, pTargetContactId)); + + deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); + deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteOrganisationAndContactQuery(sourceOrganisationId, pSourceContactId)); + deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); + + logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias)); + logging.log("deleteStatements -> " + JSON.stringify(deleteStatements)); + + let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias); + let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS"); + let deletedRows = db.deletes(deleteStatements) + + logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias); + logging.log("deletedRows -> " + deletedRows); + + DuplicateScannerUtils.DeleteCachedDuplicate(pSourceContactId); + + return (affectedRowsCurrentAlias > 0 && deletedRows >= 2); +} + +function _DuplicateScannerUtils() {} + +var INDEX_FILTER_CONDITION = 0; +var INDEX_COUNT_CHARS_TO_USE = 1; +var INDEX_MAX_RESULTS_THRESHOLD = 2; + +var INDEX_TABLE_NAME = 0; +var INDEX_COLUMN_NAME = 1; +var INDEX_CONDITION = 2; + +_DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetEntity) +{ + let scannerUseExternalWebserviceQuery = SqlCondition.begin() + .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName) + 
.andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) + .buildSql("select EXTERNAL_SERVICE_USAGE_ALLOWED from DUPLICATESCANNER" + , "1=2"); + + logging.log("scannerUseExternalWebserviceQuery -> " + scannerUseExternalWebserviceQuery); + let isUseWebservice = db.cell(scannerUseExternalWebserviceQuery); + return (isUseWebservice == 0) ? false : true; +} + +_DuplicateScannerUtils._loadResultFields = function(pFilterName, pTargetEntity) +{ +// select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic +// join DUPLICATESCANNER ds on ds.ID = dsic.DUPLICATESCANNER_ID +// where ds.FILTER_NAME = 'PersonDuplicates' +// and ds.ENTITY_TO_SCAN_NAME = 'Person_entity' +// order by dsic.IS_ID_FIELD desc + + let duplicateResultFields = SqlCondition.begin() + .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName) + .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) + .buildSql("select dsrfc.ENTITY_FIELD_NAME from DUPLICATESCANNERRESULTFIELDCONFIG dsrfc join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID" + , "1=2"); + + logging.log("duplicateResultFields condition-> " + duplicateResultFields); + return db.array(db.COLUMN, duplicateResultFields); +} + +/* + * Creates an array of arrays containing the entity field name paired with it's value. + * + * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [DB_FIELD, ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration() + * @param {[]} pTargetRecordData One record containing the values for the configured fields. Has to be in the same order as the fields in the first parameter + * @return {[[]]} An array of arrays containing the entity field name an its value. [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"]] + * @example + * pDuplicateFieldsConfig + * ["CONTACTID", "CONTACTID", true, false] + * ["FIRSTNAME", "FIRSTNAME", false, true] + * + * pTargetRecordData + * ["d786045c-8b21-4f22-b6d9-72be9f61c04d", "PETER"] + * + * => [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"], ["FIRSTNAME", "PETER"]] + */ +_DuplicateScannerUtils._buildEntityFieldNameValueRays = function(pDuplicateFieldsConfig, pTargetRecordData) +{ + let INDEX_CONFIG_ENTITY_FIELD = 1; + let INDEX_CONFIG_USE_FOR_SEARCH = 3; + let entityFieldValuesRay = []; + /* + * Based on the parameterized filter field names and the values loaded via the query, + * an array which contains records in the style of ["FilterFieldName", "FilterFieldValueFromQuery"] gets created. + * This is mandatory to run the scan for this record. + */ + for (a = 0; a < pDuplicateFieldsConfig.length; a++) + { + logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]); + logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]); + + entityFieldValuesRay.push([pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD], pTargetRecordData[a], pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]]) + } + return entityFieldValuesRay; +} + + //todo doc /* * The pre filter is used to narrow the records to be searched by the duplicate scan service * It loads the target entity and uses filters achieve this. 
*/ //only returns null if the number of results exceeds the allowed count -DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pFilterFieldValueRays) +_DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pFilterFieldValueRays) { var combinedFilter = {}; @@ -491,7 +616,7 @@ DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountChar //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time //Only load "indexsearch.FIELD_ID" and a resultSet size of 1 - let searchResult = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [], + let searchResult = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [], [], 1);//todo use again after this has been fixed!! insert the local id after fix logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length); logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS][0]); @@ -537,7 +662,7 @@ DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountChar return null; } -DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultFields, pResultSetRows) +_DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultFields, pResultSetRows) { let parsedFilterAsPatternTerm = indexsearch.buildQueryFromSearchCondition(pPreFilterJson); logging.log("pTargetEntity -> " + pTargetEntity); @@ -549,9 +674,9 @@ DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, //.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME", "Person_entity.CONTACTID") .setRows(pResultSetRows); - indexQuery = DuplicateScannerUtils._setResultFields(indexQuery, pResultFields); + indexQuery = _DuplicateScannerUtils._setResultFields(indexQuery, pResultFields); - let filterPatternConfig = DuplicateScannerUtils._buildFilterPatternConfig(pEntityFieldValueRays, pTargetEntity); + let filterPatternConfig = _DuplicateScannerUtils._buildFilterPatternConfig(pEntityFieldValueRays, pTargetEntity); if(filterPatternConfig != null) { let filterPatternString = indexsearch.buildPatternString(filterPatternConfig); @@ -565,7 +690,7 @@ DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, return indexsearch.searchIndex(indexQuery); } -DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields) +_DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields) { logging.log("pResultFields.length -> " + pResultFields.length); let resultIndexFields = []; @@ -600,7 +725,7 @@ DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields) return pIndexQuery; } -DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRays, pTargetEntity) +_DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRays, pTargetEntity) { //The index to get the fields value for USE_FOR_SEARCH //Structure of this array is [ENTITY_FIELD, FIELD_VALUE, USE_FOR_SEARCH] @@ -613,11 +738,12 @@ DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRays filterPatternConfig = indexsearch.createPatternConfig(); for (let i = 0; i < pEntityFieldValueRays.length; i++) { - if(pEntityFieldValueRays[i][INDEX_CONFIG_USE_FOR_SEARCH] == 0) - continue; let entityFieldValue = pEntityFieldValueRays[i][1]; let entityFieldName = 
pEntityFieldValueRays[i][0]; + if(pEntityFieldValueRays[i][INDEX_CONFIG_USE_FOR_SEARCH] == 0 || entityFieldValue == "") + continue; + logging.log("entityFieldValue -> " + entityFieldValue); logging.log("entityFieldName -> " + entityFieldName); @@ -634,135 +760,6 @@ DuplicateScannerUtils._buildFilterPatternConfig = function(pEntityFieldValueRays return filterPatternConfig; } -DuplicateScannerUtils.BuildSelectFieldsFromFieldConfig = function(pIndexFieldsConfig) -{ - let querySelectFields = ""; - for (let i = 0; i < pIndexFieldsConfig.length; i++) - { - querySelectFields += pIndexFieldsConfig[i][0]; - - if(i < pIndexFieldsConfig.length - 1) - querySelectFields += ", "; - } - return querySelectFields; -} - - -DuplicateScannerUtils.TranslateEntityToIndexFields = function(pEntityName, pEntityFields) -{ - let entityIndexFields = {}; - for (let i = 0; i < pEntityFields.length; i++) - { - let indexField = indexsearch.lookupIndexField(pEntityName, pEntityFields[i]); - entityIndexFields[pEntityFields[i]] = indexField; - } - return entityIndexFields; -} - - - - - - - - - - - - - - - - - - - -DuplicateScannerUtils.MergePerson = function(pSourceContactId, pTargetContactId) -{ - let updateStatementsCurrentAlias = []; - let updateStatementsSystemAlias = []; - let deleteStatements = []; - - var sourcePersonId = db.cell("select PERSON_ID from CONTACT where CONTACTID = '" + pSourceContactId + "'"); - var tableInfosCurrentAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias(); - var tableInfosSystemAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias(); - - updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosCurrentAlias, pSourceContactId, pTargetContactId)); - updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); - - updateStatementsSystemAlias = updateStatementsSystemAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosSystemAlias, pSourceContactId, pTargetContactId)); - - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeletePersonAndContactQuery(sourcePersonId, pSourceContactId)); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); - - logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias)); - logging.log("deleteStatements -> " + JSON.stringify(deleteStatements)); - - let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias); - let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS"); - let deletedRows = db.deletes(deleteStatements) - - logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias); - logging.log("deletedRows -> " + deletedRows); - - DuplicateScannerUtils.DeleteCachedDuplicate(pSourceContactId); - - return (affectedRowsCurrentAlias > 0 && deletedRows >= 2); -} - -DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetContactId) -{ - let updateStatements = []; - let deleteStatements = []; - - let querySourceOrganisationId = SqlCondition.begin() - .and("CONTACTID = '" 
+ pSourceContactId + "'") - .buildSql("select ORGANISATION_ID from CONTACT"); - var sourceOrganisationId = db.cell(querySourceOrganisationId); - - var tableInfosCurrentAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosCurrentAlias(); - var tableInfosSystemAlias = _DuplicateScannerUtils._getMergeUpdateTableInfosSystemAlias(); - - updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosCurrentAlias, pSourceContactId, pTargetContactId)); - updateStatementsCurrentAlias = updateStatementsCurrentAlias.concat(_DuplicateScannerUtils._buildUpdateAttachParticipantsToNewContactQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); - - updateStatementsSystemAlias = updateStatementsSystemAlias.concat(_DuplicateScannerUtils._buildUpdateContactIdStatements(tableInfosSystemAlias, pSourceContactId, pTargetContactId)); - - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteRemoveObsoleteParticipantsRecordsQuery("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteOrganisationAndContactQuery(sourceOrganisationId, pSourceContactId)); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); - - logging.log("updateStatementsCurrentAlias -> " + JSON.stringify(updateStatementsCurrentAlias)); - logging.log("deleteStatements -> " + JSON.stringify(deleteStatements)); - - let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias); - let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS"); - let deletedRows = db.deletes(deleteStatements) - - logging.log("affectedRowsCurrentAlias -> " + affectedRowsCurrentAlias); - logging.log("deletedRows -> " + deletedRows); - - DuplicateScannerUtils.DeleteCachedDuplicate(pSourceContactId); - - return (affectedRowsCurrentAlias > 0 && deletedRows >= 2); -} - -function _DuplicateScannerUtils() {} - -var INDEX_FILTER_CONDITION = 0; -var INDEX_COUNT_CHARS_TO_USE = 1; -var INDEX_MAX_RESULTS_THRESHOLD = 2; - -var INDEX_TABLE_NAME = 0; -var INDEX_COLUMN_NAME = 1; -var INDEX_CONDITION = 2; - -_DuplicateScannerUtils._getIdOfIdenticalCachedDuplicatesCluster = function (pRecordIdValueToIgnore, duplicatesRay) -{ - -} - _DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicatesRay, pTargetEntity) { let duplicatesToInsertQueries = []; @@ -779,11 +776,6 @@ _DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicat return duplicatesToInsertQueries; } -_DuplicateScannerUtils._AddRecordsToCachedDuplicatesCluster = function (pRecordIdToAdd, pClusterId) -{ - -} - _DuplicateScannerUtils._deleteDuplicateClusters = function () { db.deleteData("DUPLICATECLUSTERS"); -- GitLab
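
Note on the renamed select-field builder: DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig joins the DB_FIELD column of each configured index field into a select list, and the test action above now left-joins ADDRESS so that address columns can appear in that list without dropping contacts that have no address row. A short usage sketch; the configuration values are illustrative only:

    // Illustrative index field configuration: [DB_FIELD, ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]
    let duplicateFieldsConfig = [
        ["CONTACTID", "CONTACTID", true,  false],
        ["FIRSTNAME", "FIRSTNAME", false, true],
        ["ADDRESS.ZIP", "ZIP", false, true]
    ];

    let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig);
    // querySelectFields == "CONTACTID, FIRSTNAME, ADDRESS.ZIP"

    // Mirrors the updated test action: the left join keeps contacts without an ADDRESS row
    let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
                                + " join PERSON on PERSONID = PERSON_ID"
                                + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID";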
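
DuplicateScannerUtils.TranslateEntityToIndexFields maps each entity field name to its index field via indexsearch.lookupIndexField and returns the result keyed by the entity field name. A minimal usage sketch; the concrete return values depend on the index configuration:

    let entityResultFields = ["FIRSTNAME", "LASTNAME"];
    let entityIndexFields = DuplicateScannerUtils.TranslateEntityToIndexFields("Person_entity", entityResultFields);
    // entityIndexFields is an object such as
    // { FIRSTNAME: <index field for Person_entity.FIRSTNAME>, LASTNAME: <index field for Person_entity.LASTNAME> }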
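
The jsdoc example of _DuplicateScannerUtils._buildEntityFieldNameValueRays shows name/value pairs, but the implementation also pushes the USE_FOR_SEARCH flag as a third element, which _buildFilterPatternConfig later reads. Worked through with the inputs from the jsdoc:

    let duplicateFieldsConfig = [
        ["CONTACTID", "CONTACTID", true,  false],
        ["FIRSTNAME", "FIRSTNAME", false, true]
    ];
    let targetRecordData = ["d786045c-8b21-4f22-b6d9-72be9f61c04d", "PETER"];

    let entityFieldValuesRay = _DuplicateScannerUtils._buildEntityFieldNameValueRays(duplicateFieldsConfig, targetRecordData);
    // => [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d", false],
    //     ["FIRSTNAME", "PETER", true]]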
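
The behavioural change named in the commit message lives in _buildFilterPatternConfig: a configured field only contributes a search pattern if its USE_FOR_SEARCH flag is set and the loaded value is non-empty. A self-contained sketch of just that guard; the helper name below is illustrative and not part of the library:

    // Each entry has the structure [ENTITY_FIELD, FIELD_VALUE, USE_FOR_SEARCH]
    function selectSearchableFieldValues(pEntityFieldValueRays)   // illustrative helper, not part of the library
    {
        let searchableFields = [];
        for (let i = 0; i < pEntityFieldValueRays.length; i++)
        {
            let entityFieldName = pEntityFieldValueRays[i][0];
            let entityFieldValue = pEntityFieldValueRays[i][1];
            let useForSearch = pEntityFieldValueRays[i][2];

            // Same skip condition as in the patch: unused fields and empty values add no pattern
            if (useForSearch == 0 || entityFieldValue == "")
                continue;

            searchableFields.push([entityFieldName, entityFieldValue]);
        }
        return searchableFields;
    }

    // Example: FIRSTNAME is empty, so only LASTNAME would end up in the filter pattern
    // => [["LASTNAME", "Huber"]]
    selectSearchableFieldValues([
        ["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d", false],
        ["FIRSTNAME", "", true],
        ["LASTNAME", "Huber", true]
    ]);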
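
Structurally, the patch keeps the library's existing namespace convention: DuplicateScannerUtils carries the public API, while the empty constructor function _DuplicateScannerUtils acts as a container for private helpers that callers should not use directly. A reduced sketch of that convention with placeholder members:

    function PublicUtilsSketch() {}          // public namespace (stands in for DuplicateScannerUtils)
    function _PublicUtilsSketch() {}         // private namespace (stands in for _DuplicateScannerUtils)

    _PublicUtilsSketch._normalize = function(pValue)      // internal helper, free to change
    {
        return (pValue == null) ? "" : ("" + pValue);
    };

    PublicUtilsSketch.Scan = function(pValue)             // stable entry point delegating to the helper
    {
        return _PublicUtilsSketch._normalize(pValue);
    };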
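
Both MergePerson and MergeOrganisation collect their statements first and then execute them in batched db.updates / db.deletes calls, with the system-alias updates routed to "_____SYSTEMALIAS". (As in the previous version, MergeOrganisation declares updateStatements but assigns to updateStatementsCurrentAlias / updateStatementsSystemAlias, which may be worth aligning.) The collect-then-execute structure, reduced to a sketch with the builder results left out:

    let updateStatementsCurrentAlias = [];   // statements for the data alias
    let updateStatementsSystemAlias = [];    // statements for "_____SYSTEMALIAS"
    let deleteStatements = [];

    // ...concat the results of the _DuplicateScannerUtils._build* helpers here...

    let affectedRowsCurrentAlias = db.updates(updateStatementsCurrentAlias);
    let affectedRowsSystemAlias = db.updates(updateStatementsSystemAlias, "_____SYSTEMALIAS");
    let deletedRows = db.deletes(deleteStatements);

    // Success means contact references were rewritten and both the source
    // base record and its CONTACT row were removed.
    let mergeSucceeded = (affectedRowsCurrentAlias > 0 && deletedRows >= 2);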