diff --git a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScanner.xml b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScanner.xml index a855de94e620b637048bd9a6d3d7cea414454193..fc7be7e82c94f2486f24da75d07ec4d92ba20c4f 100644 --- a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScanner.xml +++ b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScanner.xml @@ -11,7 +11,7 @@ <column name="FILTER_NAME" type="NVARCHAR(200)"> <constraints nullable="false"/> </column> - <column name="EXTERNAL_SERVICE_USAGE_ALLOWED" type="BOOLEAN" > + <column name="EXTERNAL_SERVICE_USAGE_ALLOWED" type="INTEGER" > <constraints nullable="false"/> </column> <column name="USER_NEW" type="NVARCHAR(50)"> diff --git a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml index 03bc02bd1dcff056b800f7e611e66e1fd44043ad..de63025985eab1691645820c6c04b4830588ac4a 100644 --- a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml +++ b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateScannerIndexConfig.xml @@ -8,8 +8,14 @@ <column name="DUPLICATESCANNER_ID" type="CHAR(36)"> <constraints nullable="false" /> </column> - <column name="TARGET_CONTEXT" type="NVARCHAR(100)" /> + <column name="DB_FIELD_NAME" type="NVARCHAR(100)" /> <column name="ENTITY_FIELD_NAME" type="NVARCHAR(100)" /> + <column name="IS_ID_FIELD" type="INTEGER" > + <constraints nullable="false"/> + </column> + <column name="USE_FOR_INDEX_DUPLICATE_SEARCH" type="INTEGER" > + <constraints nullable="false"/> + </column> <column name="USER_NEW" type="NVARCHAR(50)"> <constraints nullable="false"/> </column> diff --git a/aliasDefinition/Data_alias/Data_alias.aod b/aliasDefinition/Data_alias/Data_alias.aod index 9f9c36632c42a2f613e7c1aba6eec5f41de9696c..9b3fef4808685c1d13f149af2148d781e8cdcf04 100644 --- a/aliasDefinition/Data_alias/Data_alias.aod +++ b/aliasDefinition/Data_alias/Data_alias.aod @@ -11264,8 +11264,8 @@ <name>EXTERNAL_SERVICE_USAGE_ALLOWED</name> <dbName></dbName> <primaryKey v="false" /> - <columnType v="16" /> - <size v="1" /> + <columnType v="4" /> + <size v="10" /> <scale v="0" /> <notNull v="true" /> <isUnique v="false" /> @@ -11460,20 +11460,6 @@ <syncIds></syncIds> </auditSyncConfig> <entityFields> - <entityFieldDb> - <name>TARGET_CONTEXT</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="12" /> - <size v="100" /> - <scale v="0" /> - <notNull v="false" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> <entityFieldDb> <name>DATE_EDIT</name> <dbName></dbName> @@ -11572,6 +11558,48 @@ <title></title> <description></description> </entityFieldDb> + <entityFieldDb> + <name>USE_FOR_INDEX_DUPLICATE_SEARCH</name> + <dbName></dbName> + <primaryKey v="false" /> + <columnType v="4" /> + <size v="10" /> + <scale v="0" /> + <notNull v="true" /> + <isUnique v="false" /> + <index v="false" /> + <documentation></documentation> + <title></title> + <description></description> + </entityFieldDb> + <entityFieldDb> + <name>DB_FIELD_NAME</name> + <dbName></dbName> + <primaryKey v="false" /> + <columnType v="12" /> + <size v="100" /> + <scale v="0" /> + <notNull v="false" /> + <isUnique v="false" /> + <index v="false" /> + <documentation></documentation> + <title></title> + <description></description> + 
</entityFieldDb> + <entityFieldDb> + <name>IS_ID_FIELD</name> + <dbName></dbName> + <primaryKey v="false" /> + <columnType v="4" /> + <size v="10" /> + <scale v="0" /> + <notNull v="true" /> + <isUnique v="false" /> + <index v="false" /> + <documentation></documentation> + <title></title> + <description></description> + </entityFieldDb> </entityFields> </entityDb> </entities> diff --git a/entity/DuplicateScannerIndexConfig_entity/DuplicateScannerIndexConfig_entity.aod b/entity/DuplicateScannerIndexConfig_entity/DuplicateScannerIndexConfig_entity.aod index cd4d39a1eee30a2733a23957157dd65c5273a5f6..e31828c7f6325fcfdbbcde7660f0b1e23ff1b1d2 100644 --- a/entity/DuplicateScannerIndexConfig_entity/DuplicateScannerIndexConfig_entity.aod +++ b/entity/DuplicateScannerIndexConfig_entity/DuplicateScannerIndexConfig_entity.aod @@ -8,8 +8,8 @@ <name>#PROVIDER</name> </entityProvider> <entityField> - <name>TARGET_CONTEXT</name> - <title>Context</title> + <name>DB_FIELD_NAME</name> + <title>Database field name</title> </entityField> <entityField> <name>ENTITY_FIELD_NAME</name> @@ -54,6 +54,18 @@ <name>USER_EDIT</name> <valueProcess>%aditoprj%/entity/DuplicateScannerIndexConfig_entity/entityfields/user_edit/valueProcess.js</valueProcess> </entityField> + <entityField> + <name>USE_FOR_INDEX_DUPLICATE_SEARCH</name> + <title>Use for duplicate search</title> + <contentType>BOOLEAN</contentType> + <valueProcess>%aditoprj%/entity/DuplicateScannerIndexConfig_entity/entityfields/use_for_index_duplicate_search/valueProcess.js</valueProcess> + </entityField> + <entityField> + <name>IS_ID_FIELD</name> + <title>Is ID field</title> + <contentType>BOOLEAN</contentType> + <valueProcess>%aditoprj%/entity/DuplicateScannerIndexConfig_entity/entityfields/is_id_field/valueProcess.js</valueProcess> + </entityField> </entityFields> <recordContainers> <dbRecordContainer> @@ -62,7 +74,7 @@ <conditionProcess>%aditoprj%/entity/DuplicateScannerIndexConfig_entity/recordcontainers/recordcontainer/conditionProcess.js</conditionProcess> <linkInformation> <linkInformation> - <name>e8beb2e0-6eae-401c-8c72-3d91d440c9c3</name> + <name>20b6821f-5de0-4fd6-b30f-38749dd075e8</name> <tableName>DUPLICATESCANNERINDEXCONFIG</tableName> <primaryKey>ID</primaryKey> <isUIDTable v="false" /> @@ -71,36 +83,44 @@ </linkInformation> <recordFieldMappings> <dbRecordFieldMapping> - <name>UID.value</name> - <recordfield>DUPLICATESCANNERINDEXCONFIG.ID</recordfield> + <name>DATE_EDIT.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.DATE_EDIT</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> - <name>TARGET_CONTEXT.value</name> - <recordfield>DUPLICATESCANNERINDEXCONFIG.TARGET_CONTEXT</recordfield> + <name>DATE_NEW.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.DATE_NEW</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> - <name>ENTITY_FIELD_NAME.value</name> - <recordfield>DUPLICATESCANNERINDEXCONFIG.ENTITY_FIELD_NAME</recordfield> + <name>DB_FIELD_NAME.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.DB_FIELD_NAME</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> <name>DUPLICATESCANNER_ID.value</name> <recordfield>DUPLICATESCANNERINDEXCONFIG.DUPLICATESCANNER_ID</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> - <name>USER_NEW.value</name> - <recordfield>DUPLICATESCANNERINDEXCONFIG.USER_NEW</recordfield> + <name>ENTITY_FIELD_NAME.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.ENTITY_FIELD_NAME</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> - <name>USER_EDIT.value</name> - 
<recordfield>DUPLICATESCANNERINDEXCONFIG.USER_EDIT</recordfield> + <name>IS_ID_FIELD.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.IS_ID_FIELD</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> - <name>DATE_NEW.value</name> - <recordfield>DUPLICATESCANNERINDEXCONFIG.DATE_NEW</recordfield> + <name>UID.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.ID</recordfield> </dbRecordFieldMapping> <dbRecordFieldMapping> - <name>DATE_EDIT.value</name> - <recordfield>DUPLICATESCANNERINDEXCONFIG.DATE_EDIT</recordfield> + <name>USE_FOR_INDEX_DUPLICATE_SEARCH.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.USE_FOR_INDEX_DUPLICATE_SEARCH</recordfield> + </dbRecordFieldMapping> + <dbRecordFieldMapping> + <name>USER_EDIT.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.USER_EDIT</recordfield> + </dbRecordFieldMapping> + <dbRecordFieldMapping> + <name>USER_NEW.value</name> + <recordfield>DUPLICATESCANNERINDEXCONFIG.USER_NEW</recordfield> </dbRecordFieldMapping> </recordFieldMappings> </dbRecordContainer> diff --git a/entity/DuplicateScannerIndexConfig_entity/entityfields/is_id_field/valueProcess.js b/entity/DuplicateScannerIndexConfig_entity/entityfields/is_id_field/valueProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..a96005514c80a3e1697a52248b4a60b983d7efb2 --- /dev/null +++ b/entity/DuplicateScannerIndexConfig_entity/entityfields/is_id_field/valueProcess.js @@ -0,0 +1,12 @@ +import("system.result"); +import("system.vars"); +import("system.logging"); + +logging.log("2this value -> " + vars.get("$this.value")); +logging.log("2vars get field -> " + vars.get("$field.USE_FOR_INDEX_DUPLICATE_SEARCH")); + +if(vars.get("$this.value") == null || vars.get("$this.value") == "") +{ + logging.log("2im if -> "); + result.string("0"); +} \ No newline at end of file diff --git a/entity/DuplicateScannerIndexConfig_entity/entityfields/use_for_index_duplicate_search/valueProcess.js b/entity/DuplicateScannerIndexConfig_entity/entityfields/use_for_index_duplicate_search/valueProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..c0cd3f68acb58f6e22c68cbd3ff8113b10d4a3a0 --- /dev/null +++ b/entity/DuplicateScannerIndexConfig_entity/entityfields/use_for_index_duplicate_search/valueProcess.js @@ -0,0 +1,12 @@ +import("system.result"); +import("system.vars"); +import("system.logging"); + +logging.log("this value -> " + vars.get("$this.value")); +logging.log("vars get field -> " + vars.get("$field.USE_FOR_INDEX_DUPLICATE_SEARCH")); + +if(vars.get("$this.value") == null || vars.get("$this.value") == "") +{ + logging.log("im if -> "); + result.string("0"); +} \ No newline at end of file diff --git a/entity/DuplicateScannerPrefilterConfig_entity/DuplicateScannerPrefilterConfig_entity.aod b/entity/DuplicateScannerPrefilterConfig_entity/DuplicateScannerPrefilterConfig_entity.aod index 000df595b276b83f6156de978c6d365a06a25f99..a602d25796a34b82ec971a2c2e63c66b82efead7 100644 --- a/entity/DuplicateScannerPrefilterConfig_entity/DuplicateScannerPrefilterConfig_entity.aod +++ b/entity/DuplicateScannerPrefilterConfig_entity/DuplicateScannerPrefilterConfig_entity.aod @@ -2,7 +2,7 @@ <entity xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.3.11" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/entity/1.3.11"> <name>DuplicateScannerPrefilterConfig_entity</name> <majorModelMode>DISTRIBUTED</majorModelMode> - <title>Condition Configuration</title> + <title>Prefilter 
Configuration</title> <recordContainer>DBRecordContainer</recordContainer> <entityFields> <entityProvider> diff --git a/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod b/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod index 32667f2dcf241a8736f466c712380365ec8488cf..51dc169af88b12fb4a67e1187deb1de18bed371f 100644 --- a/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod +++ b/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod @@ -26,6 +26,7 @@ <name>EXTERNAL_SERVICE_USAGE_ALLOWED</name> <title>Use external Scanservice</title> <contentType>BOOLEAN</contentType> + <dropDownProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/dropDownProcess.js</dropDownProcess> <valueProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/valueProcess.js</valueProcess> </entityField> <entityConsumer> diff --git a/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/dropDownProcess.js b/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/dropDownProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..b88d5392d1a40aa827f90e9dfcdd26303c4b1baa --- /dev/null +++ b/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/dropDownProcess.js @@ -0,0 +1,7 @@ +import("system.translate"); +import("system.result"); + +result.object([ + ["1", translate.text("Yes")] + ,["0", translate.text("No")] +]); \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/valueProcess.js b/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/valueProcess.js index 8ceab422c71a60d2a495229d1527714841607393..41d52ac633e2e45151d876df251909919aaeabaf 100644 --- a/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/valueProcess.js +++ b/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/valueProcess.js @@ -1,8 +1,7 @@ import("system.result"); import("system.vars"); -import("system.neon"); -if(vars.get("$sys.operatingstate") == neon.OPERATINGSTATE_NEW) +if(vars.get("$this.value") == null || vars.get("$this.value") == "") { result.string("0"); } \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js index d9234591d17b20bfb5010003315040d0f43f6fb6..362bf7ba4c7e419ab41d4ae173d0c603196a86d4 100644 --- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js +++ b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js @@ -23,21 +23,44 @@ import("JditoFilter_lib"); //var filterName = "PersonDuplicates"; //var targetEntity = "Person_entity"; //var resultFieldsIdFieldName = "CONTACTID"; +// +//var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"]; //var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from CONTACT" // + " join PERSON on PERSONID = PERSON_ID"; -//var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"]; +// // //var filterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"], ["FIRSTNAME", "Narkus"], ["LASTNAME", "Bltinger"], ["GENDER", "m"]]; // ////DuplicateScannerUtils.ScanForDuplicatesIndex = function(pFilterName, 
pTargetEntity, pFilterFieldValueRays, ////pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore) // +///* +// * +// */ +// +// +// +//let duplicateFieldsConfig = DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration(filterName, targetEntity); +// +//let querySelectFields = ""; +//for (let i = 0; i < duplicateFieldsConfig.length; i++) +//{ +// querySelectFields += duplicateFieldsConfig[i][0]; +// +// if(i < duplicateFieldsConfig.length) +// querySelectFields += ", "; +//} +// +//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" +// + " join PERSON on PERSONID = PERSON_ID"; +// +//DuplicateScannerUtils.GetEntityFieldNameValueMap(duplicateFieldsConfig); // //DuplicateScannerUtils.ScanForDuplicatesIndex(filterName, targetEntity, //filterFieldValueRays, [], resultFieldsIdFieldName, "29271db0-4253-46c9-b7c2-5e25376b9d19"); -//#################################################################################### +//##############################ANs Beispiel###################################################### //logging.log("TEST INDEX API with Entities"); @@ -70,48 +93,48 @@ import("JditoFilter_lib"); -//#################################################################################### - - - - +//######################################Demosuche nach Datensatz############################################## -let indexQuery = indexsearch.createIndexQuery() - .setPattern("(+(-contactid_value:(29271db0-4253-46c9-b7c2-5e25376b9d19)) +gender_value:m*)") - .setEntities(["Person_entity"]) - .addResultFields("Person_entity.FIRSTNAME") - .setRows(50); -let filterTerm1 = indexsearch.createTerm("Narkus") - .setIndexField("firstname_value") - .setFuzzySearchFactor(0); -let filterTerm2 = indexsearch.createTerm("Bltinger") - .setIndexField("lastname_value") - .setFuzzySearchFactor(0); -let filterPatternConfig = indexsearch.createPatternConfig().and(filterTerm1); - -let filterPatternString = indexsearch.buildPatternString(filterPatternConfig); -logging.log("Hauptsuche filterPatternString -> " + filterPatternString); -indexQuery = indexQuery.addFilter(filterPatternString); - -let searchResult = indexsearch.searchIndex(indexQuery); -logging.log("searchResult -> " + searchResult); - -logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length); - -for (let i = 0; i < searchResult[indexsearch.HITS].length; i++) -{ - logging.log("Treffer Nr -> " + i); - //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c} - let localId = searchResult[indexsearch.HITS][i]["_local_id_"]; - let firstname = searchResult[indexsearch.HITS][i]["firstname_value"]; - let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"]; - logging.log("localId -> " + localId); - logging.log("firstname -> " + firstname); - logging.log("indexGroup -> " + indexGroup); -} +// +//let indexQuery = indexsearch.createIndexQuery() +// .setPattern("(+(-contactid_value:(29271db0-4253-46c9-b7c2-5e25376b9d19)) +gender_value:m*)") +// .setEntities(["Person_entity"]) +// .addResultFields("Person_entity.FIRSTNAME") +// .setRows(50); +// +// +//let filterTerm1 = indexsearch.createTerm("Barkus") +// .setIndexField("firstname_value") +// .setFuzzySearchFactor(0); +////let filterTerm2 = indexsearch.createTerm("Altinger") +//// .setIndexField("lastname_value") +//// .setFuzzySearchFactor(0); +// +//let filterPatternConfig = 
indexsearch.createPatternConfig().and(filterTerm1);
+//
+//let filterPatternString = indexsearch.buildPatternString(filterPatternConfig);
+//logging.log("Hauptsuche filterPatternString -> " + filterPatternString);
+//indexQuery = indexQuery.addFilter(filterPatternString);
+//
+//let searchResult = indexsearch.searchIndex(indexQuery);
+//logging.log("searchResult -> " + searchResult);
+//
+//logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
+//
+//for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
+//{
+//    logging.log("Treffer Nr -> " + i);
+//    //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c}
+//    let localId = searchResult[indexsearch.HITS][i]["_local_id_"];
+//    let firstname = searchResult[indexsearch.HITS][i]["firstname_value"];
+//    let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"];
+//    logging.log("localId -> " + localId);
+//    logging.log("firstname -> " + firstname);
+//    logging.log("indexGroup -> " + indexGroup);
+//}
 
 
 
@@ -155,4 +178,40 @@ for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
 //DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds,
 //tmpFieldsInFilterRay, resultFieldsIdFieldName);
 //
-//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
+//DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
+
+
+//##############################################################################
+
+var filterName = "PersonDuplicates";
+var targetEntity = "Person_entity";
+var resultFieldsIdFieldName = "CONTACTID";
+
+//Array
+//[DB_FIELD, ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]
+//["CONTACTID", "CONTACTID", true, false]
+//["FIRSTNAME", "FIRSTNAME", false, true]
+let duplicateFieldsConfig = DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration(filterName, targetEntity);
+
+logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
+
+let querySelectFields = "";
+for (let i = 0; i < duplicateFieldsConfig.length; i++)
+{
+    querySelectFields += duplicateFieldsConfig[i][0];
+
+    if(i < duplicateFieldsConfig.length - 1)
+        querySelectFields += ", ";
+}
+
+let queryPersonFieldData = "select " + querySelectFields + " from CONTACT"
+    + " join PERSON on PERSONID = PERSON_ID";
+
+logging.log("Deleting PERSON duplicates -> ");
+DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
+
+logging.log("Recalculating PERSON duplicates -> ");
+DuplicateScannerUtils.RebuildDuplicatesCacheIndex(filterName, targetEntity, queryPersonFieldData,
+duplicateFieldsConfig);
+
+DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
\ No newline at end of file
diff --git a/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod b/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod
index c5cc0dca6c80e78afdc463105f9d94cc1ef1360e..1565ea777ebbd43365a113191ecc584024642a25 100644
--- a/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod
+++ b/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod
@@ -4657,7 +4657,7 @@       <key>Target Entity</key>
     </entry>
     <entry>
-      <key>Condition Configuration</key>
+      <key>Prefilter Configuration</key>
     </entry>
     <entry>
       <key>Characters to use</key>
     </entry>
diff --git a/language/_____LANGUAGE_de/_____LANGUAGE_de.aod b/language/_____LANGUAGE_de/_____LANGUAGE_de.aod
index 
97eb7533c1e5e6b1633c4551ccb420ee4c33942b..4bc8175b2d5b19fcd3378a1e3cad6c387ec2fd83 100644 --- a/language/_____LANGUAGE_de/_____LANGUAGE_de.aod +++ b/language/_____LANGUAGE_de/_____LANGUAGE_de.aod @@ -6054,8 +6054,8 @@ <value>Ziel Entity</value> </entry> <entry> - <key>Condition Configuration</key> - <value>Filtereinstellungen</value> + <key>Prefilter Configuration</key> + <value>Vorfilter</value> </entry> <entry> <key>Characters to use</key> diff --git a/language/_____LANGUAGE_en/_____LANGUAGE_en.aod b/language/_____LANGUAGE_en/_____LANGUAGE_en.aod index ac99b8ce01b4ffeefda475668da2fcf59f22b261..53c5a0442434e372703f8ed8cb27959ab00f6b95 100644 --- a/language/_____LANGUAGE_en/_____LANGUAGE_en.aod +++ b/language/_____LANGUAGE_en/_____LANGUAGE_en.aod @@ -4707,7 +4707,7 @@ <key>Filter Name</key> </entry> <entry> - <key>Condition Configuration</key> + <key>Prefilter Configuration</key> </entry> <entry> <key>Configuration name</key> diff --git a/neonContext/DuplicateScannerPrefilterConfig/DuplicateScannerPrefilterConfig.aod b/neonContext/DuplicateScannerPrefilterConfig/DuplicateScannerPrefilterConfig.aod index 819fa9bd9c81202ffd6acddb97b0120d964a686e..5e8c1485487dffff2f01aa4dc4bfe82756cac700 100644 --- a/neonContext/DuplicateScannerPrefilterConfig/DuplicateScannerPrefilterConfig.aod +++ b/neonContext/DuplicateScannerPrefilterConfig/DuplicateScannerPrefilterConfig.aod @@ -1,7 +1,7 @@ <?xml version="1.0" encoding="UTF-8"?> <neonContext xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.0" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonContext/1.1.0"> <name>DuplicateScannerPrefilterConfig</name> - <title>Condition Configuration</title> + <title>Prefilter Configuration</title> <majorModelMode>DISTRIBUTED</majorModelMode> <editview>DuplicateEdit_view</editview> <entity>DuplicateScannerPrefilterConfig_entity</entity> diff --git a/neonView/DuplicateScannerEdit_view/DuplicateScannerEdit_view.aod b/neonView/DuplicateScannerEdit_view/DuplicateScannerEdit_view.aod index 4489a6e8a0029841f3f4d36934d069330ed3e03b..2ce2c53291563518eaf725ec752a131cdf433cc7 100644 --- a/neonView/DuplicateScannerEdit_view/DuplicateScannerEdit_view.aod +++ b/neonView/DuplicateScannerEdit_view/DuplicateScannerEdit_view.aod @@ -35,7 +35,7 @@ <view>DuplicateEdit_view</view> </neonViewReference> <neonViewReference> - <name>94a33f54-6bea-4ec7-8bcf-2763f471cbe3</name> + <name>c5835213-b777-42ac-b0cc-d8923957d723</name> <entityField>ScannerIndexConfigs_Consumer</entityField> <view>DuplicateScannerIndexConfigEdit_view</view> </neonViewReference> diff --git a/neonView/DuplicateScannerIndexConfigEdit_view/DuplicateScannerIndexConfigEdit_view.aod b/neonView/DuplicateScannerIndexConfigEdit_view/DuplicateScannerIndexConfigEdit_view.aod index bcb3d22f06a9ff17610abe0f9ded4381cea191fc..680269d7292a4a3186924671ac29d3819ab1ea51 100644 --- a/neonView/DuplicateScannerIndexConfigEdit_view/DuplicateScannerIndexConfigEdit_view.aod +++ b/neonView/DuplicateScannerIndexConfigEdit_view/DuplicateScannerIndexConfigEdit_view.aod @@ -12,15 +12,25 @@ <name>EntityFieldsConfig</name> <autoNewRow v="true" /> <entityField>#ENTITY</entityField> + <title></title> + <devices /> <columns> <neonTableColumn> - <name>db56e501-a380-45bb-9d9d-530fb3c5f128</name> - <entityField>TARGET_CONTEXT</entityField> + <name>bc39c449-d9d6-4049-902e-24740d7a951f</name> + <entityField>DB_FIELD_NAME</entityField> </neonTableColumn> <neonTableColumn> - <name>29df7f7b-b929-422f-8c6c-540669928687</name> + 
<name>d6119b22-69b6-4b3c-8e83-99c000586a67</name> <entityField>ENTITY_FIELD_NAME</entityField> </neonTableColumn> + <neonTableColumn> + <name>4d2274f7-bd1b-4637-b25c-f3bd0f52bb9b</name> + <entityField>IS_ID_FIELD</entityField> + </neonTableColumn> + <neonTableColumn> + <name>7e80f268-bb47-408d-9f7f-d31d6461de2f</name> + <entityField>USE_FOR_INDEX_DUPLICATE_SEARCH</entityField> + </neonTableColumn> </columns> </genericMultipleViewTemplate> </children> diff --git a/neonView/PersonFilter_view/PersonFilter_view.aod b/neonView/PersonFilter_view/PersonFilter_view.aod index f2d1db5194d9d513c3cfd7132079819c268289bf..ed985547e5bfcfb0b00f48f2666212c84a5ad5fe 100644 --- a/neonView/PersonFilter_view/PersonFilter_view.aod +++ b/neonView/PersonFilter_view/PersonFilter_view.aod @@ -46,6 +46,10 @@ <name>210cc6ab-5123-4d8a-8f2e-a6cd91d494ef</name> <entityField>PICTURE</entityField> </neonTableColumn> + <neonTableColumn> + <name>b5034b17-efe0-4351-98b4-b83928ae0190</name> + <entityField>CONTACTID</entityField> + </neonTableColumn> <neonTableColumn> <name>125d04cc-5c7a-4c38-bd0f-b5d02d21067d</name> <entityField>SALUTATION</entityField> diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js index 40d53e2bc71f631637fea16634c581a8bab1391e..b7dc11c2641b4e36e23908c6f4cbb4964403047f 100644 --- a/process/DuplicateScanner_lib/process.js +++ b/process/DuplicateScanner_lib/process.js @@ -327,17 +327,19 @@ pQueryTargetRecords, pFilterFields, pRecordIdFieldToIgnore) return db.inserts(duplicatesToInsertQueries); } -DuplicateScannerUtils.RebuildDuplicatesCacheIndex = function(pFilterName, pTargetEntity, -pQueryTargetRecords, pFilterFields, pRecordIdFieldToIgnore) +//pQueryTargetRecords, pFilterFields, pRecordIdFieldToIgnore) +DuplicateScannerUtils.RebuildDuplicatesCacheIndex = function(pFilterName, pTargetEntity, +pQueryTargetRecords, pDuplicateFieldsConfig) { + logging.log("in RebuildDuplicatesCache -> "); let alreadyIdentifiedIds = []; - let contactIdsToScan = db.table(pQueryTargetRecords); - logging.log("contactIdsToScan -> " + JSON.stringify(contactIdsToScan)); + let targetRecordsData = db.table(pQueryTargetRecords); + logging.log("targetRecordsData -> " + JSON.stringify(targetRecordsData)); //If the contact id loading query has no results, stop. //No ids should be deleted if an error has been made in this query. 
-    if(contactIdsToScan.length <= 0)
+    if(targetRecordsData.length <= 0)
         return;
 
     /*
@@ -345,66 +347,116 @@ pQueryTargetRecords, pFilterFields, pRecordIdFieldToIgnore)
      * Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started
      */
     var duplicatesToInsertQueries = [];
-    for (b = 0; b < contactIdsToScan.length; b++)
+    for (b = 0; b < targetRecordsData.length; b++)
     {
-//        logging.log("b -> " + b);
-//        logging.log("indexOf(contactIdsToScan[b] -> " + alreadyIdentifiedIds.indexOf(contactIdsToScan[b]));
+        logging.log("b -> " + b);
+        logging.log("New record -> " + targetRecordsData[b]);
+//        logging.log("indexOf(targetRecordsData[b] -> " + alreadyIdentifiedIds.indexOf(targetRecordsData[b]));
 
         //If the current Id has already been identified, continue
-        if(alreadyIdentifiedIds.indexOf(contactIdsToScan[b][0]) > -1)
+        if(alreadyIdentifiedIds.indexOf(targetRecordsData[b][0]) > -1)
             continue;
 
-//        logging.log("contactid noch nicht bearbeitet -> " + contactIdsToScan[b][0]);
+//        logging.log("contactid noch nicht bearbeitet -> " + targetRecordsData[b][0]);
 
-        let filterValuesObject = {};
-        let entityFieldValuesRay = [];
-        /*
-         * Based on the parameterized filter field names and the values loaded via the query,
-         * an object in the style of ["FilterFieldName", "FilterFieldValueFromQuery"] gets created.
-         * This mandatory to run the scan for this record.
-         */
-        for (a = 0; a < pFilterFields.length; a++)
-        {
-//            logging.log("pFilterValues[a] -> " + pFilterFields[a]);
-//            logging.log("contactIdsToScan[i][a] -> " + contactIdsToScan[b][a]);
-
-            //filterValuesObject[pFilterFields[a]] = contactIdsToScan[b][a];
-            entityFieldValuesRay.push([pFilterFields[a], contactIdsToScan[b][a]])
-//            logging.log("filterValuesObject[pFilterValues[a]] -> " + filterValuesObject[pFilterFields[a]]);
-        }
+        let entityFieldValuesRay = DuplicateScannerUtils._buildEntityFieldNameValueRays(pDuplicateFieldsConfig, targetRecordsData[b]);
 
         logging.log("entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
 
+        //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic
+        let idField = entityFieldValuesRay[0][0];
+        let idValue = entityFieldValuesRay[0][1];
+        logging.log("idField -> " + idField);
+        logging.log("idValue -> " + idValue);
+
         let foundDuplicates = DuplicateScannerUtils.ScanForDuplicatesIndex(pFilterName, pTargetEntity,
-        entityFieldValuesRay, [pRecordIdFieldToIgnore], pRecordIdFieldToIgnore, contactIdsToScan[b][0])
+        entityFieldValuesRay, [/*TODO: the configured result fields go here*/], idField, idValue)
 
 //        logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
 
-        if(foundDuplicates.length == 0)
+        logging.log("foundDuplicates -> " + foundDuplicates);
+        logging.log("foundDuplicates[indexsearch.TOTALHITS] -> " + foundDuplicates[indexsearch.TOTALHITS]);
+        if(foundDuplicates[indexsearch.TOTALHITS] == 0)
             continue;
 
         //Insert all found duplicate ids into an cache array because those ids don't have to be checked again lateron.
        let foundDuplicateIds = [];
-        for (let i = 0; i < foundDuplicates.length; i++)
+        for (let i = 0; i < foundDuplicates[indexsearch.HITS].length; i++)
         {
//            logging.log("i -> " + i);
//            logging.log("foundDuplicates[pRecordIdFieldToIgnore] -> " + foundDuplicates[i][pRecordIdFieldToIgnore]);
-            foundDuplicateIds.push(foundDuplicates[i][pRecordIdFieldToIgnore]);
+            let localId = foundDuplicates[indexsearch.HITS][i][indexsearch.FIELD_ID];
+            logging.log("localId of the found duplicate -> " + localId);
+            foundDuplicateIds.push(localId);
         }
 
         alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
 
         //The duplicates list contains only the found duplicates to the original id, therefore it get's added manually
-        foundDuplicateIds.push(contactIdsToScan[b][0]);
+        foundDuplicateIds.push(targetRecordsData[b][0]);
 
         logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
         logging.log("foundDuplicateIds -> " + JSON.stringify(foundDuplicateIds));
 
         let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds, pTargetEntity)
         duplicatesToInsertQueries = duplicatesToInsertQueries.concat(insertQueriesRay);
     }
-//    logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
+    logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
 
     return db.inserts(duplicatesToInsertQueries);
 }
 
+/*
+ * Creates an array of arrays containing the entity field name paired with its value.
+ *
+ * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [DB_FIELD, ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration()
+ * @param {[]} pTargetRecordData One record containing the values for the configured fields. Has to be in the same order as the fields in the first parameter
+ * @return {[[]]} An array of arrays containing the entity field name and its value. [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"]]
+ * @example
+ * pDuplicateFieldsConfig
+ * ["CONTACTID", "CONTACTID", true, false]
+ * ["FIRSTNAME", "FIRSTNAME", false, true]
+ *
+ * pTargetRecordData
+ * ["d786045c-8b21-4f22-b6d9-72be9f61c04d", "PETER"]
+ *
+ * => [["CONTACTID", "d786045c-8b21-4f22-b6d9-72be9f61c04d"], ["FIRSTNAME", "PETER"]]
+ */
+DuplicateScannerUtils._buildEntityFieldNameValueRays = function(pDuplicateFieldsConfig, pTargetRecordData)
+{
+    let INDEX_CONFIG_ENTITY_FIELD = 1;
+    let INDEX_CONFIG_USE_FOR_SEARCH = 3;
+    let entityFieldValuesRay = [];
+    /*
+     * Based on the parameterized filter field names and the values loaded via the query,
+     * an array which contains records in the style of ["FilterFieldName", "FilterFieldValueFromQuery"] gets created.
+     * This is mandatory to run the scan for this record.
+     */
+    for (a = 0; a < pDuplicateFieldsConfig.length; a++)
+    {
+        logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
+        logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
+
+        entityFieldValuesRay.push([pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD], pTargetRecordData[a], pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]])
+    }
+    return entityFieldValuesRay;
+}
+
+DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration = function(pFilterName, pTargetEntity)
+{
+//    select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic
+//    join DUPLICATESCANNER ds on ds.ID = dsic.DUPLICATESCANNER_ID
+//    where ds.FILTER_NAME = 'PersonDuplicates'
+//    and ds.ENTITY_TO_SCAN_NAME = 'Person_entity'
+//    order by dsic.IS_ID_FIELD desc
+
+    let duplicateIndexFieldConfigurations = SqlCondition.begin()
+        .andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
+        .andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
+        .buildSql("select dsic.DB_FIELD_NAME, dsic.ENTITY_FIELD_NAME, dsic.IS_ID_FIELD, dsic.USE_FOR_INDEX_DUPLICATE_SEARCH from DUPLICATESCANNERINDEXCONFIG dsic join DUPLICATESCANNER on DUPLICATESCANNER.ID = dsic.DUPLICATESCANNER_ID"
+        , "1=2", "order by dsic.IS_ID_FIELD desc");
+
+    logging.log("duplicateIndexFieldConfigurations -> " + duplicateIndexFieldConfigurations);
+    return db.table(duplicateIndexFieldConfigurations);
+}
+
 DuplicateScannerUtils.ScanForDuplicatesIndex = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
 pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
 {
@@ -438,7 +490,7 @@ pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
 
     //(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultIndexFields, pResultFields, pResultSetRows)
     //run actual index duplicate search
-    possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, [indexsearch.FIELD_ID], ["Person_entity.FIRSTNAME"], 100);
+    possibleDuplicates = DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, [/*indexsearch.FIELD_ID*/], ["Person_entity.FIRSTNAME"], 100);
 
     logging.log("possibleDuplicates -> " + possibleDuplicates);
 }
@@ -506,11 +558,11 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
         //Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
         //Only load "indexsearch.FIELD_ID" and a resultSet size of 1
         let searchResult = DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [],
-        [indexsearch.FIELD_ID], ["Person_entity.FIRSTNAME"], 1);
+        [/*indexsearch.FIELD_ID*/], ["Person_entity.FIRSTNAME"], 1);
 
         logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
         logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS][0]);
-        if(searchResult[indexsearch.HITS].length < 80)
+        if(searchResult[indexsearch.HITS].length < 80) //TODO: remove this limit?
        {
            for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
            {
@@ -560,9 +612,13 @@ DuplicateScannerUtils._applyPreFilterIndex = function(pTargetEntity, pFilterCoun
 DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays,
 pResultIndexFields, pResultFields, pResultSetRows)
 {
+    //The array index at which the USE_FOR_SEARCH flag is stored
+    //Structure of this array is [ENTITY_FIELD, FIELD_VALUE, USE_FOR_SEARCH]
+    let INDEX_CONFIG_USE_FOR_SEARCH = 2;
+
     let parsedFilterAsPatternTerm = indexsearch.buildQueryFromSearchCondition(pPreFilterJson);
     logging.log("pTargetEntity -> " + pTargetEntity);
-    logging.log("pResultIndexFields -> " + pResultIndexFields);
+    //logging.log("pResultIndexFields -> " + pResultIndexFields);
     logging.log("pResultFields -> " + pResultFields);
     logging.log("pResultSetRows -> " + pResultSetRows);
     let indexQuery = indexsearch.createIndexQuery()
@@ -579,15 +635,14 @@ DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson,
     let filterPatternConfig = indexsearch.createPatternConfig();
     for (let i = 0; i < pEntityFieldValueRays.length; i++)
     {
+        if(pEntityFieldValueRays[i][INDEX_CONFIG_USE_FOR_SEARCH] == 0)
+            continue;
 
         let entityFieldValue = pEntityFieldValueRays[i][1];
         let entityFieldName = pEntityFieldValueRays[i][0];
 
         logging.log("entityFieldValue -> " + entityFieldValue);
         logging.log("entityFieldName -> " + entityFieldName);
-
-        if(entityFieldName == "CONTACTID" || entityFieldName == "GENDER")
-            continue;
-
+
         let indexField = indexsearch.lookupIndexField(pTargetEntity, entityFieldName);
         logging.log("indexField -> " + indexField);
@@ -604,8 +659,7 @@ DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson,
         logging.log("filterPatternString -> " + filterPatternString);
         indexQuery.addFilter(filterPatternString);
     }
-    //logging.log("indexQuery -> " + JSON.stringify(indexQuery));
-    return indexsearch.searchIndex(indexQuery);
+    return indexsearch.searchIndex(indexQuery);
 }