diff --git a/.liquibase/Data_alias/basic/2021.0.2/Duplicate/alter_unrelatedduplicates.xml b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/alter_unrelatedduplicates.xml
new file mode 100644
index 0000000000000000000000000000000000000000..c18f60a8cce7a792b78d66086d48e21015c1c897
--- /dev/null
+++ b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/alter_unrelatedduplicates.xml
@@ -0,0 +1,13 @@
+<?xml version="1.1" encoding="UTF-8" standalone="no"?>
+<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
+                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.6.xsd">
+    <changeSet author="p.neub" id="082919b2-ebfd-4864-9839-89e593fb6f2d">
+        <dropColumn tableName="UNRELATEDDUPLICATES">
+            <column name="CLUSTERID"/>
+        </dropColumn>
+        <addColumn tableName="UNRELATEDDUPLICATES">
+            <column name="DUPLICATETYPE" type="NVARCHAR(63)"/>
+        </addColumn>
+    </changeSet>
+</databaseChangeLog>
\ No newline at end of file
diff --git a/.liquibase/Data_alias/basic/2021.0.2/Duplicate/changelog.xml b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/changelog.xml
new file mode 100644
index 0000000000000000000000000000000000000000..8f9ec6ad4aa00fdffe47749b96ea49621ae1b74a
--- /dev/null
+++ b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/changelog.xml
@@ -0,0 +1,9 @@
+<?xml version="1.1" encoding="UTF-8" standalone="no"?>
+<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
+                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.6.xsd">
+    <include relativeToChangelogFile="true" file="create_hasduplicate.xml"/>
+    <include relativeToChangelogFile="true" file="drop_duplicateclusters.xml"/>
+    <include relativeToChangelogFile="true" file="drop_duplicatescannerresultfieldconfig.xml"/>
+    <include relativeToChangelogFile="true" file="alter_unrelatedduplicates.xml"/>
+</databaseChangeLog>
diff --git a/.liquibase/Data_alias/basic/2021.0.2/Duplicate/create_hasduplicate.xml b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/create_hasduplicate.xml
new file mode 100644
index 0000000000000000000000000000000000000000..08b11b5a91c5698784e6167ce114aa1d48df5d2a
--- /dev/null
+++ b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/create_hasduplicate.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.6.xsd">
+    <changeSet author="p.neub" id="db282e4c-01c2-4b55-aa85-01306f4b1f7a">
+        <createTable tableName="HASDUPLICATE">
+            <column name="HASDUPLICATEID" type="CHAR(36)">
+                <constraints primaryKey="true" primaryKeyName="PK_HASDUPLICATE_HASDUPLICATEID"></constraints>
+            </column>
+
+            <column name="OBJECT_TYPE" type="NVARCHAR(63)"/>
+            <column name="OBJECT_ROWID" type="CHAR(36)"/>
+            <column name="DUPLICATECOUNT" type="INTEGER"/>
+        </createTable>
+    </changeSet>
+</databaseChangeLog>
diff --git a/.liquibase/Data_alias/basic/2021.0.2/Duplicate/drop_duplicateclusters.xml b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/drop_duplicateclusters.xml
new file mode 100644
index 0000000000000000000000000000000000000000..b61e7cb651dec4c502d4956f3caf44b7640bbb1c
--- /dev/null
+++ 
b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/drop_duplicateclusters.xml @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.6.xsd"> + <changeSet author="p.neub" id="45db014f-922d-4619-b69f-9bf3a36285f4"> + <dropTable tableName="DUPLICATECLUSTERS"/> + </changeSet> +</databaseChangeLog> diff --git a/.liquibase/Data_alias/basic/2021.0.2/Duplicate/drop_duplicatescannerresultfieldconfig.xml b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/drop_duplicatescannerresultfieldconfig.xml new file mode 100644 index 0000000000000000000000000000000000000000..6804c4345d4c745de74f6850985649c4329e11bb --- /dev/null +++ b/.liquibase/Data_alias/basic/2021.0.2/Duplicate/drop_duplicatescannerresultfieldconfig.xml @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.6.xsd"> + <changeSet author="p.neub" id="fcf592c4-e07c-4919-bfba-0bf99db161ec"> + <dropTable tableName="DUPLICATESCANNERRESULTFIELDCONFIG"/> + </changeSet> +</databaseChangeLog> diff --git a/.liquibase/Data_alias/basic/2021.0.2/changelog.xml b/.liquibase/Data_alias/basic/2021.0.2/changelog.xml index 537189254f0e2bd8898bea5e8426484074ded276..0322e55f658c95d79d1bc926c5dcced05c8b480a 100644 --- a/.liquibase/Data_alias/basic/2021.0.2/changelog.xml +++ b/.liquibase/Data_alias/basic/2021.0.2/changelog.xml @@ -4,4 +4,5 @@ <include relativeToChangelogFile="true" file="Event/changelog.xml"/> <include relativeToChangelogFile="true" file="alter_CompetitionExpandReasonSize.xml"/> <include relativeToChangelogFile="true" file="Checklists/changelog.xml"/> + <include relativeToChangelogFile="true" file="Duplicate/changelog.xml"/> </databaseChangeLog> \ No newline at end of file diff --git a/aliasDefinition/Data_alias/Data_alias.aod b/aliasDefinition/Data_alias/Data_alias.aod index abce13e87dc9a3833ecaa8cc1da7dbe3f9a5fbc9..ce871a6bd77e333e74f942b7f315146037214afd 100644 --- a/aliasDefinition/Data_alias/Data_alias.aod +++ b/aliasDefinition/Data_alias/Data_alias.aod @@ -11997,82 +11997,6 @@ </entityFieldDb> </entityFields> </entityDb> - <entityDb> - <name>DUPLICATECLUSTERS</name> - <dbName></dbName> - <idColumn>ID</idColumn> - <idGeneratorType v="0" /> - <idGeneratorInterval v="1" /> - <documentation></documentation> - <title></title> - <description></description> - <auditSyncConfig> - <name>auditSyncConfig</name> - <auditMode v="0" /> - <syncActive v="false" /> - <syncComplete v="true" /> - <syncDirection v="1" /> - <syncIds></syncIds> - </auditSyncConfig> - <entityFields> - <entityFieldDb> - <name>DUPLICATEID</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="1" /> - <size v="36" /> - <scale v="0" /> - <notNull v="true" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>CLUSTERID</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="1" /> - <size v="36" /> - <scale v="0" /> - <notNull v="true" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - 
<description></description> - </entityFieldDb> - <entityFieldDb> - <name>ID</name> - <dbName></dbName> - <primaryKey v="true" /> - <columnType v="1" /> - <size v="36" /> - <scale v="0" /> - <notNull v="true" /> - <isUnique v="true" /> - <index v="true" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>TARGET_ENTITY</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="12" /> - <size v="200" /> - <scale v="0" /> - <notNull v="false" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - </entityFields> - </entityDb> <entityDb> <name>UNRELATEDDUPLICATES</name> <dbName></dbName> @@ -12134,136 +12058,11 @@ <description></description> </entityFieldDb> <entityFieldDb> - <name>CLUSTERID</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="1" /> - <size v="36" /> - <scale v="0" /> - <notNull v="false" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - </entityFields> - </entityDb> - <entityDb> - <name>DUPLICATESCANNERRESULTFIELDCONFIG</name> - <dbName></dbName> - <idColumn>ID</idColumn> - <idGeneratorType v="0" /> - <idGeneratorInterval v="1" /> - <documentation></documentation> - <title></title> - <description></description> - <auditSyncConfig> - <name>auditSyncConfig</name> - <auditMode v="0" /> - <syncActive v="false" /> - <syncComplete v="true" /> - <syncDirection v="1" /> - <syncIds></syncIds> - </auditSyncConfig> - <entityFields> - <entityFieldDb> - <name>DATE_EDIT</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="93" /> - <size v="29" /> - <scale v="9" /> - <notNull v="false" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>DUPLICATESCANNER_ID</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="1" /> - <size v="36" /> - <scale v="0" /> - <notNull v="true" /> - <isUnique v="false" /> - <index v="true" /> - <documentation></documentation> - <title></title> - <description></description> - <dependencies> - <entityDependency> - <name>9b123a17-2f41-49f8-9492-f386ece46f7c</name> - <entityName>DUPLICATESCANNER</entityName> - <fieldName>ID</fieldName> - </entityDependency> - </dependencies> - </entityFieldDb> - <entityFieldDb> - <name>DATE_NEW</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="93" /> - <size v="29" /> - <scale v="9" /> - <notNull v="true" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>ID</name> - <dbName></dbName> - <primaryKey v="true" /> - <columnType v="1" /> - <size v="36" /> - <scale v="0" /> - <notNull v="true" /> - <isUnique v="true" /> - <index v="true" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>USER_NEW</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="12" /> - <size v="50" /> - <scale v="0" /> - <notNull v="true" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>ENTITY_FIELD_NAME</name> + <name>DUPLICATETYPE</name> 
<dbName></dbName> <primaryKey v="false" /> <columnType v="12" /> - <size v="100" /> - <scale v="0" /> - <notNull v="false" /> - <isUnique v="false" /> - <index v="false" /> - <documentation></documentation> - <title></title> - <description></description> - </entityFieldDb> - <entityFieldDb> - <name>USER_EDIT</name> - <dbName></dbName> - <primaryKey v="false" /> - <columnType v="12" /> - <size v="50" /> + <size v="63" /> <scale v="0" /> <notNull v="false" /> <isUnique v="false" /> @@ -18354,6 +18153,82 @@ </entityFieldDb> </entityFields> </entityDb> + <entityDb> + <name>HASDUPLICATE</name> + <dbName></dbName> + <idColumn>HASDUPLICATEID</idColumn> + <idGeneratorType v="0" /> + <idGeneratorInterval v="1" /> + <documentation></documentation> + <title></title> + <description></description> + <auditSyncConfig> + <name>auditSyncConfig</name> + <auditMode v="0" /> + <syncActive v="false" /> + <syncComplete v="true" /> + <syncDirection v="1" /> + <syncIds></syncIds> + </auditSyncConfig> + <entityFields> + <entityFieldDb> + <name>OBJECT_ROWID</name> + <dbName></dbName> + <primaryKey v="false" /> + <columnType v="1" /> + <size v="36" /> + <scale v="0" /> + <notNull v="false" /> + <isUnique v="false" /> + <index v="false" /> + <documentation></documentation> + <title></title> + <description></description> + </entityFieldDb> + <entityFieldDb> + <name>OBJECT_TYPE</name> + <dbName></dbName> + <primaryKey v="false" /> + <columnType v="12" /> + <size v="63" /> + <scale v="0" /> + <notNull v="false" /> + <isUnique v="false" /> + <index v="false" /> + <documentation></documentation> + <title></title> + <description></description> + </entityFieldDb> + <entityFieldDb> + <name>DUPLICATECOUNT</name> + <dbName></dbName> + <primaryKey v="false" /> + <columnType v="4" /> + <size v="10" /> + <scale v="0" /> + <notNull v="false" /> + <isUnique v="false" /> + <index v="false" /> + <documentation></documentation> + <title></title> + <description></description> + </entityFieldDb> + <entityFieldDb> + <name>HASDUPLICATEID</name> + <dbName></dbName> + <primaryKey v="true" /> + <columnType v="1" /> + <size v="36" /> + <scale v="0" /> + <notNull v="true" /> + <isUnique v="true" /> + <index v="true" /> + <documentation></documentation> + <title></title> + <description></description> + </entityFieldDb> + </entityFields> + </entityDb> <entityDb> <name>AB_SYNCCONTACT</name> <dbName></dbName> diff --git a/application/_____SYSTEM_APPLICATION_NEON/_____SYSTEM_APPLICATION_NEON.aod b/application/_____SYSTEM_APPLICATION_NEON/_____SYSTEM_APPLICATION_NEON.aod index 56f31da9f97db53dd72583e0cb8631a0a3527050..c5a04bbf0f75458721f25bf276e2ee6c19548e0e 100644 --- a/application/_____SYSTEM_APPLICATION_NEON/_____SYSTEM_APPLICATION_NEON.aod +++ b/application/_____SYSTEM_APPLICATION_NEON/_____SYSTEM_APPLICATION_NEON.aod @@ -314,11 +314,6 @@ <kind v="10077" /> <title></title> </entityNode> - <entityNode> - <name>Duplicates</name> - <kind v="10077" /> - <title></title> - </entityNode> <entityNode> <name>AuditLogHistory</name> <kind v="10077" /> diff --git a/entity/Contact_entity/recordcontainers/db/onDBDelete.js b/entity/Contact_entity/recordcontainers/db/onDBDelete.js index 61afa46ba950988b60515de3cae0e732553cfe69..c7b08f7110800f03efb3ca755622080493d3fd48 100644 --- a/entity/Contact_entity/recordcontainers/db/onDBDelete.js +++ b/entity/Contact_entity/recordcontainers/db/onDBDelete.js @@ -5,7 +5,8 @@ import("system.vars"); import("DuplicateScanner_lib"); var contactId = vars.get("$field.CONTACTID"); 
-DuplicateScannerUtils.deleteCachedDuplicate(contactId); +DuplicateScannerUtils.deleteHasDuplicateEntries("Person_entity", [contactId]); +DuplicateScannerUtils.deleteHasDuplicateEntries("Organisation_entity", [contactId]); new AttributeRelationQuery(contactId, null, "Person") .deleteAllAttributes(); diff --git a/entity/DuplicateScannerPrefilterConfig_entity/recordcontainers/dbrecordcontainer/conditionProcess.js b/entity/DuplicateScannerPrefilterConfig_entity/recordcontainers/dbrecordcontainer/conditionProcess.js deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/entity/DuplicateScannerResultFieldConfig_entity/DuplicateScannerResultFieldConfig_entity.aod b/entity/DuplicateScannerResultFieldConfig_entity/DuplicateScannerResultFieldConfig_entity.aod deleted file mode 100644 index b336a59f809d6d5834741fc16f8a6034815597b2..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/DuplicateScannerResultFieldConfig_entity.aod +++ /dev/null @@ -1,104 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<entity xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.3.18" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/entity/1.3.18"> - <name>DuplicateScannerResultFieldConfig_entity</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <recordContainer>recordContainer</recordContainer> - <entityFields> - <entityProvider> - <name>#PROVIDER</name> - </entityProvider> - <entityField> - <name>UID</name> - </entityField> - <entityField> - <name>ENTITY_FIELD_NAME</name> - <title>Entity field name</title> - </entityField> - <entityField> - <name>DATE_EDIT</name> - <valueProcess>%aditoprj%/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_edit/valueProcess.js</valueProcess> - </entityField> - <entityField> - <name>DATE_NEW</name> - <valueProcess>%aditoprj%/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_new/valueProcess.js</valueProcess> - </entityField> - <entityField> - <name>USER_NEW</name> - <valueProcess>%aditoprj%/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_new/valueProcess.js</valueProcess> - </entityField> - <entityField> - <name>USER_EDIT</name> - <valueProcess>%aditoprj%/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_edit/valueProcess.js</valueProcess> - </entityField> - <entityParameter> - <name>DuplicateScannerId_param</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - <entityProvider> - <name>ScannerResultFieldConfigProvider</name> - <dependencies> - <entityDependency> - <name>119b48a5-ce78-4169-bd31-76e524cece99</name> - <entityName>DuplicateScanner_entity</entityName> - <fieldName>ScannerResultFieldsConfig_Consumer</fieldName> - <isConsumer v="false" /> - </entityDependency> - </dependencies> - </entityProvider> - <entityField> - <name>DUPLICATESCANNER_ID</name> - <valueProcess>%aditoprj%/entity/DuplicateScannerResultFieldConfig_entity/entityfields/duplicatescanner_id/valueProcess.js</valueProcess> - </entityField> - <entityProvider> - <name>#PROVIDER_AGGREGATES</name> - <useAggregates v="true" /> - </entityProvider> - </entityFields> - <recordContainers> - <dbRecordContainer> - <name>recordContainer</name> - <conditionProcess>%aditoprj%/entity/DuplicateScannerResultFieldConfig_entity/recordcontainers/recordcontainer/conditionProcess.js</conditionProcess> - <alias>Data_alias</alias> - <recordFieldMappings> - 
<dbRecordFieldMapping> - <name>DATE_EDIT.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.DATE_EDIT</recordfield> - </dbRecordFieldMapping> - <dbRecordFieldMapping> - <name>DATE_NEW.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.DATE_NEW</recordfield> - </dbRecordFieldMapping> - <dbRecordFieldMapping> - <name>ENTITY_FIELD_NAME.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.ENTITY_FIELD_NAME</recordfield> - </dbRecordFieldMapping> - <dbRecordFieldMapping> - <name>UID.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.ID</recordfield> - </dbRecordFieldMapping> - <dbRecordFieldMapping> - <name>USER_EDIT.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.USER_EDIT</recordfield> - </dbRecordFieldMapping> - <dbRecordFieldMapping> - <name>USER_NEW.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.USER_NEW</recordfield> - </dbRecordFieldMapping> - <dbRecordFieldMapping> - <name>DUPLICATESCANNER_ID.value</name> - <recordfield>DUPLICATESCANNERRESULTFIELDCONFIG.DUPLICATESCANNER_ID</recordfield> - </dbRecordFieldMapping> - </recordFieldMappings> - <linkInformation> - <linkInformation> - <name>f7126f94-9e4c-46a0-8dc9-9e52fda1c7c0</name> - <tableName>DUPLICATESCANNERRESULTFIELDCONFIG</tableName> - <primaryKey>ID</primaryKey> - <isUIDTable v="false" /> - <readonly v="false" /> - </linkInformation> - </linkInformation> - </dbRecordContainer> - </recordContainers> -</entity> diff --git a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_edit/valueProcess.js b/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_edit/valueProcess.js deleted file mode 100644 index 5e6ef059738e0c724a468685333a5e257ac228ce..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_edit/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.util"); -import("system.result"); -import("system.neon"); -import("system.vars"); - -if(vars.get("$sys.recordstate") == neon.OPERATINGSTATE_EDIT) - result.string(vars.get("$sys.date")); \ No newline at end of file diff --git a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_new/valueProcess.js b/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_new/valueProcess.js deleted file mode 100644 index a72892783bf2bd04fe353c47f1be0cb570bbb323..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/date_new/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.util"); -import("system.result"); -import("system.neon"); -import("system.vars"); - -if(vars.get("$sys.recordstate") == neon.OPERATINGSTATE_NEW) - result.string(vars.get("$sys.date")); \ No newline at end of file diff --git a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/duplicatescanner_id/valueProcess.js b/entity/DuplicateScannerResultFieldConfig_entity/entityfields/duplicatescanner_id/valueProcess.js deleted file mode 100644 index b8c682ad670116ff553d3774001f92eeec9478ce..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/duplicatescanner_id/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.result"); -import("system.vars"); - -if(vars.get("$this.value") == null || vars.get("$this.value") == "") -{ - result.string(vars.get("$param.DuplicateScannerId_param")); -} \ No newline at end of file diff --git a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_edit/valueProcess.js 
b/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_edit/valueProcess.js deleted file mode 100644 index 6af880ae3e0e2b89b4eee8327ed49f1eefe458af..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_edit/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.util"); -import("system.result"); -import("system.neon"); -import("system.vars"); - -if(vars.get("$sys.recordstate") == neon.OPERATINGSTATE_EDIT) - result.string(vars.get("$sys.user")); \ No newline at end of file diff --git a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_new/valueProcess.js b/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_new/valueProcess.js deleted file mode 100644 index e518bc75a9494e53a83613dedd943106e74fc00a..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/entityfields/user_new/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.util"); -import("system.result"); -import("system.neon"); -import("system.vars"); - -if(vars.get("$sys.recordstate") == neon.OPERATINGSTATE_NEW) - result.string(vars.get("$sys.user")); \ No newline at end of file diff --git a/entity/DuplicateScannerResultFieldConfig_entity/recordcontainers/recordcontainer/conditionProcess.js b/entity/DuplicateScannerResultFieldConfig_entity/recordcontainers/recordcontainer/conditionProcess.js deleted file mode 100644 index b9bff609c91222abb6c3b0292492600907585487..0000000000000000000000000000000000000000 --- a/entity/DuplicateScannerResultFieldConfig_entity/recordcontainers/recordcontainer/conditionProcess.js +++ /dev/null @@ -1,5 +0,0 @@ -import("system.vars"); -import("system.result"); - -if (vars.get("$param.DuplicateScannerId_param")) - result.string(newWhere("DUPLICATESCANNERRESULTFIELDCONFIG.DUPLICATESCANNER_ID", "$param.DuplicateScannerId_param").toString()); \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod b/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod index 34fb0577dfe0c9ee42cc2f58458267359bd8f094..ef702ee976c3b1cbac40c3e1559ac2374c115b56 100644 --- a/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod +++ b/entity/DuplicateScanner_entity/DuplicateScanner_entity.aod @@ -33,16 +33,6 @@ <contentType>BOOLEAN</contentType> <valueProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/external_service_usage_allowed/valueProcess.js</valueProcess> </entityField> - <entityActionGroup> - <name>TestActionGroup</name> - <children> - <entityActionField> - <name>TestDuplicateScanner</name> - <title>Test DuplicateSearch</title> - <onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js</onActionProcess> - </entityActionField> - </children> - </entityActionGroup> <entityField> <name>USER_NEW</name> <valueProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/user_new/valueProcess.js</valueProcess> @@ -71,40 +61,28 @@ <state>AUTO</state> <stateProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/scan_pattern/stateProcess.js</stateProcess> </entityField> - <entityConsumer> - <name>ScannerResultFieldsConfig_Consumer</name> - <dependency> - <name>dependency</name> - <entityName>DuplicateScannerResultFieldConfig_entity</entityName> - <fieldName>ScannerResultFieldConfigProvider</fieldName> - </dependency> - <children> - <entityParameter> - <name>DuplicateScannerId_param</name> - 
<valueProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/scannerresultfieldsconfig_consumer/children/duplicatescannerid_param/valueProcess.js</valueProcess>
-        </entityParameter>
-      </children>
-    </entityConsumer>
+    <entityProvider>
+      <name>#PROVIDER_AGGREGATES</name>
+      <useAggregates v="true" />
+    </entityProvider>
     <entityActionGroup>
-      <name>RunActionGroup</name>
-      <documentation>%aditoprj%/entity/DuplicateScanner_entity/entityfields/runactiongroup/documentation.adoc</documentation>
+      <name>FilterActions</name>
       <children>
         <entityActionField>
-          <name>RebuildPersonDuplicatesCache</name>
-          <title>Rebuild Person duplicates cache</title>
-          <onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js</onActionProcess>
+          <name>rebuild</name>
+          <title>Rebuild selected entries</title>
+          <onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/filteractions/children/rebuild/onActionProcess.js</onActionProcess>
         </entityActionField>
         <entityActionField>
-          <name>RebuildOrganisationDuplicatesCache</name>
-          <title>Rebuild Organisation duplicates cache</title>
-          <onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js</onActionProcess>
+          <name>viewDuplicates</name>
+          <title>View duplicates</title>
+          <onActionProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/onActionProcess.js</onActionProcess>
+          <iconId>VAADIN:TABLE</iconId>
+          <state>READONLY</state>
+          <stateProcess>%aditoprj%/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/stateProcess.js</stateProcess>
         </entityActionField>
       </children>
     </entityActionGroup>
-    <entityProvider>
-      <name>#PROVIDER_AGGREGATES</name>
-      <useAggregates v="true" />
-    </entityProvider>
   </entityFields>
   <recordContainers>
     <dbRecordContainer>
diff --git a/entity/DuplicateScanner_entity/entityfields/filteractions/children/rebuild/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/filteractions/children/rebuild/onActionProcess.js
new file mode 100644
index 0000000000000000000000000000000000000000..845cc1318d7d8c0b6ed614bf89bfeb2587ac1d7f
--- /dev/null
+++ b/entity/DuplicateScanner_entity/entityfields/filteractions/children/rebuild/onActionProcess.js
@@ -0,0 +1,35 @@
+import("system.process");
+import("system.logging");
+import("system.entities");
+import("system.vars");
+import("DuplicateScanner_lib");
+
+var selectedIds = vars.get("$sys.selection");
+var loadConfig = entities.createConfigForLoadingRows()
+    .entity("DuplicateScanner_entity")
+    .uids(selectedIds)
+    .fields(["FILTER_NAME", "ENTITY_TO_SCAN_NAME", "ID_FIELD_NAME", "SCAN_PATTERN"]);
+var selectedEntries = entities.getRows(loadConfig);
+logging.log("Rebuilding " + selectedEntries.length + " entries");
+
+for(var i = 0; i < selectedEntries.length; i++)
+{
+    var currEntry = selectedEntries[i];
+    var filterName = currEntry["FILTER_NAME"];
+    var targetEntity = currEntry["ENTITY_TO_SCAN_NAME"];
+    var targetIdField = currEntry["ID_FIELD_NAME"];
+    var filter = JSON.parse(currEntry["SCAN_PATTERN"]);
+
+    logging.log("Rebuilding " + filterName);
+
+    var startConfig = process.createStartAsyncConfig()
+        .setName("rebuildDuplicates_serverProcess")
+        .setShowErrorDialog(true)
+        .setLocalVariables({
+            filterName: filterName,
+            targetEntity: targetEntity,
+            targetIdField: targetIdField,
+            filter: JSON.stringify(filter.filter)
+        });
+    process.startAsync(startConfig);
+}
diff --git a/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/onActionProcess.js
new file mode 100644
index 0000000000000000000000000000000000000000..02ee1fa82ecf8801d3b066aecff5a628cbace308
--- /dev/null
+++ b/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/onActionProcess.js
@@ -0,0 +1,26 @@
+import("system.vars");
+import("system.neon");
+import("system.entities");
+import("Context_lib");
+
+var config = entities.createConfigForLoadingRows()
+    .entity("DuplicateScanner_entity")
+    .uid(vars.get("$sys.selection")[0])
+    .fields(["ENTITY_TO_SCAN_NAME"]);
+var scanner = entities.getRow(config);
+var contextId = ContextUtils.getContextName(ContextUtils.getContextId(scanner["ENTITY_TO_SCAN_NAME"]));
+
+neon.openContext(contextId, null, null, neon.OPERATINGSTATE_SEARCH, {
+    FilterPreSet_param: JSON.stringify({
+        type: "group",
+        operator: "AND",
+        childs: [{
+            type: "row",
+            name: "#EXTENSION.Duplicates_filter.Duplicates_filter#NUMBER",
+            operator: "GREATER",
+            value: "All Duplicates",
+            key: "0",
+            contenttype: "NUMBER"
+        }]
+    })
+});
diff --git a/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/stateProcess.js b/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/stateProcess.js
new file mode 100644
index 0000000000000000000000000000000000000000..67d17bda28f7cb502cfae5f807b761d8c468f838
--- /dev/null
+++ b/entity/DuplicateScanner_entity/entityfields/filteractions/children/viewduplicates/stateProcess.js
@@ -0,0 +1,8 @@
+import("system.result");
+import("system.vars");
+import("system.neon");
+
+if(vars.get("$sys.selection").length == 1)
+    result.string(neon.COMPONENTSTATE_EDITABLE);
+else
+    result.string(neon.COMPONENTSTATE_READONLY);
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
deleted file mode 100644
index e391b4c1938da96eddca596f8fb2211dd3d681ab..0000000000000000000000000000000000000000
--- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildorganisationduplicatescache/onActionProcess.js
+++ /dev/null
@@ -1,18 +0,0 @@
-import("system.logging");
-import("system.logging");
-import("DuplicateScanner_lib");
-
-var filterName = "OrganisationDuplicates";
-var targetEntity = "Organisation_entity";
-var recordBlockSize = DuplicateScannerUtils.getBlockSize();
-
-logging.log(filterName + ": Delete duplicates -> ");
-DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity);
-
-logging.log(filterName + ": Recalculate duplicates -> ");
-DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
-
-logging.log(filterName + ": Refresh unrelated duplicates -> ");
-DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity);
-
-logging.log(filterName + ": Done rebuilding ");
\ No newline at end of file
diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js
deleted file mode 100644
index 
cad8736382338846edff77ef164f85488513731d..0000000000000000000000000000000000000000 --- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/children/rebuildpersonduplicatescache/onActionProcess.js +++ /dev/null @@ -1,18 +0,0 @@ -import("system.project"); -import("system.logging"); -import("DuplicateScanner_lib"); - -var filterName = "PersonDuplicates"; -var targetEntity = "Person_entity"; -var recordBlockSize = DuplicateScannerUtils.getBlockSize(); - -logging.log(filterName + ": Delete duplicates -> "); -DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity); - -logging.log(filterName + ": Recalculate duplicates -> "); -DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null); - -logging.log(filterName + ": Refresh unrelated duplicates -> "); -DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - -logging.log(filterName + ": Done rebuilding "); \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/entityfields/runactiongroup/documentation.adoc b/entity/DuplicateScanner_entity/entityfields/runactiongroup/documentation.adoc deleted file mode 100644 index f998dfcc78c6497f121cd16b294c8d01e6e1c1c7..0000000000000000000000000000000000000000 --- a/entity/DuplicateScanner_entity/entityfields/runactiongroup/documentation.adoc +++ /dev/null @@ -1 +0,0 @@ -Since duplicate caching is disabled there is no need to show this action group within views at the moment. \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/entityfields/scannerresultfieldsconfig_consumer/children/duplicatescannerid_param/valueProcess.js b/entity/DuplicateScanner_entity/entityfields/scannerresultfieldsconfig_consumer/children/duplicatescannerid_param/valueProcess.js deleted file mode 100644 index 3c559d42002c59103607e03f79eb792d99d74d31..0000000000000000000000000000000000000000 --- a/entity/DuplicateScanner_entity/entityfields/scannerresultfieldsconfig_consumer/children/duplicatescannerid_param/valueProcess.js +++ /dev/null @@ -1,3 +0,0 @@ -import("system.vars"); -import("system.result"); -result.string(vars.get("$field.UID")); \ No newline at end of file diff --git a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js b/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js deleted file mode 100644 index af7cd56a2472c2305bf914b22fc59c89a7ba03a8..0000000000000000000000000000000000000000 --- a/entity/DuplicateScanner_entity/entityfields/testactiongroup/children/testduplicatescanner/onActionProcess.js +++ /dev/null @@ -1,432 +0,0 @@ -import("system.process"); -import("KeywordRegistry_basic"); -import("system.db"); -import("ActivityTask_lib"); -import("Employee_lib"); -import("system.util"); -import("system.notification"); -import("system.notificationtypes"); -import("system.entities"); -import("system.project"); -import("system.indexsearch"); -import("system.question"); -import("system.logging"); -import("DuplicateScanner_lib"); -import("JditoFilter_lib"); - -//let testFields = []; -//let filters = DuplicateScannerUtils.loadFilters("PersonDuplicates", "Person_entity") -//logging.log("filters -> " + filters); -// -//for (let i = 0; i < filters.length; i++) -//{ -// logging.log("filters[i] -> " + filters[i]); -// let filter = JSON.parse(filters[i][0]).filter; -// let fields = JditoFilterUtils.getFilterFields(filter.childs); -// testFields = testFields.concat(fields); -//} -//logging.log("testFields -> " + 
testFields); - - -//##############################Test Duplicate Scan###################################################### - -//var filterName = "PersonDuplicates"; -//var targetEntity = "Person_entity"; -//var resultFieldsIdFieldName = "CONTACTID"; -// -//var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"]; -//var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from CONTACT" -// + " join PERSON on PERSONID = PERSON_ID"; -// -// -//var filterFieldValueRays = [["CONTACTID", "29271db0-4253-46c9-b7c2-5e25376b9d19"], ["FIRSTNAME", "Narkus"], ["LASTNAME", "Bltinger"], ["GENDER", "m"]]; -// -////DuplicateScannerUtils.ScanForDuplicatesIndex = function(pFilterName, pTargetEntity, pFilterFieldValueRays, -////pTargetEntityResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore) -// -///* -// * -// */ -// -// -// -//let duplicateFieldsConfig = DuplicateScannerUtils.LoadDuplicateIndexFieldsConfiguration(filterName, targetEntity); -// -//let querySelectFields = ""; -//for (let i = 0; i < duplicateFieldsConfig.length; i++) -//{ -// querySelectFields += duplicateFieldsConfig[i][0]; -// -// if(i < duplicateFieldsConfig.length) -// querySelectFields += ", "; -//} -// -//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" -// + " join PERSON on PERSONID = PERSON_ID"; -// -//DuplicateScannerUtils.GetEntityFieldNameValueMap(duplicateFieldsConfig); -// -//DuplicateScannerUtils.ScanForDuplicatesIndex(filterName, targetEntity, -//filterFieldValueRays, [], resultFieldsIdFieldName, "29271db0-4253-46c9-b7c2-5e25376b9d19"); - - -//##############################ANs Beispiel###################################################### - - -//logging.log("TEST INDEX API with Entities"); -//logging.log(indexsearch.lookupIndexField("Person_entity", "FIRSTNAME")); -//logging.log(indexsearch.lookupIndexField("Person_entity", "FIRSTNAME.value")); -//logging.log(indexsearch.lookupIndexField("Person_entity", "PersAddresses.CITY")); -//logging.log(indexsearch.lookupIndexField("Person_entity", "PersAddresses.CITY.value")); -//logging.log(indexsearch.lookupIndexField(null, "Person_entity.FIRSTNAME.value")); -//logging.log(indexsearch.lookupIndexField(null, "Person_entity.PersAddresses.CITY.value")); -//var json = '{"entity":"Person_entity","filter":{"type":"group","operator":"AND","childs":[{"type":"row","name":"FIRSTNAME","operator":"STARTSWITH","value":"asd","key":"","contenttype":"TEXT"},{"type":"group","operator":"OR","childs":[{"type":"row","name":"LASTNAME","operator":"STARTSWITH","value":"L","key":"","contenttype":"TEXT"}]}]}}'; -//logging.log(indexsearch.buildQueryFromSearchCondition(json)); -// -//var t1 = indexsearch.createTerm("lisa").setEntityField("Person_entity.FIRSTNAME"); -//var t2 = indexsearch.createTerm("sommer").setEntityField("Person_entity.LASTNAME"); -//var t3 = indexsearch.createWildcardTerm("L").setEntityField("Person_entity.PersAddresses.CITY"); -// -//var patternConf = indexsearch.createPatternConfig().or(t1).or(t2).or(t3); -//var pattern = indexsearch.buildPatternString(patternConf); -//logging.log(pattern); -// -//var query = indexsearch.createIndexQuery() -//.setPattern(pattern) -//.setEntities("Person_entity") -////.addResultIndexFields(indexsearch.FIELD_ID) -//.addResultFields("Person_entity.FIRSTNAME") -//.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME"); -// -//var res = indexsearch.searchIndex(query); -//logging.log("" + res); - - - -//######################################Demosuche nach 
Datensatz############################################## - - - - -// -//let indexQuery = indexsearch.createIndexQuery() -// .setPattern("(+(-contactid_value:(29271db0-4253-46c9-b7c2-5e25376b9d19)) +gender_value:m*)") -// .setEntities(["Person_entity"]) -// .addResultFields("Person_entity.FIRSTNAME") -// .setRows(50); -// -// -//let filterTerm1 = indexsearch.createTerm("Barkus") -// .setIndexField("firstname_value") -// .setFuzzySearchFactor(0); -////let filterTerm2 = indexsearch.createTerm("Altinger") -//// .setIndexField("lastname_value") -//// .setFuzzySearchFactor(0); -// -//let filterPatternConfig = indexsearch.createPatternConfig().and(filterTerm1); -// -//let filterPatternString = indexsearch.buildPatternString(filterPatternConfig); -//logging.log("Hauptsuche filterPatternString -> " + filterPatternString); -//indexQuery = indexQuery.addFilter(filterPatternString); -// -//let searchResult = indexsearch.searchIndex(indexQuery); -//logging.log("searchResult -> " + searchResult); -// -//logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length); -// -//for (let i = 0; i < searchResult[indexsearch.HITS].length; i++) -//{ -// logging.log("Treffer Nr -> " + i); -// //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c} -// let localId = searchResult[indexsearch.HITS][i]["_local_id_"]; -// let firstname = searchResult[indexsearch.HITS][i]["firstname_value"]; -// let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"]; -// logging.log("localId -> " + localId); -// logging.log("firstname -> " + firstname); -// logging.log("indexGroup -> " + indexGroup); -//} - - - - - -//#################################################################################### - - -// -//var filterName = "PersonDuplicates"; -//var targetEntity = "Person_entity"; -//var resultFieldsIdFieldName = "CONTACTID"; -//var queryPersonContactIds = "select CONTACTID, FIRSTNAME, LASTNAME, GENDER from CONTACT" -// + " join PERSON on PERSONID = PERSON_ID"; -//var tmpFieldsInFilterRay = ["CONTACTID", "FIRSTNAME", "LASTNAME", "GENDER"]; -// -//logging.log("Löschen von PERSON Dubletten -> "); -//DuplicateScannerUtils.deleteClustersByTargetEntity("Person_entity"); -// -//logging.log("Neu berechnen von PERSON Dubletten -> "); -//DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds, -//tmpFieldsInFilterRay, resultFieldsIdFieldName); -// -//DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - -//############################################################################## - -//filterName = "OrganisationDuplicates"; -//targetEntity = "Organisation_entity"; -//resultFieldsIdFieldName = "CONTACTID"; -//queryPersonContactIds = "select CONTACTID, ORGANISATION.NAME from ORGANISATION" -// + " join CONTACT on CONTACT.CONTACTID = ORGANISATION.ORGANISATIONID" -// + " where CONTACTID != '0'"; -//tmpFieldsInFilterRay = ["CONTACTID", "NAME"]; -// -// -//logging.log("Löschen von ORGANISATION Dubletten -> "); -//DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity) -// -//logging.log("Neu berechnen von ORGANISATION Dubletten -> "); -//DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, queryPersonContactIds, -//tmpFieldsInFilterRay, resultFieldsIdFieldName); -// -//DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - - 
-//####################################Rebuild person duplicates########################################## - -//var filterName = "PersonDuplicates"; -//var targetEntity = "Person_entity"; -// -//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity); -//let resultFields = DuplicateScannerUtils.getResultFields(filterName, targetEntity); -// -//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig); -//logging.log("resultFields -> " + resultFields); -// -//let querySelectFields = DuplicateScannerUtils.BuildSqlSelectFieldsFromFieldConfig(duplicateFieldsConfig); -//logging.log("querySelectFields -> " + querySelectFields); -// -//let queryPersonFieldData = "select " + querySelectFields + " from CONTACT" -// + " join PERSON on PERSONID = PERSON_ID" -// + " left join ADDRESS on ADDRESS.CONTACT_ID = CONTACT.CONTACTID"; -// -//logging.log("Löschen von PERSON Dubletten -> "); -//DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity); -// -//let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) -//{ -// let indexResultFields = DuplicateScannerUtils.translateEntityToIndexFields(targetEntity, resultFields) -// -// //Run thru every duplicate result an read out the resultfields -// for (let i = 0; i < pPossibleDuplicatesRay.length; i++) -// { -// for (let b = 0; b < resultFields.length; b++) -// { -// let entityFieldName = resultFields[b]; -// let indexFieldName = indexResultFields[entityFieldName]; -// //logging.log("Entity Field -> "+ pPossibleDuplicatesRay[i][indexFieldName]); -// //format values -// } -// } -// //call webservice -// //reformat results to same structure as before -// return pPossibleDuplicatesRay; -//}; -// -//logging.log("Neu berechnen von PERSON Dubletten -> "); -//DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, queryPersonFieldData, -//duplicateFieldsConfig, resultFields, formatToJsonAndCallWsCallback); -// -//DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - - - - - -//##################################single scanForDuplicates####################################################################### - - -//var filterName = "PersonDuplicates"; -//var targetEntity = "Person_entity"; -// -////Values to check, the same fields as configured -//let valuesToCheck = {}; -//valuesToCheck["CONTACTID"] = "c7ddf982-0e58-4152-b82b-8f5673b0b729"; -//valuesToCheck["FIRSTNAME"] = "Tim"; -//valuesToCheck["GENDER"] = "m "; -// -////The result values can be accessed as seen above in "formatToJsonAndCallWsCallback" -//let pPossibleDuplicatesRay = DuplicateScannerUtils.scanForDuplicates(filterName, targetEntity, valuesToCheck, null); -// -//logging.log(" pPossibleDuplicatesRay-> " + pPossibleDuplicatesRay); -// -//for (let i = 0; i < pPossibleDuplicatesRay.length; i++) -//{ -// logging.log("pPossibleDuplicatesRay[i] -> " + pPossibleDuplicatesRay[i]); -//} -// - - -//################################ entity structure auslesen ############################################## - - -//var model = project.getEntityStructure("Person_entity"); -//logging.log("Name: " + model.name); -//logging.log("Title: " + model.title); -//logging.log("Description: " + model.description); -//logging.log("UsePermissions: " + model.usePermissions); -// -//for (fieldname in model.fields) -//{ -// field = model.fields[fieldname]; -// if(field.fieldType == project.ENTITYFIELDTYPE_FIELD) -// { -// logging.log(" Name: " + field.name); -// logging.log(" Type: " + field.fieldType); -// logging.log(" Title: " 
+ field.title); -// logging.log(" Description: " + field.description); -// logging.log(" UsePermissions: " + field.usePermissions); -// logging.log("###################### -> "); -// } -// if(field.fieldType == project.ENTITYFIELDTYPE_CONSUMER) -// { -// let consumerEntity = field.entityName; -// -// if(consumerEntity == null || consumerEntity == "") -// continue; -// -// let consumerEntityFields = project.getEntityStructure(consumerEntity); -// for (consumerEntityFieldname in consumerEntityFields.fields) -// { -// consumerField = consumerEntityFields.fields[consumerEntityFieldname]; -// if(consumerField.fieldType == project.ENTITYFIELDTYPE_FIELD) -// { -// logging.log(" Name: " + consumerField.name); -// logging.log(" Type: " + consumerField.fieldType); -// logging.log(" Title: " + consumerField.title); -// logging.log(" Description: " + consumerField.description); -// logging.log(" UsePermissions: " + consumerField.usePermissions); -// logging.log("###################### -> "); -// } -// } -// } -//} - -//############################################################################## - -//var model = project.getEntityStructure("Person_entity"); -//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity); -// -//let combinedData = [] -//let entityFieldsToLoad = []; -//for (field in duplicateFieldsConfig) -//{ -// let entityFieldName = duplicateFieldsConfig[field][0]; -// let isIdField = duplicateFieldsConfig[field][1]; -// let useForIndexSearch = duplicateFieldsConfig[field][2]; -// let entityFieldData = model[entiyFieldName]; -// combinedData.push(entityFieldName, isIdField, useForIndexSearch, entityFieldData); -//} -// -//var filterName = "PersonDuplicates"; -//var targetEntity = "Person_entity"; -//DuplicateScannerUtils.getEntityRecords(targetEntity, entityFieldsToLoad, 0, 50); - -//Beispiel 1: -//Feld mit verknüpftem Consumer - -//[entity, feldname, consumerName, ProviderName] -//let test = ["Communication_entity", "STANDARD_EMAIL_COMMUNICATION", "EmailCommunications", "EmailCommunications"]; -// -//let testrow = entities.createConfigForLoadingRows() -// .fields([test[1]]) -// .entity(test[0]) -// .provider(test[3]) -// .addParameter("ContactId_param", "d4c1bec3-656f-45ec-ae03-1c4d729d99fe") -// //.uid() -//let resultTest = entities.getRows(testrow); -//logging.log("resultTest -> " + JSON.stringify(resultTest)); - - - -//Beispiel 2: -//Feld direkt von anderem Entity -//let testrow = entities.createConfigForLoadingRows() -// .fields(["ZIP"]) -// .entity("Address_entity") -// .uid("1a67eaa7-21da-4a18-97ab-755ac5cb74f7") -// -//let resultTest = entities.getRows(testrow); -//logging.log("resultTest Beispiel 2 -> " + JSON.stringify(resultTest)); - - -//indexsearch.runIndexer(["Person"]); - - - - -//let resultClusterId = DuplicateScannerUtils.GetClusterWithDuplicates(["7a34d9d0-04c7-478c-a8e2-f584fe625c45", "c7ddf982-0e58-4152-b82b-8f5673b0b729"]); -//logging.log("resultClusterId -> " + resultClusterId); - - -// -//var filterName = "PersonDuplicates"; -//var targetEntity = "Person_entity"; -//var recordBlockSize = DuplicateScannerUtils.GetBlockSizeForScanner(filterName, targetEntity); -// -//logging.log("recordBlockSize -> " + recordBlockSize); - -//try -//{ -// let sourceContactId = "sourceContactId"; -// let targetContactId = "targetContactId"; -// -// var activityDataForInsert = { -// subject: "Es wurde ein Personendatensatz in diesen integriert", -// content: "Person mit ID " + sourceContactId + " wurde in Person mit ID " + targetContactId 
+ " integriert", -// //categoryKeywordId: $KeywordRegistry.ac -// directionKeywordId: "x", -// responsibleContactId: EmployeeUtils.getCurrentContactId() -// }; -// -// var activityLinks = [["Person", "6e667085-bb97-4039-8dfe-2230002985e0"]] -// -// //activityLinks = ArrayUtils.distinct2d(activityLinks);//TODO: better check before adding the elements into the array if it already exists there -// -// var activityRes = ActivityUtils.insertNewActivity(activityDataForInsert, activityLinks, null, db.getCurrentAlias()); -//} -//catch (exception) -//{ -// logging.log("exception -> " + exception); -//} - -////notification.addNotification(util.getNewUUID(), null, null, null, notification., notification.PRIO_NORMAL, 2, notification.STATE_UNSEEN, [user], message, description); - - -//let currentContactId = EmployeeUtils.getCurrentContactId(); -//DuplicateScannerUtils.CreateMergeSuccessActivity("a2e084e2-d68a-4f1e-a1bb-f8d46ad6293d", "6e667085-bb97-4039-8dfe-2230002985e0", currentContactId, "Person"); - - -//logging.log("$KeywordRegistry.activityDirection$internal() -> " + $KeywordRegistry.activityDirection$internal()); - - - -//let processParameters = { -// filterName: "PersonDuplicates", -// targetEntity: "Person_entity" //process.execute is only able to handle strings -//} -//let userId = EmployeeUtils.getCurrentUserId(); -//if(userId == null) -// userId == ""; -//try -//{ -// let processId = "manualrun_rebuild_duplicatecache_" + util.getNewUUID(); -// process.executeTimer(processId, "RebuildAllDuplicatesCache_serverProcess", 0, true, false, process.TIMERTYPE_SERVER_RUN, userId, false, process.THREADPRIORITY_LOW) -// process.stopTimer(processId); -// logging.log("test -> "); -// process.executeAsync("RebuildAllDuplicateCaches_serverProcess", processParameters, false, userId, process.THREADPRIORITY_LOW) -// logging.log("test2 -> "); -//} -//catch (exception) -//{ -// logging.log(" exception-> " + exception); -//} diff --git a/entity/DuplicatesUnrelated_entity/DuplicatesUnrelated_entity.aod b/entity/DuplicatesUnrelated_entity/DuplicatesUnrelated_entity.aod deleted file mode 100644 index 7d2241b72c2014a236b28f81f3f8a49060a1f768..0000000000000000000000000000000000000000 --- a/entity/DuplicatesUnrelated_entity/DuplicatesUnrelated_entity.aod +++ /dev/null @@ -1,91 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<entity xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.3.18" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/entity/1.3.18"> - <name>DuplicatesUnrelated_entity</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <documentation>%aditoprj%/entity/DuplicatesUnrelated_entity/documentation.adoc</documentation> - <siblings> - <element>Duplicates_entity</element> - </siblings> - <recordContainer>jditoRecordContainer</recordContainer> - <entityFields> - <entityProvider> - <name>#PROVIDER</name> - </entityProvider> - <entityProvider> - <name>UnrelatedPersonsProvider</name> - <titlePlural>Unrelated person duplicates</titlePlural> - <children> - <entityParameter> - <name>TargetEntity</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - <entityParameter> - <name>ClusterId_param</name> - <expose v="true" /> - <mandatory v="false" /> - </entityParameter> - </children> - </entityProvider> - <entityParameter> - <name>TargetEntity</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - <entityProvider> - <name>UnrelatedOrganisationsProvider</name> - <titlePlural>Unrelated 
organisations duplicates</titlePlural> - <children> - <entityParameter> - <name>TargetEntity</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - <entityParameter> - <name>ClusterId_param</name> - <expose v="true" /> - <mandatory v="false" /> - </entityParameter> - </children> - </entityProvider> - <entityField> - <name>SourceDuplicateDescription</name> - <title>Source duplicate</title> - </entityField> - <entityField> - <name>UnrelatedDuplicateDescription</name> - <title>Unrelated duplicate</title> - </entityField> - <entityField> - <name>UID</name> - </entityField> - <entityParameter> - <name>ClusterId_param</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - <entityProvider> - <name>#PROVIDER_AGGREGATES</name> - <useAggregates v="true" /> - </entityProvider> - </entityFields> - <recordContainers> - <jDitoRecordContainer> - <name>jditoRecordContainer</name> - <jDitoRecordAlias>Data_alias</jDitoRecordAlias> - <contentProcess>%aditoprj%/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/contentProcess.js</contentProcess> - <onDelete>%aditoprj%/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/onDelete.js</onDelete> - <recordFieldMappings> - <jDitoRecordFieldMapping> - <name>UID.value</name> - </jDitoRecordFieldMapping> - <jDitoRecordFieldMapping> - <name>SourceDuplicateDescription.value</name> - </jDitoRecordFieldMapping> - <jDitoRecordFieldMapping> - <name>UnrelatedDuplicateDescription.value</name> - </jDitoRecordFieldMapping> - </recordFieldMappings> - </jDitoRecordContainer> - </recordContainers> -</entity> diff --git a/entity/DuplicatesUnrelated_entity/documentation.adoc b/entity/DuplicatesUnrelated_entity/documentation.adoc deleted file mode 100644 index 06d0fa27494b89bf6d02bef96f9872cb1409c379..0000000000000000000000000000000000000000 --- a/entity/DuplicatesUnrelated_entity/documentation.adoc +++ /dev/null @@ -1,3 +0,0 @@ -=DuplicateUnrelated_entity - -These Duplicates not related to another Entity. 
\ No newline at end of file diff --git a/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/contentProcess.js b/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/contentProcess.js deleted file mode 100644 index 7f6beb6cd3e8fff07df2f33aa29835976eedc12d..0000000000000000000000000000000000000000 --- a/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/contentProcess.js +++ /dev/null @@ -1,73 +0,0 @@ -import("system.result"); -import("system.vars"); -import("system.db"); -import("Sql_lib"); - -var INDEX_ID = 0; -var INDEX_SOURCE_INFO1 = 1; -var INDEX_SOURCE_INFO2 = 3; -var INDEX_UNRELATED_INFO1 = 2; -var INDEX_UNRELATED_INFO2 = 4; -var INDEX_SOURCE_INFO = 1; -var INDEX_UNRELATED_INFO = 2; - - -let unrelatedDuplicates = []; -let resultUnrelatedDuplicates = []; -let targetEntity = vars.get("$param.TargetEntity"); -let clusterId = vars.get("$param.ClusterId_param"); -let querySelect = new SqlBuilder(); - -if(targetEntity == 'Person_entity') -{ - querySelect.select("ud.ID," - + " pSource.FIRSTNAME, pUnrelated.FIRSTNAME," - + " pSource.LASTNAME, pUnrelated.LASTNAME") - .from("UNRELATEDDUPLICATES", "ud") - .join("CONTACT", "cUnrelated.CONTACTID = ud.UNRELATEDDUPLICATEID", "cUnrelated") - .join("PERSON", "pUnrelated.PERSONID = cUnrelated.PERSON_ID", "pUnrelated") - .join("CONTACT", "cSource.CONTACTID = ud.SOURCEDUPLICATEID", "cSource") - .join("PERSON", "pSource.PERSONID = cSource.PERSON_ID", "pSource") - //If the clusterid parameter is present, only load the duplicates for this particular cluster - .whereIfSet(["UNRELATEDDUPLICATES", "CLUSTERID", "ud"], clusterId); - -} -else -{ - querySelect.select("ud.ID," - + " oSource.\"NAME\"," - + " oUnrelated.\"NAME\"") - .from("UNRELATEDDUPLICATES", "ud") - .join("CONTACT", "cUnrelated.CONTACTID = ud.UNRELATEDDUPLICATEID", "cUnrelated") - .join("ORGANISATION", "oUnrelated.ORGANISATIONID = cUnrelated.CONTACTID", "oUnrelated") - .join("CONTACT", "cSource.CONTACTID = ud.SOURCEDUPLICATEID", "cSource") - .join("ORGANISATION", "oSource.ORGANISATIONID = cSource.CONTACTID", "oSource") - //If the clusterid parameter is present, only load the duplicates for this particular cluster - .whereIfSet(["UNRELATEDDUPLICATES", "CLUSTERID", "ud"], clusterId); -} - -unrelatedDuplicates = querySelect.table(); - -for (let i = 0; i < unrelatedDuplicates.length; i++) -{ - let id = unrelatedDuplicates[i][INDEX_ID]; - let sourceInfo = ""; - let unrelatedInfo = ""; - - let sourceInfo1 = unrelatedDuplicates[i][INDEX_SOURCE_INFO1]; - let sourceInfo2 = unrelatedDuplicates[i][INDEX_SOURCE_INFO2]; - let unrelatedInfo1 = unrelatedDuplicates[i][INDEX_UNRELATED_INFO1]; - let unrelatedInfo2 = unrelatedDuplicates[i][INDEX_UNRELATED_INFO2]; - - sourceInfo = sourceInfo1; - if(sourceInfo2 != undefined) - sourceInfo += " " + sourceInfo2; - - unrelatedInfo = unrelatedInfo1; - if(unrelatedInfo2 != undefined) - unrelatedInfo += " " + unrelatedInfo2; - - resultUnrelatedDuplicates.push([id, sourceInfo, unrelatedInfo]); -} - -result.object(resultUnrelatedDuplicates); \ No newline at end of file diff --git a/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/onDelete.js b/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/onDelete.js deleted file mode 100644 index 4cb5ca4f2a88d7144b24d27ecf99028f547ca144..0000000000000000000000000000000000000000 --- a/entity/DuplicatesUnrelated_entity/recordcontainers/jditorecordcontainer/onDelete.js +++ /dev/null @@ -1,3 +0,0 @@ -import("Sql_lib"); - 
-newWhere("UNRELATEDDUPLICATES.ID", "$local.uid").deleteData(); \ No newline at end of file diff --git a/entity/Duplicates_entity/Duplicates_entity.aod b/entity/Duplicates_entity/Duplicates_entity.aod deleted file mode 100644 index fe9a19485083c678b01823430ae02631da122683..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/Duplicates_entity.aod +++ /dev/null @@ -1,234 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<entity xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.3.18" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/entity/1.3.18"> - <name>Duplicates_entity</name> - <title>Duplicates</title> - <majorModelMode>DISTRIBUTED</majorModelMode> - <documentation>%aditoprj%/entity/Duplicates_entity/documentation.adoc</documentation> - <siblings> - <element>DuplicatesUnrelated_entity</element> - </siblings> - <iconId>VAADIN:DATABASE</iconId> - <recordContainer>recordContainer</recordContainer> - <entityFields> - <entityProvider> - <name>#PROVIDER</name> - </entityProvider> - <entityField> - <name>CLUSTER_DESCRIPTION</name> - <title>Cluster description</title> - </entityField> - <entityField> - <name>COUNT_DUPLICATES_IN_CLUSTER</name> - <title>Count duplicates in cluster</title> - <contentType>NUMBER</contentType> - </entityField> - <entityField> - <name>TARGET_ENTITY</name> - </entityField> - <entityField> - <name>UID</name> - </entityField> - <entityProvider> - <name>SelfPersonDuplicatesProvider</name> - <titlePluralProcess>%aditoprj%/entity/Duplicates_entity/entityfields/selfpersonduplicatesprovider/titlePluralProcess.js</titlePluralProcess> - <titlePlural>Person duplicates</titlePlural> - <children> - <entityParameter> - <name>TargetEntity</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - </children> - </entityProvider> - <entityConsumer> - <name>SelfPersonDuplicatesConsumer</name> - <dependency> - <name>dependency</name> - <entityName>Duplicates_entity</entityName> - <fieldName>SelfPersonDuplicatesProvider</fieldName> - </dependency> - <children> - <entityParameter> - <name>TargetEntity</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/targetentity/valueProcess.js</valueProcess> - </entityParameter> - <entityParameter> - <name>ClusterId_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/clusterid_param/valueProcess.js</valueProcess> - <title></title> - </entityParameter> - </children> - </entityConsumer> - <entityParameter> - <name>TargetEntity</name> - <expose v="true" /> - <mandatory v="true" /> - </entityParameter> - <entityConsumer> - <name>SelfOrganisationDuplicatesConsumer</name> - <dependency> - <name>dependency</name> - <entityName>Duplicates_entity</entityName> - <fieldName>SelfOrganisationDuplicatesProvider</fieldName> - </dependency> - <children> - <entityParameter> - <name>TargetEntity</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/targetentity/valueProcess.js</valueProcess> - </entityParameter> - <entityParameter> - <name>ClusterId_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/clusterid_param/valueProcess.js</valueProcess> - </entityParameter> - </children> - </entityConsumer> - <entityProvider> - <name>SelfOrganisationDuplicatesProvider</name> - <titlePlural>Organisation 
duplicates</titlePlural> - </entityProvider> - <entityConsumer> - <name>DuplicatesUnrelatedPersonConsumer</name> - <dependency> - <name>dependency</name> - <entityName>DuplicatesUnrelated_entity</entityName> - <fieldName>UnrelatedPersonsProvider</fieldName> - </dependency> - <children> - <entityParameter> - <name>TargetEntity</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/targetentity/valueProcess.js</valueProcess> - </entityParameter> - <entityParameter> - <name>ClusterId_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/clusterid_param/valueProcess.js</valueProcess> - </entityParameter> - </children> - </entityConsumer> - <entityField> - <name>CLUSTER_ID</name> - </entityField> - <entityConsumer> - <name>DuplicatePersonsConsumer</name> - <dependency> - <name>dependency</name> - <entityName>Person_entity</entityName> - <fieldName>NonselfDuplicates</fieldName> - </dependency> - <children> - <entityParameter> - <name>OnlyShowContactIds_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/onlyshowcontactids_param/valueProcess.js</valueProcess> - </entityParameter> - <entityParameter> - <name>DuplicateActionsControl_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/duplicateactionscontrol_param/valueProcess.js</valueProcess> - </entityParameter> - </children> - </entityConsumer> - <entityActionGroup> - <name>DuplicateClusterActionGroup</name> - <title>Duplicate actions</title> - <children> - <entityActionField> - <name>IgnoreWholeCluster</name> - <title>Ignore whole cluster</title> - <onActionProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicateclusteractiongroup/children/ignorewholecluster/onActionProcess.js</onActionProcess> - <isSelectionAction v="true" /> - <iconId>VAADIN:CLOSE</iconId> - </entityActionField> - </children> - </entityActionGroup> - <entityConsumer> - <name>DuplicatesUnrelatedOrganisationConsumer</name> - <dependency> - <name>dependency</name> - <entityName>DuplicatesUnrelated_entity</entityName> - <fieldName>UnrelatedOrganisationsProvider</fieldName> - </dependency> - <children> - <entityParameter> - <name>TargetEntity</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/targetentity/valueProcess.js</valueProcess> - </entityParameter> - <entityParameter> - <name>ClusterId_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/clusterid_param/valueProcess.js</valueProcess> - </entityParameter> - </children> - </entityConsumer> - <entityActionGroup> - <name>PersonOpenClusterDetailActionGroup</name> - <children> - <entityActionField> - <name>PersonOpenClusterDetail</name> - <title></title> - <onActionProcess>%aditoprj%/entity/Duplicates_entity/entityfields/personopenclusterdetailactiongroup/children/personopenclusterdetail/onActionProcess.js</onActionProcess> - <isSelectionAction v="true" /> - <iconId>VAADIN:FOLDER_OPEN</iconId> - </entityActionField> - </children> - </entityActionGroup> - <entityParameter> - <name>ClusterId_param</name> - <expose v="true" /> - </entityParameter> - <entityActionGroup> - <name>OrganisationOpenClusterDetailActionGroup</name> - <children> - <entityActionField> - <name>OrganisationOpenClusterDetail</name> - <title>Open cluster detail</title> 
- <onActionProcess>%aditoprj%/entity/Duplicates_entity/entityfields/organisationopenclusterdetailactiongroup/children/organisationopenclusterdetail/onActionProcess.js</onActionProcess> - <isSelectionAction v="true" /> - <iconId>VAADIN:FOLDER_OPEN</iconId> - </entityActionField> - </children> - </entityActionGroup> - <entityConsumer> - <name>DuplicateOrganisationsConsumer</name> - <dependency> - <name>dependency</name> - <entityName>Organisation_entity</entityName> - <fieldName>NonselfDuplicates</fieldName> - </dependency> - <children> - <entityParameter> - <name>DuplicateActionsControl_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/duplicateactionscontrol_param/valueProcess.js</valueProcess> - </entityParameter> - <entityParameter> - <name>OnlyShowContactIds_param</name> - <valueProcess>%aditoprj%/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/onlyshowcontactids_param/valueProcess.js</valueProcess> - </entityParameter> - </children> - </entityConsumer> - <entityProvider> - <name>#PROVIDER_AGGREGATES</name> - <useAggregates v="true" /> - </entityProvider> - </entityFields> - <recordContainers> - <jDitoRecordContainer> - <name>recordContainer</name> - <jDitoRecordAlias>Data_alias</jDitoRecordAlias> - <contentProcess>%aditoprj%/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js</contentProcess> - <recordFieldMappings> - <jDitoRecordFieldMapping> - <name>UID.value</name> - </jDitoRecordFieldMapping> - <jDitoRecordFieldMapping> - <name>CLUSTER_DESCRIPTION.value</name> - </jDitoRecordFieldMapping> - <jDitoRecordFieldMapping> - <name>COUNT_DUPLICATES_IN_CLUSTER.value</name> - </jDitoRecordFieldMapping> - <jDitoRecordFieldMapping> - <name>TARGET_ENTITY.value</name> - </jDitoRecordFieldMapping> - <jDitoRecordFieldMapping> - <name>CLUSTER_ID.value</name> - </jDitoRecordFieldMapping> - </recordFieldMappings> - </jDitoRecordContainer> - </recordContainers> -</entity> diff --git a/entity/Duplicates_entity/documentation.adoc b/entity/Duplicates_entity/documentation.adoc deleted file mode 100644 index aa8d46c4a31d622c6ed142e3e92422218a302ae9..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/documentation.adoc +++ /dev/null @@ -1,3 +0,0 @@ -=Duplicates_entity - -This Entity shows the Duplicates of \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/duplicateclusteractiongroup/children/ignorewholecluster/onActionProcess.js b/entity/Duplicates_entity/entityfields/duplicateclusteractiongroup/children/ignorewholecluster/onActionProcess.js deleted file mode 100644 index 77822dbca0461d5ffccc31af058b9a5750002a4b..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicateclusteractiongroup/children/ignorewholecluster/onActionProcess.js +++ /dev/null @@ -1,27 +0,0 @@ -import("system.logging"); -import("system.neon"); -import("system.vars"); -import("DuplicateScanner_lib"); -import("system.notification"); - -let clusterId = vars.get("$sys.selection"); - -let duplicateContactIdsInClusterRay = DuplicateScannerUtils.getCachedDuplicatesForClusterId(clusterId) - -if(duplicateContactIdsInClusterRay.length > 1) -{ - let referenceDuplicateId = duplicateContactIdsInClusterRay[0]; - for (let i = 1; i < duplicateContactIdsInClusterRay.length; i++) - { - DuplicateScannerUtils.createUnrelatedDuplicateRelation(referenceDuplicateId, duplicateContactIdsInClusterRay[i], clusterId); - } - 
//notification.createConfig().notificationType(notification.t) - //neon.refresh(["$field.SelfPersonDuplicatesConsumer"]) - - //todo Temporary!!! In the first refresh is the record via idValue selected and gets refreshed but stays visible in the record container - //todo Temporary!!! on the second refresh, no selecten remains and the container loads the remaining records as expected - neon.refreshAll(); - neon.refreshAll(); -} - - diff --git a/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/duplicateactionscontrol_param/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/duplicateactionscontrol_param/valueProcess.js deleted file mode 100644 index 5267adbe23e51fbb6b2c1c2aa44c947c3c3e7c34..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/duplicateactionscontrol_param/valueProcess.js +++ /dev/null @@ -1,2 +0,0 @@ -import("system.result"); -result.string("2");//todo exchange with keyword diff --git a/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/onlyshowcontactids_param/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/onlyshowcontactids_param/valueProcess.js deleted file mode 100644 index ecb8c518cd40068c5e36606b64c83765a69962f6..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicateorganisationsconsumer/children/onlyshowcontactids_param/valueProcess.js +++ /dev/null @@ -1,18 +0,0 @@ -import("system.logging"); -import("system.result"); -import("system.vars"); -import("DuplicateScanner_lib"); - -let clusterRecordId = vars.get("$field.UID"); - -let contactIdsInCluster = DuplicateScannerUtils.getCachedDuplicatesForClusterId(clusterRecordId); - -/* - * To achieve that if there are no duplicates, no contacts should be shown and therefore returned by the - * recordcontainer, an invalid id gets returned. It then is used in the conditionProcess to load the duplicates. - * Because of its invalidity, no records are shown. 
-*/ -if(contactIdsInCluster.length == 0) - result.string(JSON.stringify(["nodata"])); -else - result.string(JSON.stringify(contactIdsInCluster)); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/duplicateactionscontrol_param/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/duplicateactionscontrol_param/valueProcess.js deleted file mode 100644 index 5267adbe23e51fbb6b2c1c2aa44c947c3c3e7c34..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/duplicateactionscontrol_param/valueProcess.js +++ /dev/null @@ -1,2 +0,0 @@ -import("system.result"); -result.string("2");//todo exchange with keyword diff --git a/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/onlyshowcontactids_param/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/onlyshowcontactids_param/valueProcess.js deleted file mode 100644 index ecb8c518cd40068c5e36606b64c83765a69962f6..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicatepersonsconsumer/children/onlyshowcontactids_param/valueProcess.js +++ /dev/null @@ -1,18 +0,0 @@ -import("system.logging"); -import("system.result"); -import("system.vars"); -import("DuplicateScanner_lib"); - -let clusterRecordId = vars.get("$field.UID"); - -let contactIdsInCluster = DuplicateScannerUtils.getCachedDuplicatesForClusterId(clusterRecordId); - -/* - * To achieve that if there are no duplicates, no contacts should be shown and therefore returned by the - * recordcontainer, an invalid id gets returned. It then is used in the conditionProcess to load the duplicates. - * Because of its invalidity, no records are shown. 
-*/ -if(contactIdsInCluster.length == 0) - result.string(JSON.stringify(["nodata"])); -else - result.string(JSON.stringify(contactIdsInCluster)); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/clusterid_param/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/clusterid_param/valueProcess.js deleted file mode 100644 index 152dfe0324a75ddba53552148d86b4af97acc6dd..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/clusterid_param/valueProcess.js +++ /dev/null @@ -1,5 +0,0 @@ -import("system.vars"); -import("system.result"); - -let clusterId = vars.get("$field.UID"); -result.string(clusterId); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/targetentity/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/targetentity/valueProcess.js deleted file mode 100644 index ea14ae6b612db05a0e3a34e900ca16547f9ed208..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicatesunrelatedorganisationconsumer/children/targetentity/valueProcess.js +++ /dev/null @@ -1,4 +0,0 @@ -import("system.vars"); -import("system.result"); - -result.string("Organisation_entity"); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/clusterid_param/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/clusterid_param/valueProcess.js deleted file mode 100644 index 03d5df5be8044683b94f306002bcbeada25b8737..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/clusterid_param/valueProcess.js +++ /dev/null @@ -1,6 +0,0 @@ -import("system.vars"); -import("system.result"); - -//let clusterId = vars.get("$field.UID"); -let clusterId = vars.get("$param.ClusterId_param"); -result.string(clusterId); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/targetentity/valueProcess.js b/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/targetentity/valueProcess.js deleted file mode 100644 index f8b07f56abc4e6b2df8800916a77fa58b50e99bf..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/duplicatesunrelatedpersonconsumer/children/targetentity/valueProcess.js +++ /dev/null @@ -1,4 +0,0 @@ -import("system.vars"); -import("system.result"); - -result.string("Person_entity"); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/organisationopenclusterdetailactiongroup/children/organisationopenclusterdetail/onActionProcess.js b/entity/Duplicates_entity/entityfields/organisationopenclusterdetailactiongroup/children/organisationopenclusterdetail/onActionProcess.js deleted file mode 100644 index f599915bdc2dbbb45a9b52bb92e7de622c74f618..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/organisationopenclusterdetailactiongroup/children/organisationopenclusterdetail/onActionProcess.js +++ /dev/null @@ -1,12 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("system.neon"); - -let contextName = "Duplicates"; -let viewName = "OrganisationClusterMain_view"; - -var params = {}; 
-params["ClusterId_param"] = vars.get("$sys.selection")[0]; -params["TargetEntity"] = "Organisation_entity"; - -neon.openContext(contextName, viewName, null, neon.OPERATINGSTATE_VIEW, params); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/personopenclusterdetailactiongroup/children/personopenclusterdetail/onActionProcess.js b/entity/Duplicates_entity/entityfields/personopenclusterdetailactiongroup/children/personopenclusterdetail/onActionProcess.js deleted file mode 100644 index 9d5cf8d6da175cc02200a6874c07f460589f6745..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/personopenclusterdetailactiongroup/children/personopenclusterdetail/onActionProcess.js +++ /dev/null @@ -1,12 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("system.neon"); - -let contextName = "Duplicates"; -let viewName = "PersonClusterMain_view"; - -var params = {}; -params["ClusterId_param"] = vars.get("$sys.selection")[0]; -params["TargetEntity"] = "Person_entity"; - -neon.openContext(contextName, viewName, null, neon.OPERATINGSTATE_VIEW, params); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/clusterid_param/valueProcess.js b/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/clusterid_param/valueProcess.js deleted file mode 100644 index 8ef7b768764a4f534b886492c99ec2e9408c00e5..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/clusterid_param/valueProcess.js +++ /dev/null @@ -1,5 +0,0 @@ -import("system.vars"); -import("system.result"); - -let clusterId = vars.get("$param.ClusterId_param"); -result.string(clusterId); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/targetentity/valueProcess.js b/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/targetentity/valueProcess.js deleted file mode 100644 index e781fb72fd248164b8b63a98008094744aee7460..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/selforganisationduplicatesconsumer/children/targetentity/valueProcess.js +++ /dev/null @@ -1,2 +0,0 @@ -import("system.result"); -result.string("Organisation_entity"); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/clusterid_param/valueProcess.js b/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/clusterid_param/valueProcess.js deleted file mode 100644 index 258d52e236265f259b354a56fd04b1a3d2fcb566..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/clusterid_param/valueProcess.js +++ /dev/null @@ -1,6 +0,0 @@ -import("system.vars"); -import("system.result"); - -//let clusterId = vars.get("$sys.selection"); -let clusterId = vars.get("$param.ClusterId_param"); -result.string(clusterId); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/targetentity/valueProcess.js b/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/targetentity/valueProcess.js deleted file mode 100644 index 0f7bee25ea3bd34aeeceff7f47f7f9118e69b7ba..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/selfpersonduplicatesconsumer/children/targetentity/valueProcess.js 
+++ /dev/null @@ -1,2 +0,0 @@ -import("system.result"); -result.string("Person_entity"); \ No newline at end of file diff --git a/entity/Duplicates_entity/entityfields/selfpersonduplicatesprovider/titlePluralProcess.js b/entity/Duplicates_entity/entityfields/selfpersonduplicatesprovider/titlePluralProcess.js deleted file mode 100644 index b213a20410d97225b6a299a51857dcf0057ba50c..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/entityfields/selfpersonduplicatesprovider/titlePluralProcess.js +++ /dev/null @@ -1,3 +0,0 @@ -import("system.result"); -import("system.translate"); -result.string(translate.text("Person duplicates")); \ No newline at end of file diff --git a/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js b/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js deleted file mode 100644 index fc29941db97b09cd069a88b166304eb2be952b1e..0000000000000000000000000000000000000000 --- a/entity/Duplicates_entity/recordcontainers/recordcontainer/contentProcess.js +++ /dev/null @@ -1,121 +0,0 @@ -import("Sql_lib"); -import("system.logging"); -import("system.db"); -import("system.vars"); -import("system.result"); - -var INDEX_CLUSTERID = 0; -var INDEX_FIRSTNAME = 1; -var INDEX_LASTNAME = 2; -var INDEX_ORGNAME = 1; - -let targetEntity = vars.get("$param.TargetEntity"); -let duplicates = []; - -let selectedClusterId = vars.get("$param.ClusterId_param"); - -let duplicateInfosQuery = new SqlBuilder(); - -let selectedId = vars.get("$local.idvalues"); - -if(selectedId) -{ - /* - * Definitely a todo. - * Support for the action "Ignore whole cluster" - * If this action is used two times in a row, an error occurs on the second time. - * Although the selected record isn't present in the recordcontainer any more, the core tries to load a record with the same id. - * As a result an error gets thrown. If a dummy record gets returned here, it plays along with the current internal logic and doesn't throw an error. - * If a preview should be shown, this part of the container has to be extended. 
- */ - duplicates.push([selectedId, "", "", "", ""]); - result.object(duplicates); -} -else -{ - if(targetEntity == "Person_entity") - { - duplicateInfosQuery.select("CLUSTERID, FIRSTNAME, LASTNAME") - .from("DUPLICATECLUSTERS") - .join("CONTACT", "CONTACT.CONTACTID = DUPLICATEID") - .join("PERSON", "PERSON.PERSONID = CONTACT.PERSON_ID") - .where("DUPLICATEID not in (select UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID from UNRELATEDDUPLICATES)") - .andIfSet("DUPLICATECLUSTERS.CLUSTERID", selectedClusterId) - .orderBy("CLUSTERID"); - } - else - { - duplicateInfosQuery.select("CLUSTERID, ORGANISATION.\"NAME\"") - .from("DUPLICATECLUSTERS") - .join("CONTACT", "CONTACT.CONTACTID = DUPLICATEID") - .join("ORGANISATION", "ORGANISATION.ORGANISATIONID = CONTACT.ORGANISATION_ID") - .where("DUPLICATEID not in (select UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID from UNRELATEDDUPLICATES)") - .andIfSet("DUPLICATECLUSTERS.CLUSTERID", vars.get("$local.idvalues"), SqlBuilder.IN()) - .orderBy("CLUSTERID"); - } - let duplicateInfos = duplicateInfosQuery.table(); - - let MAX_SHOW_CLUSTER_RECORDS = 4; - let recordClusterId = ""; - let recordDescription = ""; - let recordDuplicateInClusterCount = 0; - - for (let i = 0; i < duplicateInfos.length; i++) - { - let currentClusterId = duplicateInfos[i][INDEX_CLUSTERID]; - let currentDescription = ""; - - //Build the description depending on the targetEntity - if(targetEntity == "Person_entity") - currentDescription = duplicateInfos[i][INDEX_FIRSTNAME] + " " + duplicateInfos[i][INDEX_LASTNAME]; - else - currentDescription = duplicateInfos[i][INDEX_ORGNAME]; - - if(i == 0) - { - recordClusterId = currentClusterId; - recordDescription = currentDescription; - recordDuplicateInClusterCount = 1; - continue; - } - - //If the record belongs to the same Cluster as the one before, append its value and increase the counter - //otherwise write the clusters record an start a new record. - if(recordClusterId == currentClusterId) - { - if(recordDuplicateInClusterCount < MAX_SHOW_CLUSTER_RECORDS) - recordDescription += ", " + currentDescription; - if(recordDuplicateInClusterCount == MAX_SHOW_CLUSTER_RECORDS) - recordDescription += ", ..." - recordDuplicateInClusterCount++; - - /* - * Finish the current record if its the last duplicate. - * It has to be checked wether or not more than one element is currently in the cluster: - * Normally, there are always at least 2 elements in a cluster. If then a duplicate relation - * is beign ignored, there's only one record left in this particluar cluster. - * As there are then no interactions possible (and a cluster of one is no cluster), this cluster musn't be shown in the list. - */ - if(i == duplicateInfos.length-1 && recordDuplicateInClusterCount > 1) - duplicates.push([recordClusterId, recordDescription, recordDuplicateInClusterCount, targetEntity, recordClusterId]); - } - else - { - /* - * Finish the current record if its the next cluster. - * It has to be checked wether or not more than one element is currently in the cluster: - * Normally, there are always at least 2 elements in a cluster. If then a duplicate relation - * is beign ignored, there's only one record left in this particluar cluster. - * As there would be no interactions possible (and a cluster of one is no cluster), this cluster musn't be shown in the list. 
- */ - if(recordDuplicateInClusterCount > 1) - duplicates.push([recordClusterId, recordDescription, recordDuplicateInClusterCount, targetEntity, recordClusterId]); - - recordClusterId = currentClusterId - recordDescription = currentDescription - recordDuplicateInClusterCount = 1; - } - } - - result.object(duplicates); -} diff --git a/entity/Organisation_entity/Organisation_entity.aod b/entity/Organisation_entity/Organisation_entity.aod index a1c9dc5748145d7807a8927740c9383fa46d735c..dd647881157686f2719bf326af969cb1f5ef0942 100644 --- a/entity/Organisation_entity/Organisation_entity.aod +++ b/entity/Organisation_entity/Organisation_entity.aod @@ -1026,10 +1026,6 @@ <name>OnlyShowContactIds_param</name> <valueProcess>%aditoprj%/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js</valueProcess> </entityParameter> - <entityParameter> - <name>ExcludedContactIds_param</name> - <valueProcess>%aditoprj%/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js</valueProcess> - </entityParameter> </children> </entityConsumer> <entityConsumer> @@ -1264,52 +1260,6 @@ <name>FilterPreSet_param</name> <expose v="true" /> </entityParameter> - <entityProvider> - <name>NonselfDuplicates</name> - <documentation>%aditoprj%/entity/Organisation_entity/entityfields/nonselfduplicates/documentation.adoc</documentation> - <dependencies> - <entityDependency> - <name>2e410b9e-5ebc-48ea-9562-da386202d7e8</name> - <entityName>Duplicates_entity</entityName> - <fieldName>DuplicateOrganisationsConsumer</fieldName> - <isConsumer v="false" /> - </entityDependency> - </dependencies> - <children> - <entityParameter> - <name>AttributeKeyId_param</name> - <expose v="false" /> - </entityParameter> - <entityParameter> - <name>DuplicateCurrentContactId_param</name> - <expose v="false" /> - </entityParameter> - <entityParameter> - <name>ExcludedContactIds_param</name> - <expose v="false" /> - </entityParameter> - <entityParameter> - <name>OnlyShowContactIds_param</name> - <expose v="true" /> - </entityParameter> - <entityParameter> - <name>OnlyOwnSupervised_param</name> - <expose v="false" /> - </entityParameter> - <entityParameter> - <name>MapViewAdditionalFeatures_param</name> - <expose v="false" /> - </entityParameter> - <entityParameter> - <name>MapViewCenterLon_param</name> - <expose v="false" /> - </entityParameter> - <entityParameter> - <name>MapViewCenterLat_param</name> - <expose v="false" /> - </entityParameter> - </children> - </entityProvider> <entityConsumer> <name>AttributesFilter</name> <dependency> @@ -1526,22 +1476,13 @@ <stateProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/stateProcess.js</stateProcess> </entityActionField> <entityActionField> - <name>IgnoreDuplicate</name> - <title>Ignore Duplicate</title> - <onActionProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js</onActionProcess> - <isMenuAction v="true" /> + <name>IgnoreDuplicates</name> + <title>Ignore Duplicate(s)</title> + <onActionProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js</onActionProcess> <isObjectAction v="false" /> <isSelectionAction v="true" /> <iconId>VAADIN:CLOSE</iconId> - <stateProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/stateProcess.js</stateProcess> - 
</entityActionField> - <entityActionField> - <name>IgnoreWholeCluster</name> - <title>Ignore whole Cluster</title> - <onActionProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js</onActionProcess> - <isObjectAction v="false" /> - <iconId>VAADIN:CLOSE</iconId> - <stateProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js</stateProcess> + <stateProcess>%aditoprj%/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/stateProcess.js</stateProcess> </entityActionField> </children> </entityActionGroup> @@ -1880,6 +1821,13 @@ <filterConditionProcess>%aditoprj%/entity/Organisation_entity/recordcontainers/db/filterextensions/responsibleassignment/filterConditionProcess.js</filterConditionProcess> <filtertype>EXTENDED</filtertype> </filterExtension> + <filterExtension> + <name>Duplicates_filter</name> + <title>Duplicates</title> + <contentType>NUMBER</contentType> + <filterConditionProcess>%aditoprj%/entity/Organisation_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js</filterConditionProcess> + <filtertype>BASIC</filtertype> + </filterExtension> <filterExtension> <name>Communication_Mail_filter</name> <title>Communication: Mail</title> diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js deleted file mode 100644 index c9289cbd0ef896d7dbcf5eae668bac8a87e861da..0000000000000000000000000000000000000000 --- a/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js +++ /dev/null @@ -1,10 +0,0 @@ -import("system.neon"); -import("system.vars"); -import("DuplicateScanner_lib"); - -let sourceContactId = vars.get("$param.DuplicateCurrentContactId_param"); -let selectedContactId = vars.get("$sys.selection"); -let clusterId = DuplicateScannerUtils.getClusterId(sourceContactId); -DuplicateScannerUtils.createUnrelatedDuplicateRelation(sourceContactId, selectedContactId, clusterId); - -neon.refreshAll(); \ No newline at end of file diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..1170b6b210bbd7a325950bf151609ddfaffb2ffd --- /dev/null +++ b/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js @@ -0,0 +1,10 @@ +import("system.neon"); +import("system.vars"); +import("system.result"); +import("DuplicateScanner_lib"); + +var selection = vars.get("$sys.selection"); +var contactId = vars.get("$param.DuplicateCurrentContactId_param"); +DuplicateScannerUtils.ignoreDuplicates("Organisation_entity", contactId, selection); +// Update the rows, because UNRELATEDDUPLICATES wont trigger a refresh even with write entities +neon.refreshAll(); diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/stateProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/stateProcess.js similarity index 100% rename from entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicate/stateProcess.js rename to 
entity/Organisation_entity/entityfields/duplicateactions/children/ignoreduplicates/stateProcess.js diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js deleted file mode 100644 index 99d7f55925b3fed54dc2d5e20604793b6e9fd862..0000000000000000000000000000000000000000 --- a/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js +++ /dev/null @@ -1,21 +0,0 @@ -import("system.logging"); -import("system.neon"); -import("system.vars"); -import("DuplicateScanner_lib"); -import("system.notification"); - -let contactId = vars.get("$field.CONTACTID"); -let clusterId = DuplicateScannerUtils.getClusterId(contactId); - -let duplicateContactIdsInClusterRay = DuplicateScannerUtils.getCachedDuplicatesForClusterId(clusterId) - -if(duplicateContactIdsInClusterRay.length > 1) -{ - let referenceDuplicateId = duplicateContactIdsInClusterRay[0]; - for (let i = 1; i < duplicateContactIdsInClusterRay.length; i++) - { - DuplicateScannerUtils.createUnrelatedDuplicateRelation(referenceDuplicateId, duplicateContactIdsInClusterRay[i], clusterId); - } -} - - diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js deleted file mode 100644 index fdd913ad06bf2815da3127d405d21b258c4ad795..0000000000000000000000000000000000000000 --- a/entity/Organisation_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js +++ /dev/null @@ -1,11 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("system.neon"); -import("system.result"); - -//Actions to show in the duplicates view inside the persons main view - -let actionState = vars.get("$param.DuplicateActionsControl_param"); - -if(actionState != null && actionState != "2")//todo replace with keyword - result.string(neon.COMPONENTSTATE_INVISIBLE); \ No newline at end of file diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js index 29dab7b72c8898044cfabd79426552733f9f7dc4..215e7717849566576365ba23ea84e846983f4639 100644 --- a/entity/Organisation_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js +++ b/entity/Organisation_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js @@ -1,20 +1,20 @@ import("Employee_lib"); import("system.vars"); import("system.neon"); -import("DuplicateScanner_lib"); +import("DuplicateMerge_lib"); let sourceContactId = vars.get("$param.DuplicateCurrentContactId_param"); let targetContactId = vars.get("$sys.selection")[0]; //todo the actual merge ought to happen in a separate view where the contact infos can be merged manually by the user. 
-let mergeSuccess = DuplicateScannerUtils.mergeOrganisation(sourceContactId, targetContactId); +let mergeSuccess = DuplicateMergeUtils.mergeOrganisation(sourceContactId, targetContactId); if(mergeSuccess) { let currentContactId = EmployeeUtils.getCurrentContactId(); if(currentContactId == null) currentContactId = ""; - DuplicateScannerUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Organisation"); + DuplicateMergeUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Organisation"); neon.openContext("Organisation", "OrganisationMain_view", [targetContactId], neon.OPERATINGSTATE_VIEW, null) } \ No newline at end of file diff --git a/entity/Organisation_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js b/entity/Organisation_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js index 5ac58579f1326724bf4e6340a751ee86b12850b4..dbc79d21be81b7047ed7932d561cb65cc475637b 100644 --- a/entity/Organisation_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js +++ b/entity/Organisation_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js @@ -1,20 +1,20 @@ import("Employee_lib"); import("system.vars"); import("system.neon"); -import("DuplicateScanner_lib"); +import("DuplicateMerge_lib"); let targetContactId = vars.get("$param.DuplicateCurrentContactId_param"); let sourceContactId = vars.get("$sys.selection")[0]; //todo the actual merge ought to happen in a separate view where the contact infos can be merged manually by the user. -let mergeSuccess = DuplicateScannerUtils.mergeOrganisation(sourceContactId, targetContactId); +let mergeSuccess = DuplicateMergeUtils.mergeOrganisation(sourceContactId, targetContactId); if(mergeSuccess) { let currentContactId = EmployeeUtils.getCurrentContactId(); if(currentContactId == null) currentContactId = ""; - DuplicateScannerUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Organisation"); + DuplicateMergeUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Organisation"); //neon.refresh() with no fields will refresh the current image (and all sub images) but NOT the preview. neon.refreshAll() would refresh both, //why it would lead to an error because it's trying to load the already opened preview of the duplicateContact which just got deleted //and does not exist any more which results in an exception diff --git a/entity/Organisation_entity/entityfields/nonselfduplicates/documentation.adoc b/entity/Organisation_entity/entityfields/nonselfduplicates/documentation.adoc deleted file mode 100644 index dc35980c12405163da2417e0472197dd9e3c7285..0000000000000000000000000000000000000000 --- a/entity/Organisation_entity/entityfields/nonselfduplicates/documentation.adoc +++ /dev/null @@ -1,3 +0,0 @@ -Provides organisation duplicate-records without the `Organisation_entity` scope, for example for the `Duplicates_entity`. - -The provider is named `NonselfDuplicates` to differentiate this provider and the `SelfDuplicates`-provider. 
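The uncached self-duplicates lookup that is reworked below is split into a producer and a consumer which only communicate through OnlyShowContactIds_param: the simplified valueProcess returns a JSON-encoded array of duplicate contact ids via DuplicateScannerUtils.getDuplicateIdsByEntityVars, and the reworked db conditionProcess parses that string into a CONTACT.CONTACTID IN (...) condition. The otherwise uncommented includedContacts.push("-") is the part worth spelling out: it keeps the IN() list non-empty, so an empty duplicate list matches no contact at all. A hedged sketch of the consumer side, using only the parameter name and SqlBuilder helpers that appear in the changes below:

import("system.vars");
import("Sql_lib");

var cond = newWhere();
if (vars.exists("$param.OnlyShowContactIds_param") && typeof vars.get("$param.OnlyShowContactIds_param") == "string")
{
    var includedContacts = JSON.parse(vars.getString("$param.OnlyShowContactIds_param"));
    // No CONTACTID is ever "-", so pushing it guarantees the IN() list is never
    // empty and an empty duplicate list yields zero rows rather than all rows.
    includedContacts.push("-");
    cond.and("CONTACT.CONTACTID", includedContacts, SqlBuilder.IN());
}
// cond is then combined with the remaining organisation conditions exactly as
// in the existing conditionProcess.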
\ No newline at end of file diff --git a/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js b/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js deleted file mode 100644 index 849632882bbabd4e65dfb25c7dc984366bde5116..0000000000000000000000000000000000000000 --- a/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("system.result"); -import("DuplicateScanner_lib"); - -let unrelatedIds = DuplicateScannerUtils.getUnrelatedRelationsForDuplicate(vars.get("$field.CONTACTID")); -result.string(JSON.stringify(unrelatedIds)); \ No newline at end of file diff --git a/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js b/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js index 77f449d52a0995e5bb7e8ba94746a1659bf36001..5eacae644eb24d4c6d12cdc1adeb70395f1a6046 100644 --- a/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js +++ b/entity/Organisation_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js @@ -1,55 +1,5 @@ -import("system.project"); -import("system.indexsearch"); -import("system.vars"); import("DuplicateScanner_lib"); import("system.result"); -var scannerName = "OrganisationDuplicates"; -var targetEntity = "Organisation_entity"; -var valuesToCheck = {}; -var entityFieldsToLoad = DuplicateScannerUtils.getEntityFieldObjectFromConfig(scannerName, targetEntity); - -var idsForEmptyResult = JSON.stringify(["nodata"]); - -if (entityFieldsToLoad == null) - result.string(idsForEmptyResult); -else -{ - //Read the values of all available entity fields and write the fieldname7value combination - //as key/value pairs into an object. This is used to trigger the scan for duplicates - - vars.get("$field.NAME") - vars.get("$field.STANDARD_CITY"); - vars.get("$field.STANDARD_ZIP"); - vars.get("$field.STANDARD_ADDRESS"); - - var allFieldsToLoad = entityFieldsToLoad.entityFields.concat(entityFieldsToLoad.entityIdField); - allFieldsToLoad.forEach(function (field) - { - var fieldValue = vars.get("$field." + field); - if (fieldValue) - valuesToCheck[field] = fieldValue; - }); - - var scanResults = []; - - //don't search if only the id field has a value - var fieldsToCheck = Object.keys(valuesToCheck); - if (!(fieldsToCheck.length === 0 || (fieldsToCheck.length === 1 && entityFieldsToLoad.entityIdField in valuesToCheck))) - scanResults = DuplicateScannerUtils.scanForDuplicates(scannerName, targetEntity, valuesToCheck, null) || []; - - var duplicateIds = scanResults.map(function (scanResult) - { - return scanResult[indexsearch.FIELD_ID]; - }); - - /* - * To achieve that if there are no duplicates, no contacts should be shown and therefore returned by the - * recordcontainer, an invalid id gets returned. It then is used in the conditionProcess to load the duplicates. - * Because of its invalidity, no records are shown. 
- */ - if (duplicateIds.length == 0) - result.string(idsForEmptyResult); - else - result.string(JSON.stringify(duplicateIds)); -} \ No newline at end of file +var duplicateIds = DuplicateScannerUtils.getDuplicateIdsByEntityVars("Organisation_entity"); +result.string(JSON.stringify(duplicateIds)); diff --git a/entity/Organisation_entity/initFilterProcess.js b/entity/Organisation_entity/initFilterProcess.js index 936a623f599055fc4fb5fa81c850fc7d323bff22..afecc1ede02d6f412681545610696b1efa17dc90 100644 --- a/entity/Organisation_entity/initFilterProcess.js +++ b/entity/Organisation_entity/initFilterProcess.js @@ -5,15 +5,10 @@ import("KeywordRegistry_basic"); import("system.result"); var filter = vars.get("$param.FilterPreSet_param"); - -var res; -if (filter) - res = filter; -else if (vars.get("$sys.presentationmode") === neon.CONTEXT_PRESENTATIONMODE_FILTER) +if(!filter && vars.get("$sys.presentationmode") === neon.CONTEXT_PRESENTATIONMODE_FILTER) { var statusInactive = $KeywordRegistry.contactStatus$inactive(); - - filter = { + filter = JSON.stringify({ type: "group", operator: "AND", childs: [{ @@ -24,9 +19,9 @@ else if (vars.get("$sys.presentationmode") === neon.CONTEXT_PRESENTATIONMODE_FIL key: statusInactive, value: KeywordUtils.getViewValue($KeywordRegistry.contactStatus(), statusInactive) }] - }; - res = JSON.stringify(filter); + }); +} +if(filter) +{ + result.string(filter); } - -if (res) - result.string(res); \ No newline at end of file diff --git a/entity/Organisation_entity/recordcontainers/db/conditionProcess.js b/entity/Organisation_entity/recordcontainers/db/conditionProcess.js index c2a18dbbe573744f49cb15381c720021078bf01c..49874f5c2e3e1a20e79875094534c16984b1dc4c 100644 --- a/entity/Organisation_entity/recordcontainers/db/conditionProcess.js +++ b/entity/Organisation_entity/recordcontainers/db/conditionProcess.js @@ -10,7 +10,6 @@ import("Sql_lib"); import("Context_lib"); var cond = newWhere(); -var onlyShowContactIds = JSON.parse(vars.get("$param.OnlyShowContactIds_param")); // filter privat company if it is not needed if (vars.getString("$param.WithPrivate_param") != "true") @@ -45,7 +44,13 @@ if (vars.exists("$param.OnlyActive_param") && vars.get("$param.OnlyActive_param" cond.and("CONTACT.STATUS", $KeywordRegistry.contactStatus$active()) cond.andIfSet("ORGANISATION.KIND", "$param.OrganisationType_param"); -cond.andIfSet("CONTACT.CONTACTID", onlyShowContactIds, SqlBuilder.IN()) + +if (vars.exists("$param.OnlyShowContactIds_param") && typeof vars.get("$param.OnlyShowContactIds_param") == "string") +{ + var includedContacts = JSON.parse(vars.getString("$param.OnlyShowContactIds_param")); + includedContacts.push("-"); + cond.and("CONTACT.CONTACTID", includedContacts, SqlBuilder.IN()); +} if (vars.exists("$param.OnlyOwnSupervised_param") && vars.get("$param.OnlyOwnSupervised_param") == "true") { diff --git a/entity/Organisation_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js b/entity/Organisation_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..b9c669adb7ced1a46123a038d7cf1385e57731f6 --- /dev/null +++ b/entity/Organisation_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js @@ -0,0 +1,16 @@ +import("system.result"); +import("Sql_lib"); +import("system.vars"); +import("DuplicateScanner_lib"); + +var condOp = SqlUtils.getSqlConditionalOperator(vars.get("$local.operator")); + 
+result.string(newWhere( + "CONTACT.CONTACTID", + DuplicateScannerUtils.getDuplicateConditionalListSql( + ["Organisation_entity"], + vars.get("$local.rawvalue"), + condOp + ), + SqlBuilder.IN() +)); diff --git a/entity/Organisation_entity/recordcontainers/db/onDBDelete.js b/entity/Organisation_entity/recordcontainers/db/onDBDelete.js index 00e50690d485216c4b4956f416b688ab433e68a8..320f4e3329fa4fd7195da03495b3cd7e5812a9d6 100644 --- a/entity/Organisation_entity/recordcontainers/db/onDBDelete.js +++ b/entity/Organisation_entity/recordcontainers/db/onDBDelete.js @@ -7,7 +7,7 @@ import("Attribute_lib"); // TODO: enable when duplicate-module is finalized let contactId = vars.get("$field.CONTACTID"); -DuplicateScannerUtils.deleteCachedDuplicate(contactId); +DuplicateScannerUtils.deleteHasDuplicateEntries("Organisation_entity", [contactId]); new AttributeRelationQuery(contactId, null, ContextUtils.getCurrentContextId()) .deleteAllAttributes(); diff --git a/entity/Organisation_entity/recordcontainers/db/onDBInsert.js b/entity/Organisation_entity/recordcontainers/db/onDBInsert.js index 60299cd7d4eda2ceed743571cec9cbda912ad318..c8db8fdecb72ba18f14ae3fd3a5bd05223ac2396 100644 --- a/entity/Organisation_entity/recordcontainers/db/onDBInsert.js +++ b/entity/Organisation_entity/recordcontainers/db/onDBInsert.js @@ -1,5 +1,8 @@ import("system.vars"); import("Workflow_lib"); +import("DuplicateScanner_lib"); + +DuplicateScannerUtils.updateHasDuplicateEntry("Organisation_entity"); //start the execution in afterOperatingState, because here the dataset is not yet inserted vars.set("$context.workflowQueue", {}); diff --git a/entity/Organisation_entity/recordcontainers/db/onDBUpdate.js b/entity/Organisation_entity/recordcontainers/db/onDBUpdate.js index 9f0337793c84207f61013809345f829d1f874671..737768cc8678c59374d99e790f4956812e3661db 100644 --- a/entity/Organisation_entity/recordcontainers/db/onDBUpdate.js +++ b/entity/Organisation_entity/recordcontainers/db/onDBUpdate.js @@ -6,6 +6,9 @@ import("Communication_lib"); import("Entity_lib"); import("Workflow_lib"); import("KeywordRegistry_basic"); +import("DuplicateScanner_lib"); + +DuplicateScannerUtils.updateHasDuplicateEntry("Organisation_entity"); // TODO: this is a workaround for missing possibility to react on changes of fields not connected to record Contqainer #1030023 var rowdata = vars.get("$local.rowdata"); diff --git a/entity/Person_entity/Person_entity.aod b/entity/Person_entity/Person_entity.aod index f294673d397ad7e5b12ab7f16821daa5d3c0d75b..aeb643112fd2fc0bc45ff441ee624fa59125fc8a 100644 --- a/entity/Person_entity/Person_entity.aod +++ b/entity/Person_entity/Person_entity.aod @@ -979,10 +979,6 @@ <name>OnlyShowContactIds_param</name> <valueProcess>%aditoprj%/entity/Person_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js</valueProcess> </entityParameter> - <entityParameter> - <name>ExcludedContactIds_param</name> - <valueProcess>%aditoprj%/entity/Person_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js</valueProcess> - </entityParameter> </children> </entityConsumer> <entityField> @@ -1198,14 +1194,6 @@ <entityProvider> <name>NonselfDuplicates</name> <documentation>%aditoprj%/entity/Person_entity/entityfields/nonselfduplicates/documentation.adoc</documentation> - <dependencies> - <entityDependency> - <name>3a4352e2-9686-4c52-9d01-dbfad8c68ea7</name> - <entityName>Duplicates_entity</entityName> - <fieldName>DuplicatePersonsConsumer</fieldName> - <isConsumer 
v="false" /> - </entityDependency> - </dependencies> <children> <entityParameter> <name>ContactId_param</name> @@ -1422,22 +1410,13 @@ <stateProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/stateProcess.js</stateProcess> </entityActionField> <entityActionField> - <name>IgnoreDuplicate</name> - <title>Ignore Duplicate</title> - <onActionProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js</onActionProcess> - <isMenuAction v="true" /> + <name>IgnoreDuplicates</name> + <title>Ignore Duplicate(s)</title> + <onActionProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js</onActionProcess> <isObjectAction v="false" /> <isSelectionAction v="true" /> <iconId>VAADIN:CLOSE</iconId> - <stateProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/stateProcess.js</stateProcess> - </entityActionField> - <entityActionField> - <name>IgnoreWholeCluster</name> - <title>Ignore whole cluster</title> - <onActionProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js</onActionProcess> - <isObjectAction v="false" /> - <iconId>VAADIN:CLOSE</iconId> - <stateProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js</stateProcess> + <stateProcess>%aditoprj%/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/stateProcess.js</stateProcess> </entityActionField> </children> </entityActionGroup> @@ -1498,6 +1477,10 @@ <iconId>VAADIN:CURLY_BRACKETS</iconId> <stateProcess>%aditoprj%/entity/Person_entity/entityfields/openadminview/stateProcess.js</stateProcess> </entityActionField> + <entityParameter> + <name>FilterPreSet_param</name> + <expose v="true" /> + </entityParameter> </entityFields> <recordContainers> <dbRecordContainer> @@ -1825,6 +1808,13 @@ <filterConditionProcess>%aditoprj%/entity/Person_entity/recordcontainers/db/filterextensions/supervisorassignment/filterConditionProcess.js</filterConditionProcess> <filtertype>BASIC</filtertype> </filterExtension> + <filterExtension> + <name>Duplicates_filter</name> + <title>Duplicates</title> + <contentType>NUMBER</contentType> + <filterConditionProcess>%aditoprj%/entity/Person_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js</filterConditionProcess> + <filtertype>BASIC</filtertype> + </filterExtension> <filterExtension> <name>Communication_Mail_filter</name> <title>Communication: Mail</title> diff --git a/entity/Person_entity/entityfields/deletefunction/onActionProcess.js b/entity/Person_entity/entityfields/deletefunction/onActionProcess.js index 083e2d8b0d3d0649e0984e06b3947a6ae709fd3e..324c438c9d7953e5177748efa8679fe55ab0e8f6 100644 --- a/entity/Person_entity/entityfields/deletefunction/onActionProcess.js +++ b/entity/Person_entity/entityfields/deletefunction/onActionProcess.js @@ -21,7 +21,7 @@ if(contactIds > 1) entities.deleteRow(config); - DuplicateScannerUtils.deleteCachedDuplicate(contactId); + DuplicateScannerUtils.deleteHasDuplicateEntries("Person_entity", [contactId]); new AttributeRelationQuery(contactId, null, ContextUtils.getCurrentContextId()) .deleteAllAttributes(); diff --git a/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js 
b/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js deleted file mode 100644 index c9289cbd0ef896d7dbcf5eae668bac8a87e861da..0000000000000000000000000000000000000000 --- a/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/onActionProcess.js +++ /dev/null @@ -1,10 +0,0 @@ -import("system.neon"); -import("system.vars"); -import("DuplicateScanner_lib"); - -let sourceContactId = vars.get("$param.DuplicateCurrentContactId_param"); -let selectedContactId = vars.get("$sys.selection"); -let clusterId = DuplicateScannerUtils.getClusterId(sourceContactId); -DuplicateScannerUtils.createUnrelatedDuplicateRelation(sourceContactId, selectedContactId, clusterId); - -neon.refreshAll(); \ No newline at end of file diff --git a/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js b/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..f3e13a73c4ee312f805af83bcf3fc3ddd468fde5 --- /dev/null +++ b/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/onActionProcess.js @@ -0,0 +1,10 @@ +import("system.neon"); +import("system.vars"); +import("system.result"); +import("DuplicateScanner_lib"); + +var selection = vars.get("$sys.selection"); +var contactId = vars.get("$param.DuplicateCurrentContactId_param"); +DuplicateScannerUtils.ignoreDuplicates("Person_entity", contactId, selection); +// Update the rows, because UNRELATEDDUPLICATES wont trigger a refresh even with write entities +neon.refreshAll(); diff --git a/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/stateProcess.js b/entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/stateProcess.js similarity index 100% rename from entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicate/stateProcess.js rename to entity/Person_entity/entityfields/duplicateactions/children/ignoreduplicates/stateProcess.js diff --git a/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js b/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js deleted file mode 100644 index f74997cb1d72c3baa56b1dd12a42ba77eb782769..0000000000000000000000000000000000000000 --- a/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/onActionProcess.js +++ /dev/null @@ -1,28 +0,0 @@ -import("system.logging"); -import("system.neon"); -import("system.vars"); -import("DuplicateScanner_lib"); -import("system.notification"); - -let contactId = vars.get("$field.CONTACTID"); -logging.log("contactId -> " + contactId); -let clusterId = DuplicateScannerUtils.getClusterId(contactId); -logging.log("clusterId -> " + clusterId); -let duplicateContactIdsInClusterRay = DuplicateScannerUtils.getCachedDuplicatesForClusterId(clusterId) -logging.log("duplicateContactIdsInClusterRay -> " + duplicateContactIdsInClusterRay); -if(duplicateContactIdsInClusterRay.length > 1) -{ - let referenceDuplicateId = duplicateContactIdsInClusterRay[0]; - for (let i = 1; i < duplicateContactIdsInClusterRay.length; i++) - { - DuplicateScannerUtils.createUnrelatedDuplicateRelation(referenceDuplicateId, duplicateContactIdsInClusterRay[i], clusterId); - } - neon.refreshAll(); - -// var params = {}; -// params["TargetEntity"] = "Person_entity"; -// -// neon.openContext("Duplicates", "DuplicatesOverview_view", null, 
neon.OPERATINGSTATE_VIEW, params); -} - - diff --git a/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js b/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js deleted file mode 100644 index fdd913ad06bf2815da3127d405d21b258c4ad795..0000000000000000000000000000000000000000 --- a/entity/Person_entity/entityfields/duplicateactions/children/ignorewholecluster/stateProcess.js +++ /dev/null @@ -1,11 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("system.neon"); -import("system.result"); - -//Actions to show in the duplicates view inside the persons main view - -let actionState = vars.get("$param.DuplicateActionsControl_param"); - -if(actionState != null && actionState != "2")//todo replace with keyword - result.string(neon.COMPONENTSTATE_INVISIBLE); \ No newline at end of file diff --git a/entity/Person_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js b/entity/Person_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js index d5f864db525910dd0635ee2ab84279e3346599b6..1d0937831bc491d5758f88f732caec553377bcdc 100644 --- a/entity/Person_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js +++ b/entity/Person_entity/entityfields/duplicateactions/children/integratecurrentintoselectedaction/onActionProcess.js @@ -2,13 +2,13 @@ import("system.tools"); import("Employee_lib"); import("system.vars"); import("system.neon"); -import("DuplicateScanner_lib"); +import("DuplicateMerge_lib"); var sourceContactId = vars.get("$param.DuplicateCurrentContactId_param"); var targetContactId = vars.get("$sys.selection")[0]; //todo the actual merge ought to happen in a separate view where the contact infos can be merged manually by the user. 
-var mergeSuccess = DuplicateScannerUtils.mergePerson(sourceContactId, targetContactId); +var mergeSuccess = DuplicateMergeUtils.mergePerson(sourceContactId, targetContactId); if(mergeSuccess) { @@ -21,7 +21,7 @@ if(mergeSuccess) var currentContactId = EmployeeUtils.getCurrentContactId(); if(currentContactId == null) currentContactId = ""; - DuplicateScannerUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Person"); + DuplicateMergeUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Person"); neon.openContext("Person", "PersonMain_view", [targetContactId], neon.OPERATINGSTATE_VIEW, null) } \ No newline at end of file diff --git a/entity/Person_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js b/entity/Person_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js index 6a9f2ac39a7238d7dd5fbaec53e41c57eabec241..f2662a704978644b3722648b20a35ed0bbe1ccfe 100644 --- a/entity/Person_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js +++ b/entity/Person_entity/entityfields/duplicateactions/children/integrateselectedintocurrentaction/onActionProcess.js @@ -5,14 +5,14 @@ import("KeywordRegistry_basic"); import("ActivityTask_lib"); import("system.vars"); import("system.neon"); -import("DuplicateScanner_lib"); +import("DuplicateMerge_lib"); var targetContactId = vars.get("$param.DuplicateCurrentContactId_param"); var sourceContactIdArray = vars.get("$sys.selection"); var sourceContactId = sourceContactIdArray[0]; //todo the actual merge ought to happen in a separate view where the contact infos can be merged manually by the user. -var mergeSuccess = DuplicateScannerUtils.mergePerson(sourceContactId, targetContactId); +var mergeSuccess = DuplicateMergeUtils.mergePerson(sourceContactId, targetContactId); if(mergeSuccess) { @@ -25,7 +25,7 @@ if(mergeSuccess) var currentContactId = EmployeeUtils.getCurrentContactId(); if(currentContactId == null) currentContactId = ""; - DuplicateScannerUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Person"); + DuplicateMergeUtils.createMergeSuccessActivity(sourceContactId, targetContactId, currentContactId, "Person"); //neon.refresh() with no fields will refresh the current image (and all sub images) but NOT the preview. 
neon.refreshAll() would refresh both, //why it would lead to an error because it's trying to load the already opened preview of the duplicateContact which just got deleted //and does not exist any more which results in an exception diff --git a/entity/Person_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js b/entity/Person_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js deleted file mode 100644 index 849632882bbabd4e65dfb25c7dc984366bde5116..0000000000000000000000000000000000000000 --- a/entity/Person_entity/entityfields/selfduplicatesuncached/children/excludedcontactids_param/valueProcess.js +++ /dev/null @@ -1,7 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("system.result"); -import("DuplicateScanner_lib"); - -let unrelatedIds = DuplicateScannerUtils.getUnrelatedRelationsForDuplicate(vars.get("$field.CONTACTID")); -result.string(JSON.stringify(unrelatedIds)); \ No newline at end of file diff --git a/entity/Person_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js b/entity/Person_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js index 8fa370156eae601ebddb1f7b6f024650a156de52..717e89e63b85e1e0d34414b5d1edabf0b4e20b6d 100644 --- a/entity/Person_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js +++ b/entity/Person_entity/entityfields/selfduplicatesuncached/children/onlyshowcontactids_param/valueProcess.js @@ -1,60 +1,5 @@ -import("system.indexsearch"); -import("system.project"); -import("system.logging"); -import("system.vars"); import("DuplicateScanner_lib"); import("system.result"); -var scannerName = "PersonDuplicates"; -var targetEntity = "Person_entity"; -var valuesToCheck = {}; -var entityFieldsToLoad = DuplicateScannerUtils.getEntityFieldObjectFromConfig(scannerName, targetEntity); - -var idsForEmptyResult = JSON.stringify(["nodata"]); - -if (entityFieldsToLoad == null) - result.string(idsForEmptyResult); -else -{ - //Read the values of all available entity fields and write the fieldname7value combination - //as key/value pairs into an object. This is used to trigger the scan for duplicates - - vars.get("$field.STANDARD_CITY"); - vars.get("$field.STANDARD_ZIP"); - vars.get("$field.STANDARD_ADDRESS"); - vars.get("$field.FIRSTNAME"); - vars.get("$field.LASTNAME"); - vars.get("$field.PersAddresses.insertedRows") - vars.get("$field.PersAddresses.changedRows") - vars.get("$field.PersAddresses.deletedRows") - - var allFieldsToLoad = entityFieldsToLoad.entityFields.concat(entityFieldsToLoad.entityIdField); - allFieldsToLoad.forEach(function (field) - { - var fieldValue = vars.get("$field." + field); - if (fieldValue) - valuesToCheck[field] = fieldValue; - }); - - var scanResults = []; - - //don't search if only the id field has a value - var fieldsToCheck = Object.keys(valuesToCheck); - if (!(fieldsToCheck.length === 0 || (fieldsToCheck.length === 1 && entityFieldsToLoad.entityIdField in valuesToCheck))) - scanResults = DuplicateScannerUtils.scanForDuplicates(scannerName, targetEntity, valuesToCheck, null) || []; - - var duplicateIds = scanResults.map(function (duplicate) - { - return duplicate[indexsearch.FIELD_ID]; - }); - - /* - * To achieve that if there are no duplicates, no contacts should be shown and therefore returned by the - * recordcontainer, an invalid id gets returned. It then is used in the conditionProcess to load the duplicates. 
- * Because of its invalidity, no records are shown. - */ - if (duplicateIds.length == 0) - result.string(idsForEmptyResult); - else - result.string(JSON.stringify(duplicateIds)); -} \ No newline at end of file +var duplicateIds = DuplicateScannerUtils.getDuplicateIdsByEntityVars("Person_entity"); +result.string(JSON.stringify(duplicateIds)); diff --git a/entity/Person_entity/initFilterProcess.js b/entity/Person_entity/initFilterProcess.js index f64ef8977cc3f20a910f104d6196bfe90c43fc34..afecc1ede02d6f412681545610696b1efa17dc90 100644 --- a/entity/Person_entity/initFilterProcess.js +++ b/entity/Person_entity/initFilterProcess.js @@ -1,14 +1,14 @@ -import("system.entities"); +import("system.neon"); +import("system.vars"); import("Keyword_lib"); import("KeywordRegistry_basic"); import("system.result"); -import("system.vars"); -import("system.neon"); -if (vars.get("$sys.presentationmode") === neon.CONTEXT_PRESENTATIONMODE_FILTER) +var filter = vars.get("$param.FilterPreSet_param"); +if(!filter && vars.get("$sys.presentationmode") === neon.CONTEXT_PRESENTATIONMODE_FILTER) { var statusInactive = $KeywordRegistry.contactStatus$inactive(); - var filter = { + filter = JSON.stringify({ type: "group", operator: "AND", childs: [{ @@ -19,7 +19,9 @@ if (vars.get("$sys.presentationmode") === neon.CONTEXT_PRESENTATIONMODE_FILTER) key: statusInactive, value: KeywordUtils.getViewValue($KeywordRegistry.contactStatus(), statusInactive) }] - }; - - result.string(JSON.stringify(filter)); -} \ No newline at end of file + }); +} +if(filter) +{ + result.string(filter); +} diff --git a/entity/Person_entity/recordcontainers/db/conditionProcess.js b/entity/Person_entity/recordcontainers/db/conditionProcess.js index 338811158360900835c45a56b055904518f65ed6..c5dc0dc223f41628d4350cb5502a001adf29f4ec 100644 --- a/entity/Person_entity/recordcontainers/db/conditionProcess.js +++ b/entity/Person_entity/recordcontainers/db/conditionProcess.js @@ -22,11 +22,11 @@ if (vars.exists("$param.ExcludedContactIds_param") && vars.get("$param.ExcludedC cond.andIfSet("CONTACT.CONTACTID", excludedContacts, SqlBuilder.NOT_IN()); } -var onlyShowContactIds = JSON.parse(vars.get("$param.OnlyShowContactIds_param")); - -if (onlyShowContactIds != null && onlyShowContactIds.length > 0) +if (vars.exists("$param.OnlyShowContactIds_param") && typeof vars.get("$param.OnlyShowContactIds_param") == "string") { - cond.and("CONTACT.CONTACTID", onlyShowContactIds, SqlBuilder.IN()); + var includedContacts = JSON.parse(vars.getString("$param.OnlyShowContactIds_param")); + includedContacts.push("-"); + cond.and("CONTACT.CONTACTID", includedContacts, SqlBuilder.IN()); } if (vars.exists("$param.OnlyOwnSupervised_param") && vars.get("$param.OnlyOwnSupervised_param") == "true") diff --git a/entity/Person_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js b/entity/Person_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js new file mode 100644 index 0000000000000000000000000000000000000000..d90f8920fb9dbc549e79036735aec07095381355 --- /dev/null +++ b/entity/Person_entity/recordcontainers/db/filterextensions/duplicates_filter/filterConditionProcess.js @@ -0,0 +1,16 @@ +import("system.result"); +import("Sql_lib"); +import("system.vars"); +import("DuplicateScanner_lib"); + +var condOp = SqlUtils.getSqlConditionalOperator(vars.get("$local.operator")); + +result.string(newWhere( + "CONTACT.CONTACTID", + DuplicateScannerUtils.getDuplicateConditionalListSql( + ["Person_entity"], + 
vars.get("$local.rawvalue"), + condOp + ), + SqlBuilder.IN() +)); diff --git a/entity/Person_entity/recordcontainers/db/onDBDelete.js b/entity/Person_entity/recordcontainers/db/onDBDelete.js index 56d4461823493222635f99ab42de361ec497f838..26d095805ed1fc2a0f944c3a99c4f7e49475057a 100644 --- a/entity/Person_entity/recordcontainers/db/onDBDelete.js +++ b/entity/Person_entity/recordcontainers/db/onDBDelete.js @@ -6,7 +6,7 @@ import("system.vars"); import("DuplicateScanner_lib"); var contactId = vars.get("$field.CONTACTID"); -DuplicateScannerUtils.deleteCachedDuplicate(contactId); +DuplicateScannerUtils.deleteHasDuplicateEntries("Person_entity", [contactId]); new AttributeRelationQuery(contactId, null, ContextUtils.getCurrentContextId()) .deleteAllAttributes(); diff --git a/entity/Person_entity/recordcontainers/db/onDBInsert.js b/entity/Person_entity/recordcontainers/db/onDBInsert.js index 3385c553d10f5332ad4c946ea239bf7bcdf4b6bc..8d1c5ddea0bed7f36862a8423508f130794a3a07 100644 --- a/entity/Person_entity/recordcontainers/db/onDBInsert.js +++ b/entity/Person_entity/recordcontainers/db/onDBInsert.js @@ -4,6 +4,9 @@ import("Sql_lib"); import("system.db"); import("DataPrivacy_lib"); import("system.vars"); +import("DuplicateScanner_lib"); + +DuplicateScannerUtils.updateHasDuplicateEntry("Person_entity"); //let targetEntity = "Person_entity"; let contactId = vars.get("$local.uid"); diff --git a/entity/Person_entity/recordcontainers/db/onDBUpdate.js b/entity/Person_entity/recordcontainers/db/onDBUpdate.js index 0c29899fd4393c2528219ef9dc213eb151d10b18..100f1f714f83179a745ab4c332591ab988c0e60b 100644 --- a/entity/Person_entity/recordcontainers/db/onDBUpdate.js +++ b/entity/Person_entity/recordcontainers/db/onDBUpdate.js @@ -8,6 +8,9 @@ import("Person_lib"); import("Communication_lib"); import("Entity_lib"); import("StandardObject_lib"); +import("DuplicateScanner_lib"); + +DuplicateScannerUtils.updateHasDuplicateEntry("Person_entity"); var localChanged = vars.get("$local.changed"); var orgChanged = false; diff --git a/entity/QuickEntry_entity/QuickEntry_entity.aod b/entity/QuickEntry_entity/QuickEntry_entity.aod index 9ce66331b3242cbbba9001036cd3e65dc920d156..2c460f59b8748cc6d79d1046db867b5d3614130c 100644 --- a/entity/QuickEntry_entity/QuickEntry_entity.aod +++ b/entity/QuickEntry_entity/QuickEntry_entity.aod @@ -237,6 +237,7 @@ <entityParameter> <name>ContactIds_param</name> <valueProcess>%aditoprj%/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/valueProcess.js</valueProcess> + <documentation>%aditoprj%/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/documentation.adoc</documentation> </entityParameter> <entityParameter> <name>WithPrivatePersons_param</name> diff --git a/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/documentation.adoc b/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/documentation.adoc new file mode 100644 index 0000000000000000000000000000000000000000..667cdd59e70d81d319b110ff44e24478616dae1d --- /dev/null +++ b/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/documentation.adoc @@ -0,0 +1,2 @@ +returns a list of duplicate ids +the index will be searched via the values entered in the edit frame \ No newline at end of file diff --git a/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/valueProcess.js 
b/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/valueProcess.js index 68ef6879ab48622e909ec092bb0314156746f96e..c342c88f4c3327fa7a3d553daebeb3e5980d6c18 100644 --- a/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/valueProcess.js +++ b/entity/QuickEntry_entity/entityfields/organdpersduplicates/children/contactids_param/valueProcess.js @@ -2,6 +2,7 @@ import("system.indexsearch"); import("system.vars"); import("DuplicateScanner_lib"); import("system.result"); +import("IndexSearch_lib"); //trigger refresh vars.get("$field.FIRSTNAME"); @@ -11,11 +12,11 @@ var uid = vars.get("$field.UID"); var idsForEmptyResult = JSON.stringify(["nodata"]); var duplicateScans = []; -duplicateScans.push(["PersonDuplicates", "Person_entity", {"CONTACTID" : uid}]); +duplicateScans.push(["Person_entity", {"CONTACTID" : uid}]); vars.get("$field.Contacts.insertedRows").forEach(function (contact) { - duplicateScans.push(["PersonDuplicates", "Person_entity", contact]); + duplicateScans.push(["Person_entity", contact]); }); var organisationName = vars.get("$field.ORGANISATION_NAME"); @@ -34,7 +35,7 @@ if (organisationName || firstOrganisationAddress) address = firstOrganisationAddress["ADDRESS"]; } - duplicateScans.push(["OrganisationDuplicates", "Organisation_entity", { + duplicateScans.push(["Organisation_entity", { "CONTACTID" : uid, "NAME" : organisationName, "STANDARD_CITY" : city, @@ -43,9 +44,9 @@ if (organisationName || firstOrganisationAddress) }]); } -var duplicates = duplicateScans.reduce(function (duplicateArr, [scannerName, entity, fieldValues]) +var duplicates = duplicateScans.reduce(function (duplicateArr, [entity, fieldValues]) { - return duplicateArr.concat(_getDuplicates(scannerName, entity, fieldValues)); + return duplicateArr.concat(_getDuplicates(entity, fieldValues)); }, []); if (duplicates.length === 0) @@ -54,34 +55,22 @@ else result.string(JSON.stringify(duplicates)); -function _getDuplicates (pScannerName, pEntity, pEntityFieldValues) +function _getDuplicates(pEntity, pEntityFieldValues) { - var fieldsToLoad = DuplicateScannerUtils.getEntityFieldObjectFromConfig(pScannerName, pEntity); - if (fieldsToLoad == null) - return []; + var scanner = DuplicateScannerUtils.getScannerByEntity(pEntity); + var indexsearchFilter = IndexsearchFilterUtils.fromFilter(scanner.filter); + var entityFields = indexsearchFilter.getFields(); + entityFields.add(scanner.idField); var valuesToCheck = {}; - - var allFieldsToLoad = fieldsToLoad.entityFields.concat(fieldsToLoad.entityIdField); - allFieldsToLoad.forEach(function (field) - { + entityFields.forEach(function(field) { var fieldValue = field in pEntityFieldValues ? pEntityFieldValues[field] : vars.get("$field." 
+ field); - if (fieldValue) - valuesToCheck[field] = fieldValue; - }); - - //don't search if only the id field has a value - var fieldsToCheck = Object.keys(valuesToCheck); - if (fieldsToCheck.length === 0 || (fieldsToCheck.length === 1 && fieldsToLoad.entityIdField in valuesToCheck)) - return []; - - var scanResults = DuplicateScannerUtils.scanForDuplicates(pScannerName, pEntity, valuesToCheck, null) || []; - var duplicateIds = scanResults.map(function (duplicate) - { - return duplicate[indexsearch.FIELD_ID]; + valuesToCheck[field] = fieldValue || ""; }); - return duplicateIds; + var indexPattern = indexsearchFilter.buildQuery(valuesToCheck); + var duplicateIds = DuplicateScannerUtils.getDuplicateIds(pEntity, indexPattern, valuesToCheck[scanner.idField]); + return DuplicateScannerUtils.filterIgnored(pEntity, valuesToCheck[scanner.idField], duplicateIds); } diff --git a/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod b/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod index 553c663568b837f780f938c8d79c797667171b8c..ecda5358640212e13b5362823443b87e9b873edc 100644 --- a/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod +++ b/language/_____LANGUAGE_EXTRA/_____LANGUAGE_EXTRA.aod @@ -8076,6 +8076,12 @@ <entry> <key>EML files can't be edited here. You can download, edit and reupload the template to change the content.</key> </entry> + <entry> + <key>The duplicate row corrosponding to %0 has been rebuild</key> + </entry> + <entry> + <key>Duplicaterow rebuild</key> + </entry> <entry> <key>Edit HTML</key> </entry> diff --git a/language/_____LANGUAGE_de/_____LANGUAGE_de.aod b/language/_____LANGUAGE_de/_____LANGUAGE_de.aod index e14370eefdc66607d7c1e05cc905124d4bf4f150..13994529638e9a72e61a9e29b07934b78e31c0b8 100644 --- a/language/_____LANGUAGE_de/_____LANGUAGE_de.aod +++ b/language/_____LANGUAGE_de/_____LANGUAGE_de.aod @@ -10685,6 +10685,14 @@ Bitte Datumseingabe prüfen</value> <entry> <key>Calendar week</key> </entry> + <entry> + <key>Duplicaterow rebuild</key> + <value>Dubletten neu berechnet</value> + </entry> + <entry> + <key>The duplicate row corrosponding to %0 has been rebuild</key> + <value>Die Dubletten des Filters %0 wurden neu berechnet</value> + </entry> <entry> <key>HTML Editor</key> </entry> diff --git a/language/_____LANGUAGE_en/_____LANGUAGE_en.aod b/language/_____LANGUAGE_en/_____LANGUAGE_en.aod index 9aa216250971ccfd30c03fa21ce6d2fe94f52a90..20d77b0bfae3f3079baf156d5d990e5c36fed1fa 100644 --- a/language/_____LANGUAGE_en/_____LANGUAGE_en.aod +++ b/language/_____LANGUAGE_en/_____LANGUAGE_en.aod @@ -8178,6 +8178,12 @@ <entry> <key>EML files can't be edited here.
You can download, edit and reupload the template to change the content.</key> </entry> + <entry> + <key>The duplicate row corrosponding to %0 has been rebuild</key> + </entry> + <entry> + <key>Duplicaterow rebuild</key> + </entry> <entry> <key>Edit HTML</key> </entry> diff --git a/neonContext/DuplicateScannerResultFieldConfig/DuplicateScannerResultFieldConfig.aod b/neonContext/DuplicateScannerResultFieldConfig/DuplicateScannerResultFieldConfig.aod deleted file mode 100644 index 79d97363fbb75d25de88168f1dbd76713df82073..0000000000000000000000000000000000000000 --- a/neonContext/DuplicateScannerResultFieldConfig/DuplicateScannerResultFieldConfig.aod +++ /dev/null @@ -1,12 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonContext xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.1" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonContext/1.1.1"> - <name>DuplicateScannerResultFieldConfig</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <entity>DuplicateScannerResultFieldConfig_entity</entity> - <references> - <neonViewReference> - <name>4bab12e9-c4c3-450f-bfcf-8ecbec1f994c</name> - <view>DuplicateScannerResultFieldConfigEdit_view</view> - </neonViewReference> - </references> -</neonContext> diff --git a/neonContext/Duplicates/Duplicates.aod b/neonContext/Duplicates/Duplicates.aod deleted file mode 100644 index 5de0f3a1913c9bb49aecba2c8ea9998e23ad1faf..0000000000000000000000000000000000000000 --- a/neonContext/Duplicates/Duplicates.aod +++ /dev/null @@ -1,50 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonContext xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.1" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonContext/1.1.1"> - <name>Duplicates</name> - <title>Duplicates</title> - <majorModelMode>DISTRIBUTED</majorModelMode> - <filterView>DuplicatesOverview_view</filterView> - <entity>Duplicates_entity</entity> - <references> - <neonViewReference> - <name>d8994f4c-3abf-4ff1-8bdb-f12e527df655</name> - <view>PersonDuplicatesFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>de287a3b-1d6a-435c-b65e-d833c1751edf</name> - <view>DuplicatesOverview_view</view> - </neonViewReference> - <neonViewReference> - <name>5bdeb931-4e69-4520-bbc9-94fb17679331</name> - <view>PersonDublicatesTab_view</view> - </neonViewReference> - <neonViewReference> - <name>70cb2e96-5bc1-46cb-982f-b27db5d143d2</name> - <view>PersonClusterMain_view</view> - </neonViewReference> - <neonViewReference> - <name>7cdb6ca7-e99d-4eb6-897a-0953157bf62f</name> - <view>DuplicatesUnrelatedCluster_view</view> - </neonViewReference> - <neonViewReference> - <name>4b9a1a26-e14f-4246-b474-8bfb3e3a95b0</name> - <view>OrganisationDuplicatesTab_view</view> - </neonViewReference> - <neonViewReference> - <name>f9b46eab-7417-4f61-b7cd-dc772c04ddc0</name> - <view>OrganisationDuplicatesFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>d68a425a-037a-4725-8dc1-b0afac277bdd</name> - <view>OrganisationUnrelatedDuplicates_view</view> - </neonViewReference> - <neonViewReference> - <name>4c1fc406-1c83-4fb6-9059-e45ff5c80756</name> - <view>OrganisationClusterMain_view</view> - </neonViewReference> - <neonViewReference> - <name>2400acfd-50e4-472d-b69c-368b9d25b6c6</name> - <view>PersonClusterPreview_view</view> - </neonViewReference> - </references> -</neonContext> diff --git 
a/neonContext/DuplicatesUnrelated/DuplicatesUnrelated.aod b/neonContext/DuplicatesUnrelated/DuplicatesUnrelated.aod deleted file mode 100644 index 56bf4fd8574682d13f54fd2c5628fde361465b46..0000000000000000000000000000000000000000 --- a/neonContext/DuplicatesUnrelated/DuplicatesUnrelated.aod +++ /dev/null @@ -1,16 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonContext xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.1" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonContext/1.1.1"> - <name>DuplicatesUnrelated</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <entity>DuplicatesUnrelated_entity</entity> - <references> - <neonViewReference> - <name>0b5cbd31-cfa1-4ee8-8bd2-b4772ac95953</name> - <view>DuplicatesUnrelatedPersonFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>433a665e-63ac-4281-8f7c-08603362daf7</name> - <view>DuplicatesUnrelatedOrganisationFilter_view</view> - </neonViewReference> - </references> -</neonContext> diff --git a/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod b/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod index 5a99ae0226ac05f614f190c54732bedc624b4a5c..7c453624af648353eb5b2cab9be287aa609c5de4 100644 --- a/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod +++ b/neonView/DuplicateScannerFilter_view/DuplicateScannerFilter_view.aod @@ -11,6 +11,7 @@ <tableViewTemplate> <name>Filters</name> <entityField>#ENTITY</entityField> + <favoriteActionGroup1>FilterActions</favoriteActionGroup1> <isCreatable v="false" /> <isDeletable v="false" /> <isEditable v="true" /> @@ -28,6 +29,7 @@ <treeTableViewTemplate> <name>Treetable</name> <entityField>#ENTITY</entityField> + <favoriteActionGroup1>FilterActions</favoriteActionGroup1> <isCreatable v="false" /> <isDeletable v="false" /> <columns> diff --git a/neonView/DuplicateScannerResultFieldConfigEdit_view/DuplicateScannerResultFieldConfigEdit_view.aod b/neonView/DuplicateScannerResultFieldConfigEdit_view/DuplicateScannerResultFieldConfigEdit_view.aod deleted file mode 100644 index 9798328f4161f04c56cd02f48166a9dce167306c..0000000000000000000000000000000000000000 --- a/neonView/DuplicateScannerResultFieldConfigEdit_view/DuplicateScannerResultFieldConfigEdit_view.aod +++ /dev/null @@ -1,24 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>DuplicateScannerResultFieldConfigEdit_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <genericMultipleViewTemplate> - <name>ResultFieldsConfig</name> - <autoNewRow v="true" /> - <entityField>#ENTITY</entityField> - <title>Result fields</title> - <columns> - <neonGenericMultipleTableColumn> - <name>e330572c-aa47-4c52-a760-3e8765ce3dd0</name> - <entityField>ENTITY_FIELD_NAME</entityField> - </neonGenericMultipleTableColumn> - </columns> - </genericMultipleViewTemplate> - </children> -</neonView> diff --git a/neonView/DuplicatesUnrelatedCluster_view/DuplicatesUnrelatedCluster_view.aod b/neonView/DuplicatesUnrelatedCluster_view/DuplicatesUnrelatedCluster_view.aod deleted file mode 100644 index 670b2c939dc7b4c793999bf90f30c9503d4c0052..0000000000000000000000000000000000000000 --- 
a/neonView/DuplicatesUnrelatedCluster_view/DuplicatesUnrelatedCluster_view.aod +++ /dev/null @@ -1,17 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>DuplicatesUnrelatedCluster_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>484990d0-890b-48b8-8618-6f0fab367138</name> - <entityField>DuplicatesUnrelatedPersonConsumer</entityField> - <view>DuplicatesUnrelatedPersonFilter_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/DuplicatesUnrelatedOrganisationFilter_view/DuplicatesUnrelatedOrganisationFilter_view.aod b/neonView/DuplicatesUnrelatedOrganisationFilter_view/DuplicatesUnrelatedOrganisationFilter_view.aod deleted file mode 100644 index 03a42bc169f8edf17595f673f799194149d4eb81..0000000000000000000000000000000000000000 --- a/neonView/DuplicatesUnrelatedOrganisationFilter_view/DuplicatesUnrelatedOrganisationFilter_view.aod +++ /dev/null @@ -1,30 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>DuplicatesUnrelatedOrganisationFilter_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <tableViewTemplate> - <name>UnrelatedOrganisations</name> - <hideContentSearch v="true" /> - <entityField>#ENTITY</entityField> - <isCreatable v="false" /> - <isEditable v="false" /> - <title>Unrelated organisation duplicates</title> - <columns> - <neonTableColumn> - <name>04681f8d-b941-4a66-be50-6ac08d6f52a4</name> - <entityField>SourceDuplicateDescription</entityField> - </neonTableColumn> - <neonTableColumn> - <name>b293ed18-ebf5-474e-8ec2-851a3562b4d5</name> - <entityField>UnrelatedDuplicateDescription</entityField> - </neonTableColumn> - </columns> - </tableViewTemplate> - </children> -</neonView> diff --git a/neonView/DuplicatesUnrelatedPersonFilter_view/DuplicatesUnrelatedPersonFilter_view.aod b/neonView/DuplicatesUnrelatedPersonFilter_view/DuplicatesUnrelatedPersonFilter_view.aod deleted file mode 100644 index 173666ab5f8a8ffb5ded2760fd5da08851e2e209..0000000000000000000000000000000000000000 --- a/neonView/DuplicatesUnrelatedPersonFilter_view/DuplicatesUnrelatedPersonFilter_view.aod +++ /dev/null @@ -1,30 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>DuplicatesUnrelatedPersonFilter_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <tableViewTemplate> - <name>UnrelatedPersons</name> - <hideContentSearch v="true" /> - <entityField>#ENTITY</entityField> - <isCreatable v="false" /> - <isEditable v="false" /> - <title>Unrelated person duplicates</title> - <columns> - <neonTableColumn> - <name>734re984-6a0b-4126-ab49-452e2b54f76d</name> - <entityField>SourceDuplicateDescription</entityField> - </neonTableColumn> 
- <neonTableColumn> - <name>8615259b-de5b-378u-945d-2ff934ae1b8c</name> - <entityField>UnrelatedDuplicateDescription</entityField> - </neonTableColumn> - </columns> - </tableViewTemplate> - </children> -</neonView> diff --git a/neonView/OrganisationClusterMain_view/OrganisationClusterMain_view.aod b/neonView/OrganisationClusterMain_view/OrganisationClusterMain_view.aod deleted file mode 100644 index 72d99cac595e86b488e878b16d1a700b90746e48..0000000000000000000000000000000000000000 --- a/neonView/OrganisationClusterMain_view/OrganisationClusterMain_view.aod +++ /dev/null @@ -1,23 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>OrganisationClusterMain_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - <direction>HORIZONTAL</direction> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>b25f94bf-9316-4438-be78-b8cf596440b3</name> - <entityField>DuplicateOrganisationsConsumer</entityField> - <view>OrganisationFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>5589bb81-171d-417b-a1b2-975144109d55</name> - <entityField>#ENTITY</entityField> - <view>DuplicatesUnrelatedCluster_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/OrganisationDuplicatesFilter_view/OrganisationDuplicatesFilter_view.aod b/neonView/OrganisationDuplicatesFilter_view/OrganisationDuplicatesFilter_view.aod deleted file mode 100644 index c950c689201ccb9ecaf23a58f879058711e6c866..0000000000000000000000000000000000000000 --- a/neonView/OrganisationDuplicatesFilter_view/OrganisationDuplicatesFilter_view.aod +++ /dev/null @@ -1,32 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>OrganisationDuplicatesFilter_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <tableViewTemplate> - <name>OrganisationDuplicatesTable</name> - <hideContentSearch v="true" /> - <entityField>#ENTITY</entityField> - <favoriteActionGroup1>OrganisationOpenClusterDetailActionGroup</favoriteActionGroup1> - <favoriteActionGroup2>DuplicateClusterActionGroup</favoriteActionGroup2> - <isCreatable v="false" /> - <isDeletable v="false" /> - <isEditable v="false" /> - <columns> - <neonTableColumn> - <name>ebe8d904-449c-49bd-915c-5b4fee894bc2</name> - <entityField>CLUSTER_DESCRIPTION</entityField> - </neonTableColumn> - <neonTableColumn> - <name>48db4335-6bdb-4a6a-809e-f9c371733f85</name> - <entityField>COUNT_DUPLICATES_IN_CLUSTER</entityField> - </neonTableColumn> - </columns> - </tableViewTemplate> - </children> -</neonView> diff --git a/neonView/OrganisationDuplicatesTab_view/OrganisationDuplicatesTab_view.aod b/neonView/OrganisationDuplicatesTab_view/OrganisationDuplicatesTab_view.aod deleted file mode 100644 index ed86c8059d4920b5576f13406c5d4d4f86597b71..0000000000000000000000000000000000000000 --- a/neonView/OrganisationDuplicatesTab_view/OrganisationDuplicatesTab_view.aod +++ /dev/null @@ -1,24 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>OrganisationDuplicatesTab_view</name> - <title>Organisation duplicates</title> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - <direction>HORIZONTAL</direction> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>8b4ab951-afb3-4fac-915b-89226ab2f849</name> - <entityField>SelfOrganisationDuplicatesConsumer</entityField> - <view>OrganisationDuplicatesFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>d1e2ba4a-a5d6-4bba-a646-5918490e43a4</name> - <entityField>#ENTITY</entityField> - <view>OrganisationUnrelatedDuplicates_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/OrganisationUnrelatedDuplicates_view/OrganisationUnrelatedDuplicates_view.aod b/neonView/OrganisationUnrelatedDuplicates_view/OrganisationUnrelatedDuplicates_view.aod deleted file mode 100644 index 0b23c809aafd9bedf13995b9049e1be41c8a6ea2..0000000000000000000000000000000000000000 --- a/neonView/OrganisationUnrelatedDuplicates_view/OrganisationUnrelatedDuplicates_view.aod +++ /dev/null @@ -1,17 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>OrganisationUnrelatedDuplicates_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>edc0822d-4388-4e3d-93d7-2e46e32f5742</name> - <entityField>DuplicatesUnrelatedOrganisationConsumer</entityField> - <view>DuplicatesUnrelatedOrganisationFilter_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/PersonClusterMain_view/PersonClusterMain_view.aod b/neonView/PersonClusterMain_view/PersonClusterMain_view.aod deleted file mode 100644 index dcbd9badb6fe40452c89f4d4f54cef06fb5a0938..0000000000000000000000000000000000000000 --- a/neonView/PersonClusterMain_view/PersonClusterMain_view.aod +++ /dev/null @@ -1,23 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>PersonClusterMain_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - <direction>HORIZONTAL</direction> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>ff307d1c-9de7-4842-a697-05e783eca14b</name> - <entityField>DuplicatePersonsConsumer</entityField> - <view>PersonFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>0df195b2-9074-4734-b6df-be3bbf47050a</name> - <entityField>#ENTITY</entityField> - <view>DuplicatesUnrelatedCluster_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/PersonClusterPreview_view/PersonClusterPreview_view.aod b/neonView/PersonClusterPreview_view/PersonClusterPreview_view.aod deleted file mode 100644 index 69ab192629f5b172f18bc89eaccf2b384fdfb3bb..0000000000000000000000000000000000000000 --- a/neonView/PersonClusterPreview_view/PersonClusterPreview_view.aod +++ /dev/null @@ -1,22 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> 
-<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>PersonClusterPreview_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>156ebee5-6b13-462f-b956-0470f27a174b</name> - <entityField>DuplicatePersonsConsumer</entityField> - <view>PersonLookup_view</view> - </neonViewReference> - <neonViewReference> - <name>d8afce27-436c-42f0-a326-98bf6539b7bd</name> - <entityField>DuplicatesUnrelatedPersonConsumer</entityField> - <view>DuplicatesUnrelatedPersonFilter_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/PersonDublicatesTab_view/PersonDublicatesTab_view.aod b/neonView/PersonDublicatesTab_view/PersonDublicatesTab_view.aod deleted file mode 100644 index c11bc1396a663aa39b2f17a1b691d239ddbd79c6..0000000000000000000000000000000000000000 --- a/neonView/PersonDublicatesTab_view/PersonDublicatesTab_view.aod +++ /dev/null @@ -1,24 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>PersonDublicatesTab_view</name> - <title>Person duplicates</title> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <boxLayout> - <name>layout</name> - <direction>HORIZONTAL</direction> - </boxLayout> - </layout> - <children> - <neonViewReference> - <name>c82a1ae2-9f8f-4149-8bac-5621136d779b</name> - <entityField>SelfPersonDuplicatesConsumer</entityField> - <view>PersonDuplicatesFilter_view</view> - </neonViewReference> - <neonViewReference> - <name>2aee29fb-9844-4e3a-a284-a04dadf9eadc</name> - <entityField>DuplicatesUnrelatedPersonConsumer</entityField> - <view>DuplicatesUnrelatedPersonFilter_view</view> - </neonViewReference> - </children> -</neonView> diff --git a/neonView/PersonDuplicatesFilter_view/PersonDuplicatesFilter_view.aod b/neonView/PersonDuplicatesFilter_view/PersonDuplicatesFilter_view.aod deleted file mode 100644 index 0e95f05f72286d568edddded65e4cd0d21031823..0000000000000000000000000000000000000000 --- a/neonView/PersonDuplicatesFilter_view/PersonDuplicatesFilter_view.aod +++ /dev/null @@ -1,48 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<neonView xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.1.8" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/neonView/1.1.8"> - <name>PersonDuplicatesFilter_view</name> - <majorModelMode>DISTRIBUTED</majorModelMode> - <layout> - <groupLayout> - <name>layout</name> - </groupLayout> - </layout> - <children> - <tableViewTemplate> - <name>PersonDuplicatesTable</name> - <hideContentSearch v="true" /> - <entityField>#ENTITY</entityField> - <favoriteActionGroup1>PersonOpenClusterDetailActionGroup</favoriteActionGroup1> - <isCreatable v="false" /> - <isDeletable v="false" /> - <isEditable v="false" /> - <isSaveable v="false" /> - <linkedFrame></linkedFrame> - <columns> - <neonTableColumn> - <name>7508e984-6a0b-4126-ab49-452e2b54f76d</name> - <entityField>CLUSTER_DESCRIPTION</entityField> - </neonTableColumn> - <neonTableColumn> - <name>8615259b-de5b-493a-945d-2ff934ae1b8c</name> - <entityField>COUNT_DUPLICATES_IN_CLUSTER</entityField> - 
</neonTableColumn> - </columns> - </tableViewTemplate> - <treeTableViewTemplate> - <name>Treetable</name> - <entityField>#ENTITY</entityField> - <favoriteActionGroup1>PersonOpenClusterDetailActionGroup</favoriteActionGroup1> - <columns> - <neonTreeTableColumn> - <name>060c69aa-242a-4141-acd1-b82a76d99521</name> - <entityField>CLUSTER_DESCRIPTION</entityField> - </neonTreeTableColumn> - <neonTreeTableColumn> - <name>67458881-0d45-406e-a362-852711b09bd1</name> - <entityField>COUNT_DUPLICATES_IN_CLUSTER</entityField> - </neonTreeTableColumn> - </columns> - </treeTableViewTemplate> - </children> -</neonView> diff --git a/process/RebuildDuplicatesCache_serverProcess/RebuildDuplicatesCache_serverProcess.aod b/process/DuplicateMerge_lib/DuplicateMerge_lib.aod similarity index 65% rename from process/RebuildDuplicatesCache_serverProcess/RebuildDuplicatesCache_serverProcess.aod rename to process/DuplicateMerge_lib/DuplicateMerge_lib.aod index 078550c7200171ccff2e26f2515a616016da481a..bc0eb0235229e83f6e13f3215fd29f3dcb46bc1a 100644 --- a/process/RebuildDuplicatesCache_serverProcess/RebuildDuplicatesCache_serverProcess.aod +++ b/process/DuplicateMerge_lib/DuplicateMerge_lib.aod @@ -1,9 +1,9 @@ <?xml version="1.0" encoding="UTF-8"?> <process xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.2.2" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/process/1.2.2"> - <name>RebuildDuplicatesCache_serverProcess</name> + <name>DuplicateMerge_lib</name> <majorModelMode>DISTRIBUTED</majorModelMode> - <process>%aditoprj%/process/RebuildDuplicatesCache_serverProcess/process.js</process> + <process>%aditoprj%/process/DuplicateMerge_lib/process.js</process> <variants> - <element>EXECUTABLE</element> + <element>LIBRARY</element> </variants> </process> diff --git a/process/DuplicateMerge_lib/process.js b/process/DuplicateMerge_lib/process.js new file mode 100644 index 0000000000000000000000000000000000000000..c745f681018b6da88c83e567178804c6179d8a37 --- /dev/null +++ b/process/DuplicateMerge_lib/process.js @@ -0,0 +1,352 @@ +import("Communication_lib"); +import("Sql_lib"); +import("system.db"); +import("ActivityTask_lib"); +import("KeywordRegistry_basic"); +import("system.translate"); +import("DuplicateScanner_lib"); + +/** + * Methods for duplicate merging. + * Do not create an instance of this! + * + * @class + */ +function DuplicateMergeUtils() {} + +/* + * + * Merges the source person into the target person. + * This + * - replaces the source's with the target's contactid in a predefined set of tables. + * - resets the standard communications of the source contact and keeps the ones of the target. 
+ * - updates participants of campaigns and removes obsolete ones (which would be duplicates) + * - deletes the source person and contact + * - deletes the duplicate record, if one exists + * - deletes all unrelated-duplicate-relations containing the source contact id + * + * @param {String} pSourceContactId The contact to be integrated into another + * @param {String} pTargetContactId The contact in which the source gets integrated + * @returns {Boolean} if the merge was successful + */ +DuplicateMergeUtils.mergePerson = function(pSourceContactId, pTargetContactId) +{ + var sourcePersonId = newSelect("PERSON_ID") + .from("CONTACT") + .where("CONTACT.CONTACTID", pSourceContactId) + .cell(); + var targetPersonId = newSelect("PERSON_ID") + .from("CONTACT") + .where("CONTACT.CONTACTID", pTargetContactId) + .cell(); + + DuplicateMergeUtils._deleteUniqueAttributes(pSourceContactId, pTargetContactId); + var isLinkedDataUpdated = DuplicateMergeUtils._migrateLinkedContactData(pSourceContactId, pTargetContactId); + var isParticipantsUpdated = DuplicateMergeUtils._migrateParticipantsToNewContact("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", + pSourceContactId, pTargetContactId); + DuplicateMergeUtils._updateOtherContacts(pSourceContactId, sourcePersonId, targetPersonId); + + var deleteStatements = []; + if (sourcePersonId != targetPersonId) + deleteStatements.push(newWhere("PERSON.PERSONID", sourcePersonId).buildDeleteStatement()); + + deleteStatements.push(newWhere("CONTACT.CONTACTID", pSourceContactId).buildDeleteStatement()); + deleteStatements = deleteStatements.concat(DuplicateMergeUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); + + //update binary + var metaData = db.getBinaryMetadata("CONTACT", "DOCUMENT", pSourceContactId, true, SqlUtils.getBinariesAlias()); + metaData.forEach(function (binaryMetaData) + { + db.updateBinaryAssignment(binaryMetaData.id, "CONTACT", "DOCUMENT", pTargetContactId, SqlUtils.getBinariesAlias()); + }); + var deletedRows = db.deletes(deleteStatements) + + DuplicateScannerUtils.deleteHasDuplicateEntries("Person_entity", [pSourceContactId]); + + return isLinkedDataUpdated || isParticipantsUpdated || deletedRows > 0; +} + +DuplicateMergeUtils.mergeOrganisation = function(pSourceContactId, pTargetContactId) +{ + var sourceOrganisationId = newSelect("ORGANISATION_ID") + .from("CONTACT") + .where("CONTACT.CONTACTID", pSourceContactId) + .cell(); + + var targetOrganisationId = newSelect("ORGANISATION_ID") + .from("CONTACT") + .where("CONTACT.CONTACTID", pTargetContactId) + .cell(); + + DuplicateMergeUtils._deleteUniqueAttributes(pSourceContactId, pTargetContactId); + DuplicateMergeUtils._migrateLinkedContactData(pSourceContactId, pTargetContactId); + DuplicateMergeUtils._migrateParticipantsToNewContact("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", + pSourceContactId, pTargetContactId); + DuplicateMergeUtils._migratePersonsToNewOrganisation(sourceOrganisationId, targetOrganisationId); + + var deleteStatements = []; + deleteStatements.push(newWhere("CONTACT.CONTACTID", pSourceContactId).buildDeleteStatement()); + deleteStatements = deleteStatements.concat(DuplicateMergeUtils._buildDeleteOrganisationAndContactQuery(sourceOrganisationId, pSourceContactId)); + deleteStatements = deleteStatements.concat(DuplicateMergeUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); + + //update binary + var metaData = db.getBinaryMetadata("CONTACT", "DOCUMENT", pSourceContactId, true, SqlUtils.getBinariesAlias()); + metaData.forEach(function
(binaryMetaData) + { + db.updateBinaryAssignment(binaryMetaData.id, "CONTACT", "DOCUMENT", pTargetContactId, SqlUtils.getBinariesAlias()); + }); + var deletedRows = db.deletes(deleteStatements) + + DuplicateScannerUtils.deleteHasDuplicateEntries("Organisation_entity", [pSourceContactId]); + + return deletedRows >= 2; +} + +DuplicateMergeUtils.createMergeSuccessActivity = function(pSourceContactId, pTargetContactId, pCurrentContactId, pContext) +{ + var activityDataForInsert = { + subject: translate.withArguments("A %0 record has been merged", [pContext]), + content: translate.withArguments("%0 with ID \"%1\" has been integrated into the %0 with the ID \"%2\"", [pContext, pSourceContactId, pTargetContactId]), + //categoryKeywordId: $KeywordRegistry.ac + directionKeywordId: $KeywordRegistry.activityDirection$internal(), + responsibleContactId: pCurrentContactId + }; + var activityLinks = [[pContext, pTargetContactId]]; + + return ActivityUtils.insertNewActivity(activityDataForInsert, activityLinks, null, db.getCurrentAlias()); +} + +/* + * Persons get reassigned to new organisation + * + * @returns {Boolean} If records have been updated + */ +DuplicateMergeUtils._migratePersonsToNewOrganisation = function (pSourceOrganisationId, pTargetOrganisationId) +{ + var updateCount = newWhereIfSet(["CONTACT", "ORGANISATION_ID"], pSourceOrganisationId) + .and("PERSON_ID is not null") + .updateFields(new Map().set("ORGANISATION_ID", pTargetOrganisationId), "CONTACT"); + + var deleteCount = newWhere(["CONTACT", "ORGANISATION_ID"], pSourceOrganisationId) + .and("PERSON_ID is not null") + .deleteData(); + return updateCount > 0 || deleteCount > 0; +} + +/* + * Person and organisations keep the max_count = 1 attribute of the target and the one from the source is deleted + * + * @returns {Boolean} If records have been deleted + */ +DuplicateMergeUtils._deleteUniqueAttributes = function (pSourceContactId, pTargetContactId) +{ + var targetAttrUnique = newSelect("AB_ATTRIBUTERELATION.AB_ATTRIBUTE_ID") + .from("AB_ATTRIBUTERELATION") + .join("AB_ATTRIBUTEUSAGE on AB_ATTRIBUTERELATION.AB_ATTRIBUTE_ID = AB_ATTRIBUTEUSAGE.AB_ATTRIBUTE_ID") + .where("AB_ATTRIBUTEUSAGE.MAX_COUNT = 1") + .and("AB_ATTRIBUTERELATION.OBJECT_ROWID", pTargetContactId) + .table(); + var deleteCount = 0; + + targetAttrUnique.forEach(function(attribute){ + deleteCount += newWhereIfSet("AB_ATTRIBUTERELATION.AB_ATTRIBUTE_ID", attribute) + .and("AB_ATTRIBUTERELATION.OBJECT_ROWID", pSourceContactId) + .deleteData(); + }); + + return deleteCount > 0; +} + +DuplicateMergeUtils._migrateLinkedContactData = function (pSourceContactId, pTargetContactId) +{ + var updateStatements = new Map(); + var currentAlias = db.getCurrentAlias(); + + var setStandardsStatements = []; + var [standardPhone, standardMail, standardAddressId] = newSelect([ + "(" + CommUtil.getStandardSubSqlPhone() + ")", + "(" + CommUtil.getStandardSubSqlMail() + ")", + "CONTACT.ADDRESS_ID" + ]) + .from("CONTACT") + .where("CONTACT.CONTACTID", pTargetContactId) + .arrayRow(); + + //if the targetContact already has a standard phone comm, set the comm from the sourceContact as not standard + if (standardPhone) + { + setStandardsStatements.push( + newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId) + .and("COMMUNICATION.MEDIUM_ID", CommUtil.getMediumIdsByCategory("PHONE"), SqlBuilder.IN()) + .buildUpdateStatement({"ISSTANDARD" : "0"}) + ); + } + if (standardMail) + { + setStandardsStatements.push( + newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId) + .and("COMMUNICATION.MEDIUM_ID", 
CommUtil.getMediumIdsByCategory("EMAIL"), SqlBuilder.IN()) + .buildUpdateStatement({"ISSTANDARD" : "0"}) + ); + } + //set the standardaddress of the sourceContact as standard of the targetContact if it doesn't have one set yet + if(!standardAddressId) + { + var sourceStandard = newSelect("CONTACT.ADDRESS_ID") + .from("CONTACT") + .where("CONTACT.CONTACTID", pSourceContactId) + .cell(); + setStandardsStatements.push(newWhere("CONTACT.CONTACTID", pTargetContactId).buildUpdateStatement({"ADDRESS_ID": sourceStandard})); + } + + updateStatements.set(currentAlias, setStandardsStatements); + + DuplicateMergeUtils._getLinkedTableInfos(pTargetContactId).forEach(function ([tableName, columnName, additionalCondition, dbAlias]) + { + if (!dbAlias) + dbAlias = currentAlias; + + if (!updateStatements.has(dbAlias)) + updateStatements.set(dbAlias, []); + var statements = updateStatements.get(dbAlias); + + var updateValues = {}; + updateValues[columnName] = pTargetContactId; + + var updateCondition = new SqlBuilder(dbAlias).where([tableName, columnName], pSourceContactId).andIfSet(additionalCondition); + + //push must be used here to keep the reference + statements.push(updateCondition.buildUpdateStatement(updateValues, tableName)); + }); + + var totalChanges = 0; + + updateStatements.forEach(function (statements, alias) + { + totalChanges += db.updates(statements, alias); + }); + + totalChanges += newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId).deleteData(); //delete leftover communications from the source contact + totalChanges += new AttributeRelationQuery(pSourceContactId).deleteAllAttributes(); //delete leftover attributes + + return totalChanges > 0; +} + +/* + * All records with contactId = sourceContactId get updated, which are not assigned to the same "group" as the targetContactId. + * This is because otherwise there would now be in total two "participants" in the same "group" as opposed to one before. + * Also if they already are in the same "group" those records shouldn't be updated because it would lead to the same outcome. 
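 * For example (a sketch of the behaviour for CAMPAIGNPARTICIPANT, as called by mergePerson above via
 * _migrateParticipantsToNewContact("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", pSourceContactId, pTargetContactId)):
 * if both contacts participate in campaign A, the source's participant row is deleted because the target
 * already covers that campaign; if only the source participates in campaign B, that row's CONTACT_ID is
 * rewritten to the target, so every campaign ends up with exactly one row for the merged contact.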
+ * + * @returns {Boolean} If records have been updated + */ +DuplicateMergeUtils._migrateParticipantsToNewContact = function (pTableName, pContactIdColumn, pAssignableIdColumn, pSourceContactId, pTargetContactId) +{ + var excludedIds = newSelect(pAssignableIdColumn) + .from(pTableName) + .where([pTableName, pContactIdColumn], pTargetContactId) + .arrayColumn(); + + var updateCount = newWhereIfSet([pTableName, pAssignableIdColumn], excludedIds, SqlBuilder.NOT_IN()) + .and([pTableName, pContactIdColumn], pSourceContactId) + .updateFields(new Map().set(pContactIdColumn, pTargetContactId), pTableName); + + var deleteCount = newWhere([pTableName, pContactIdColumn], pSourceContactId) + .tableName(pTableName) + .deleteData(); + + return updateCount > 0 || deleteCount > 0; +} + +/* + * Update other contacts from the source + * + * @returns {Boolean} If records have been updated + */ +DuplicateMergeUtils._updateOtherContacts = function (pSourceContactId, sourcePersonId, targetPersonId) +{ + var otherContacts = newSelect("PERSON.PERSONID") + .from("PERSON") + .join("CONTACT", "CONTACT.PERSON_ID = PERSON.PERSONID") + .where("PERSON.PERSONID", sourcePersonId) + .and("CONTACT.CONTACTID", pSourceContactId, SqlBuilder.NOT_EQUAL()) + .table(); + var updateCount = 0; + + otherContacts.forEach(function(person){ + updateCount += newWhere("CONTACT.PERSON_ID", person) + .updateFields({"PERSON_ID" : targetPersonId}, "CONTACT"); + }); + + return updateCount > 0; +} + +DuplicateMergeUtils._buildDeleteCachedUnrelatedDuplicateQuery = function(pSourceContactId) +{ + return [ + newWhere("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pSourceContactId).buildDeleteStatement(), + newWhere("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pSourceContactId).buildDeleteStatement() + ]; +} + +DuplicateMergeUtils._buildDeleteOrganisationAndContactQuery = function(pSourceOrganisationId, pSourceContactId) +{ + return [ + newWhere("ORGANISATION.ORGANISATIONID", pSourceOrganisationId).buildDeleteStatement(), + newWhere("CONTACT.CONTACTID", pSourceContactId).buildDeleteStatement() + ]; +} + +/* + * Contains all Tables and their fields which may contain the contact id to be replaced for the data alias + * + * @returns {String[[]]} Array in the format [TableName, ContactIdColumnName, AdditionalCondition, alias] + */ +DuplicateMergeUtils._getLinkedTableInfos = function(pTargetContactId) +{ + //don't use communications that the target already has + var targetComms = newSelect("COMMUNICATION.ADDR") + .from("COMMUNICATION") + .where("COMMUNICATION.CONTACT_ID", pTargetContactId) + .arrayColumn(); + + var communicationDedupCondition = targetComms.length > 0 + ? 
newWhere("COMMUNICATION.ADDR", targetComms, SqlBuilder.NOT_IN()) + : ""; + + return[ + ["AB_APPOINTMENTLINK", "OBJECT_ROWID"], + ["AB_CTILOG", "CONTACT_ID"], + ["AB_OBJECTRELATION", "AB_OBJECTRELATIONID"], + ["AB_OBJECTRELATION", "OBJECT1_ROWID"], + ["AB_OBJECTRELATION", "OBJECT2_ROWID"], + ["AB_LOGHISTORY", "TABLENAMEID"], + ["ADDRESS", "CONTACT_ID"], + ["BULKMAILRECIPIENT", "CONTACT_ID"], + ["BULKMAIL", "TESTING_CONTACT_ID"], + ["CAMPAIGN", "EMPLOYEE_CONTACT_ID"], + ["CAMPAIGNSTEP", "EMPLOYEE_CONTACT_ID"], + ["COMMRESTRICTION", "CONTACT_ID"], + ["COMMRESTRICTION", "EMPLOYEE_INVOLVED"], + ["COMMUNICATION", "CONTACT_ID", communicationDedupCondition], + ["COMPETITION", "CONTACT_ID"], + ["CONTRACT", "CONTACT_ID"], + ["LETTERRECIPIENT", "CONTACT_ID"], + ["OBJECTMEMBER", "CONTACT_ID"], + ["OFFER", "CONTACT_ID"], + ["PRODUCT", "CONTACT_ID"], + ["PRODUCTPRICE", "CONTACT_ID"], + ["SALESORDER", "CONTACT_ID"], + ["SALESPROJECT", "CONTACT_ID"], + ["TASK", "REQUESTOR_CONTACT_ID"], + ["TASK", "EDITOR_CONTACT_ID"], + ["TASKLINK", "OBJECT_ROWID"], + ["ACTIVITY", "RESPONSIBLE"], + ["DSGVO", "CONTACT_ID"], + ["DSGVOINFO", "CONTACT_ID"], + ["TIMETRACKING", "CONTACT_ID"], + ["ACTIVITYLINK", "OBJECT_ROWID"], + ["AB_ATTRIBUTERELATION", "OBJECT_ROWID"], + + ["ASYS_CALENDARLINK", "DBID", "", SqlUtils.getSystemAlias()] + ]; +} diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js index e26053edc6cb753291a842212cbfcef324124799..525c2ec33da369445f2f42cdb51804650933cc55 100644 --- a/process/DuplicateScanner_lib/process.js +++ b/process/DuplicateScanner_lib/process.js @@ -1,20 +1,12 @@ import("system.project"); -import("ActivityTask_lib"); -import("KeywordRegistry_basic"); -import("system.translate"); -import("system.datetime"); -import("JditoFilter_lib"); -import("system.process"); import("system.util"); import("system.vars"); -import("system.net"); -import("system.logging"); import("system.db"); import("system.entities"); import("Sql_lib"); import("system.indexsearch"); -import("Communication_lib"); -import("Attribute_lib"); +import("IndexSearch_lib"); +import("system.process"); /** * Methods for duplicate scanning. @@ -24,1356 +16,277 @@ import("Attribute_lib"); */ function DuplicateScannerUtils() {} -/* - * Loads all prefilters for a scanner in the form of arrays in an array. - * Single filter record: [FILTER_CONDITION, COUNT_CHARACTERS_TO_USE, MAX_RESULTS_THRESHOLD] - * - * @param {String} pFilterName Name of the filter - * @param {String} pTargetEntity Entity which has been configured - * @returns {String[[]]} Array of arrays containing the configured values - */ -DuplicateScannerUtils.loadFilters = function(pFilterName, pTargetEntity) -{ - let query = newSelect("FILTER_CONDITION, COUNT_CHARACTERS_TO_USE, MAX_RESULTS_THRESHOLD") - .from("DUPLICATESCANNERPREFILTERCONFIG") - .join("DUPLICATESCANNER", "DUPLICATESCANNER.ID = DUPLICATESCANNERPREFILTERCONFIG.DUPLICATESCANNER_ID") - .where("DUPLICATESCANNER.FILTER_NAME", pFilterName) - .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity); - - return query.table(); -} - -/* - * Deletes the cached duplicate for the given id. - * If there would only remain one item in the cluster after deletion, the whole cluster including the duplicate gets deleted. - * In this case, all records marked as unrelated duplicate will be deleted aswell. 
- * - * @param {String} pDuplicateId Id of the duplicate to delete - */ -DuplicateScannerUtils.deleteCachedDuplicate = function(pDuplicateId) -{ - var [countDuplicatesInClusterWithoutParameterId, clusterId] = newSelect("count(ID), CLUSTERID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.CLUSTERID", newSelect("CLUSTERID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId).build(), - SqlBuilder.IN()) - .and("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId, SqlBuilder.NOT_EQUAL()) - .groupBy("CLUSTERID") - .arrayRow(); - - //If only one duplicate would be remaining, - //the whole cluster has to be deleted because there are no more duplicates. - //Otherwise delete just the single duplicate. The parameterized duplicate has been excluded via sql - //therefore check for smaller/equals 1 - if(countDuplicatesInClusterWithoutParameterId <= 1) - { - newWhere("DUPLICATECLUSTERS.CLUSTERID", clusterId).deleteData(); - newWhere("UNRELATEDDUPLICATES.CLUSTERID", clusterId).deleteData(); - } - else - { - newWhereIfSet("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId).deleteData(); - - //Delete all records where this duplicateId is mentioned - DuplicateScannerUtils.deleteAllUnrelatedDuplicateRelations(pDuplicateId); - } -} - -/* - * Deletes all Clusters for the given target Entity. - * No records markes as unrelated duplicate are being deleted. - * - * @param {String} pTargetEntity Entity which has been configured - * @return Count of deleted rows - */ -DuplicateScannerUtils.deleteClustersByTargetEntity = function(pTargetEntity) -{ - return newWhereIfSet("DUPLICATECLUSTERS.TARGET_ENTITY", pTargetEntity).deleteData(); -} - -/* - * Updates the duplicate relations markes as unrelated for the given target entity. - * All ClusterIds get updated with the new values if the same combination of duplicateIds - * still exists in the DUPLICATECLUSTERS table. - * - * Afterwards, all records which contain a nonexistend clusterId are being deleted - * - * @param {String} pTargetEntity Name of Entity whose duplicates should be updated +/** + * Returns an sql condition which returns a list of duplicate ids. + * The count is checked using pCount and pOperator. + * This function can be used for FilterExtensions + * + * @param {string[]} pEntities target entities + * @param {number} pCount value of count (will be applied to the operator) + * @param {string} pOperator the operator in sql notation e.g.: # = ? 
+ * + * @returns {SqlBuilder} subselect selecting the duplicate ids */ -DuplicateScannerUtils.refreshUnrelatedDuplicateRelations = function(pTargetEntity) +DuplicateScannerUtils.getDuplicateConditionalListSql = function(pEntities, pCount, pOperator) { - /* - * Update all records with the current valid clusterId where the same duplicateId combination exists - */ - let clusterIdChanges = newSelect("dc1.CLUSTERID, ud.CLUSTERID") - .from("UNRELATEDDUPLICATES", "ud") - .join("DUPLICATECLUSTERS", "dc1.DUPLICATEID = ud.SOURCEDUPLICATEID", "dc1") - .join("DUPLICATECLUSTERS", "dc2.DUPLICATEID = ud.UNRELATEDDUPLICATEID", "dc2") - .where(["DUPLICATECLUSTERS", "TARGET_ENTITY", "dc1"], pTargetEntity) - .table(); - - let updateStatements = clusterIdChanges.map(function ([newClusterId, oldClusterId]) - { - return newWhere("UNRELATEDDUPLICATES.CLUSTERID", oldClusterId).buildUpdateStatement({ - "CLUSTERID": newClusterId - }, "UNRELATEDDUPLICATES"); - }); + var subselect = newSelect("count(*)").from("UNRELATEDDUPLICATES") + .where("UNRELATEDDUPLICATES.DUPLICATETYPE = HASDUPLICATE.OBJECT_TYPE") + .and("UNRELATEDDUPLICATES.SOURCEDUPLICATEID = HASDUPLICATE.OBJECT_ROWID"); - db.updates(updateStatements); - - /* - * All unrelated duplicate ids that still exist in a cluster, have been updated with the new cluster id. - * All records with a nonexistend clusterid can now be deleted because they haven't been detected as a duplicate any more. - */ - newWhere("UNRELATEDDUPLICATES.CLUSTERID", newSelect("dc1.CLUSTERID").from("DUPLICATECLUSTERS", "dc1"), SqlBuilder.NOT_IN()) - .deleteData(); -} - -/* - * Creates a relation between two duplicates which means they are unrelated. - * They will not appear in each others duplicate tab any more. - * To remove this relation use DuplicateScannerUtils.deleteUnrelatedDuplicateRelation - * - * @param {String} pSourceContactId Id of first duplicate - * @param {String} pUnrelatedContactId Id of second duplicate - * @param {String} pClusterId Id of the cluster in which the duplicates are aggregated - * @returns {String} Number of Records inserted - */ -DuplicateScannerUtils.createUnrelatedDuplicateRelation = function(pSourceContactId, pUnrelatedContactId, pClusterId) -{ - return new SqlBuilder().insertFields({ - "UNRELATEDDUPLICATEID": pUnrelatedContactId, - "SOURCEDUPLICATEID": pSourceContactId, - "CLUSTERID": pClusterId - }, "UNRELATEDDUPLICATES", "ID"); -} - -/* - * Gets the cluster id in which the given duplicate id exists - * - * @param {String} pDuplicateId whose cluster id should be searched - * @returns {String} Cluster id - */ -DuplicateScannerUtils.getClusterId = function(pDuplicateId) -{ - return newSelect("CLUSTERID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId) - .cell(); + return newSelect("HASDUPLICATE.OBJECT_ROWID").from("HASDUPLICATE") + .where("HASDUPLICATE.OBJECT_TYPE", pEntities, SqlBuilder.IN()) + .and(pOperator.replace("#", "(HASDUPLICATE.DUPLICATECOUNT - (" + subselect.toString() + "))").replace("?", pCount)); } -/* - * Deletes the "unrelated" relation between two duplicates - * - * @param {String} pSourceDuplicateId Id of the source duplicate - * @param {String} pUnrelatedDuplicateId Id of the source duplicate - * @returns {String} Number of records deleted - */ -DuplicateScannerUtils.deleteUnrelatedDuplicateRelation = function(pSourceDuplicateId, pUnrelatedDuplicateId) -{ - return newWhere("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pSourceDuplicateId) - .and("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", 
pUnrelatedDuplicateId) - .deleteData(); -} - -/* - * Deletes all relations to a duplicate id wether the id is the source or the unrelated duplicate in the relation - * - * @param {String} pDuplicateId Duplicate id whose "unrelated" relations are to delete - * @returns {String} Number of records deleted - */ -DuplicateScannerUtils.deleteAllUnrelatedDuplicateRelations = function(pDuplicateId) -{ - DuplicateScannerUtils.deleteUnrelatedDuplicateRelation(pDuplicateId, pDuplicateId); -} - -/* - * Loads all other duplicates from the cluster in which the parameterized duplicate is located - * - * @param {String} pDuplicateId - * @returns {String[]} Array of duplicate ids - */ -DuplicateScannerUtils.getCachedDuplicatesForDuplicateId = function(pDuplicateId) -{ - return newSelect("DUPLICATEID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.CLUSTERID", newSelect("CLUSTERID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId), - SqlBuilder.IN()) - .and("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateId, SqlBuilder.NOT_EQUAL()) - .and("DUPLICATECLUSTERS.DUPLICATEID", newSelect("UNRELATEDDUPLICATEID") - .from("UNRELATEDDUPLICATES") - .where("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pDuplicateId), - SqlBuilder.NOT_IN()) - .and("DUPLICATECLUSTERS.DUPLICATEID", newSelect("SOURCEDUPLICATEID") - .from("UNRELATEDDUPLICATES") - .where("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pDuplicateId), - SqlBuilder.NOT_IN()) - .arrayColumn(); -} - -/* - * Returns all duplicate ids which haven't been marked as unrelated for the given cluster id. - * - * @param {String} pClusterId The clusters id - * @return {String[]} Array of duplicate ids excluding those marked as unrelated - */ -DuplicateScannerUtils.getCachedDuplicatesForClusterId = function(pClusterId) -{ - return newSelect("DUPLICATEID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.DUPLICATEID", newSelect("UNRELATEDDUPLICATEID").from("UNRELATEDDUPLICATES"), - SqlBuilder.NOT_IN()) - .and("DUPLICATECLUSTERS.CLUSTERID", pClusterId) - .arrayColumn(); -} - -/* - * Recreates the cached duplicate clusters based on the configured pattern. <br /> - * The old clusters have to be deleted manually beforehand or by using "deleteClustersByTargetEntity".<br /> - * If there have already been ignored relations between duplicate records, it's advised to call "refreshUnrelatedDuplicateRelations" after the recreation of the duplicates cache.<br /> - * Please check the documentation of the params on how to get the infos required.<br /> - * <br /> - * If the usage of an external webservice has been activated, the search will be executed beforehand and the results will then be given to the pFormatValuesConsumeWebserviceCallback via parameter.<br /> - * To access the values it is advised to run thru the parameter like an array and access its value by key which is the index field name. The entity<br /> - * field names can be converted using DuplicateScannerUtils.translateEntityToIndexFields. For further infos see the example section below.<br /> - * <br /> - * Attention!<br /> - * If it is configured to use the external webservice callback the values have to be in the same format as they are in the parameter of the callback.<br /> - * <br /> - * @param {String} pFilterName Name of the filter to use - * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration - * @param {String} pRecordsBlockSize The values which are checked get loaded in blocks. 
- * @param {String} pFormatValuesConsumeWebserviceCallback Null if no external service is used otherwise a function with one parameter. - * @return {Int} Count of duplicate clusters created - * - * @example - * var filterName = "PersonDuplicates"; - * var targetEntity = "Person_entity"; - * var recordBlockSize = DuplicateScannerUtils.getBlockSize(); - * - * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) - * { - * let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields) - * - * //Run thru every duplicate result an read out the resultfields - * for (let i = 0; i < pPossibleDuplicatesRay.length; i++) - * { - * for (let b = 0; b < resultFields.length; b++) - * { - * let entityFieldName = resultFields[b]; - * let indexFieldName = indexResultFields[entityFieldName]; - * //format values - * } - * } - * //call webservice - * //reformat results to same structure as before - * return pPossibleDuplicatesRay; - * }; - * - * DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity); - * - * DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, pRecordsBlockSize, formatToJsonAndCallWsCallback); - * - * DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity); +/** + * Ignores duplicates corresponding to the type, the source, and the duplicate ids + * + * @param {string} pDuplicateType the duplicate type e.g.: Organisation_entity + * @param {string} pSourceDuplicateId the source duplicate id + * @param {string[]} pUnrelatedDuplicateIds the target duplicate ids */ -DuplicateScannerUtils.rebuildDuplicatesCache = function(pFilterName, pTargetEntity, - pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback) +DuplicateScannerUtils.ignoreDuplicates = function(pDuplicateType, pSourceDuplicateId, pUnrelatedDuplicateIds) { - let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity); - let resultFields = DuplicateScannerUtils.getResultFields(filterName, targetEntity); - - let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity); - let entityFieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern); - let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity); - - let alreadyIdentifiedIds = []; + newWhere("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pSourceDuplicateId) + .and("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pUnrelatedDuplicateIds, SqlBuilder.IN()) + .deleteData(); - let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromFieldConfigs(entityFieldConfigs); - - entityFields.push(entityIdField); + var TABLE_NAME = "UNRELATEDDUPLICATES"; + var COLUMN_NAMES = ["ID", "DUPLICATETYPE", "SOURCEDUPLICATEID", "UNRELATEDDUPLICATEID"]; + var COLUMN_TYPES = db.getColumnTypes(TABLE_NAME, COLUMN_NAMES); - let targetRecords = DuplicateScannerUtils.getEntityRecords(pTargetEntity, entityFields, 0, pRecordsBlockSize); - - let currentRecordIndex = pRecordsBlockSize; - while(targetRecords.length > 0) - { - - foundDuplicateIds = DuplicateScannerUtils.scanRecords(pTargetEntity, targetRecords, - entityFieldConfigs, resultFields, useExternalWebservice, - pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds, indexPattern, entityIdField); - - if (foundDuplicateIds) - alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds); - - if(targetRecords.length < pRecordsBlockSize) - { - break; - } - targetRecords = 
DuplicateScannerUtils.getEntityRecords(pTargetEntity, entityFields, - currentRecordIndex, pRecordsBlockSize); - - currentRecordIndex += pRecordsBlockSize; - } -} - -DuplicateScannerUtils.scanRecords = function(pTargetEntity, pTargetRecordsData, - pEntityFieldConfigs, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds, pIndexPattern, pEntityIdField) -{ - let foundDuplicateIds = []; - - //If the contact id loading query has no results, stop. - //No ids should be deleted if an error has been made in this query. - if(pTargetRecordsData.length <= 0) - return foundDuplicateIds; - - //First it gets checked if the current id has already been identified. If that's the case it'll continue with the next. - //Otherwise an object gets build in the form of ["FilterFieldName" = "FilterFieldValueFromQuery"] with which a scan for possible duplicates get's started - var duplicatesToInsertQueries = []; - for (let b = 0; b < pTargetRecordsData.length; b++) + var stmts = []; + for(let i = 0; i < pUnrelatedDuplicateIds.length; i++) { - let entityFieldValuesRay = DuplicateScannerUtils.buildEntityFieldConfigs(pEntityFieldConfigs, pTargetRecordsData[b]); - - //The first field in this Array must always be the configured id field. This is ensured using onValidation-logic - let idValue = pTargetRecordsData[b][pEntityIdField]; - - //If the current Id has already been identified, continue - if(pAlreadyIdentifiedIds.indexOf(pTargetRecordsData[b][pEntityIdField]) > -1) - { - continue; - } - - let foundDuplicates = _DuplicateScannerUtils._scanForDuplicates(pTargetEntity, - entityFieldValuesRay, pResultFields, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern) - - if(foundDuplicates == null || foundDuplicates.length == 0) - { - continue; - } - //Insert all found duplicate ids into an cache array because those ids don't have to be checked again lateron. - for (let i = 0; i < foundDuplicates.length; i++) - { - let localId = foundDuplicates[i][indexsearch.FIELD_ID]; - foundDuplicateIds.push(localId); - } - - pAlreadyIdentifiedIds = pAlreadyIdentifiedIds.concat(foundDuplicateIds); - - //The duplicates list contains only the found duplicates to the original id, therefore it get's added manually - foundDuplicateIds.push(pTargetRecordsData[b][pEntityIdField]); - - let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds, pTargetEntity) - duplicatesToInsertQueries = duplicatesToInsertQueries.concat(insertQueriesRay); - foundDuplicateIds = []; + var columnValues = [util.getNewUUID(), pDuplicateType, pSourceDuplicateId, pUnrelatedDuplicateIds[i]]; + stmts.push([TABLE_NAME, COLUMN_NAMES, COLUMN_TYPES, columnValues]); } - db.inserts(duplicatesToInsertQueries, db.getCurrentAlias(), 10 * datetime.ONE_MINUTE); - return foundDuplicateIds; + db.inserts(stmts); } -/* - * Searches for a cluster which contains the duplicates specified by the parameterized array. <br /> - * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned. 
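A brief usage sketch for the two helpers added above; the entity names, the threshold, the CONTACT table/column and the placeholder ids are illustrative assumptions, not part of the changeset:

// Sketch: ids of records that still have at least one non-ignored duplicate,
// e.g. for a FilterExtension condition. "#" is replaced by the remaining duplicate
// count from HASDUPLICATE, "?" by the given count value.
var duplicateIdsSubselect = DuplicateScannerUtils.getDuplicateConditionalListSql(
    ["Organisation_entity", "Person_entity"], 0, "# > ?");
var contactIdsWithDuplicates = newSelect("CONTACT.CONTACTID")   // assumption: OBJECT_ROWID holds contact ids
    .from("CONTACT")
    .where("CONTACT.CONTACTID", duplicateIdsSubselect, SqlBuilder.IN())
    .arrayColumn();

// Sketch: mark two scan hits as unrelated to the current record (placeholder ids).
DuplicateScannerUtils.ignoreDuplicates("Person_entity", "<currentContactId>", ["<duplicateId1>", "<duplicateId2>"]);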
+/** + * Removes ignored duplicates from an array of duplicate ids * - * @param {String} pNewRecordId The id of the record which was used to scan for duplicates - * @param {String[]} pDuplicateIds Duplicate ids used to search for a cluster containing them - * @param {String} pTargetEntity Entity which has been configured - * @returns {String} A clusterid if a matching cluster has been found, otherwise "" + * @param {string} pTargetEntity the target entity e.g.: Organisation_entity + * @param {string} pTargetUid the source duplicate id + * @param {string[]} pIdArray the array of duplicate ids + * + * @returns {string[]} the filtered duplicate ids */ -DuplicateScannerUtils.cacheNewScanResults = function(pNewRecordId, pDuplicateIds, pTargetEntity) +DuplicateScannerUtils.filterIgnored = function(pTargetEntity, pTargetUid, pIdArray) { - let duplicateIds = []; - //Run thru every duplicate result and read out the id. - //Do it now to have a simple array on all usages lateron. - for (let i = 0; i < pDuplicateIds.length; i++) + if(pIdArray.length == 0) { - let duplicateContactId = pDuplicateIds[i][indexsearch.FIELD_ID]; - duplicateIds.push(duplicateContactId); + return pIdArray; } - - let clusterId = DuplicateScannerUtils.getClusterWithIdenticalDuplicates(duplicateIds); - - //If no cluster has beend found, create a new one with all found duplicateIds, - //otherwise add the id to the existing cluster - let idRayToInsert = []; - if(clusterId == undefined || clusterId == null || clusterId == "") - { - idRayToInsert = duplicateIds; - idRayToInsert.push(pNewRecordId); - } - else - idRayToInsert.push(pNewRecordId); - - insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(idRayToInsert, pTargetEntity, clusterId) - - return db.inserts(insertQueriesRay); + var ignoreTable = newSelect(["UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID"]) + .from("UNRELATEDDUPLICATES") + .where("UNRELATEDDUPLICATES.DUPLICATETYPE", pTargetEntity) + .and("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pTargetUid) + .and("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pIdArray, SqlBuilder.IN()) + .arrayColumn(); + return pIdArray.filter(function(curr) { + return !ignoreTable.includes(curr); + }); } -/* - * Searches for a cluster which contains the duplicates specified by the parameterized array. <br /> - * The contents of the cluster have to be identical, if no fitting cluster could be found an empty string is returned. +/** + * Deletes duplicates corresponding to the given type and ids * - * @param {String[]} pDuplicateIds Duplicate ids which should be in the same cluster - * @returns {String} Id of the cluster which contains all given duplicate ids or "" + * @param {string} pObjectType the duplicate type e.g.: Organisation_entity + * @param {string[]} pObjectRowIds the duplicate ids e.g.
some organisation ids */ -DuplicateScannerUtils.getClusterWithIdenticalDuplicates = function(pDuplicateIds) +DuplicateScannerUtils.deleteHasDuplicateEntries = function(pObjectType, pObjectRowIds) { - let RESULT_NO_CLUSTER_FOUND = ""; - - if(pDuplicateIds.length < 1) - return RESULT_NO_CLUSTER_FOUND; - - let clusterIdSelect = newSelect("distinct CLUSTERID") - .from("DUPLICATECLUSTERS") - .where(); - - for (let i = 0; i < pDuplicateIds.length; i++) - { - clusterIdSelect.and("DUPLICATECLUSTERS.CLUSTERID", newSelect("CLUSTERID").from("DUPLICATECLUSTERS").where("DUPLICATECLUSTERS.DUPLICATEID", pDuplicateIds[i]), - SqlBuilder.IN()); - } - - let foundClusterId = clusterIdSelect.cell(); - - if(foundClusterId == null || foundClusterId == "") - return RESULT_NO_CLUSTER_FOUND; - - let duplicatesInCluster = newSelect("DUPLICATEID") - .from("DUPLICATECLUSTERS") - .where("DUPLICATECLUSTERS.CLUSTERID", foundClusterId) - .arrayColumn(); - - /* - * A cluster has been searched which contains all duplicate ids as specified via parameter. - * There's the possibility that this cluster contains even more duplicates than specified via the parameter. - * In this case, the cluster and the parameterized duplicateids are not identical - * which means a new cluster has to be created. - */ - if(pDuplicateIds.length != duplicatesInCluster.length) - return RESULT_NO_CLUSTER_FOUND; - else - return foundClusterId; + newWhere("HASDUPLICATE.OBJECT_TYPE", pObjectType) + .and("HASDUPLICATE.OBJECT_ROWID", pObjectRowIds, SqlBuilder.IN()) + .deleteData(); } -DuplicateScannerUtils.getEntityRecords = function(pTargetEntity, pEntityFields, pStartRow, pCountRecordsToLoad) -{ - let getRowsConfig = entities.createConfigForLoadingRows() - .entity(pTargetEntity) - .fields(pEntityFields) - .count(pCountRecordsToLoad) - .startrow(pStartRow); - return entities.getRows(getRowsConfig); -} -/* - * Loads the configured resultfields as array - * - * @param {String} pFilterName Name of the filter - * @param {String} pTargetEntity Entity which has been configured - * @returns {String[]} Resultfields as array +/** + * Manually inserts a duplicate entry by the given type, id and count + * + * @param {string} pObjectType the type e.g.: Organisation_entity + * @param {string} pObjectRowId the id e.g. an organisation id + * @param {number} pCount the number of duplicates found for this record */ -DuplicateScannerUtils.getResultFields = function(pFilterName, pTargetEntity) +DuplicateScannerUtils.insertHasDuplicateEntry = function(pObjectType, pObjectRowId, pCount) { - return newSelect("dsrfc.ENTITY_FIELD_NAME") - .from("DUPLICATESCANNERRESULTFIELDCONFIG dsrfc") - .join("DUPLICATESCANNER", "DUPLICATESCANNER.ID = dsrfc.DUPLICATESCANNER_ID") - .where("DUPLICATESCANNER.FILTER_NAME", pFilterName) - .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) - .arrayColumn(); + var COLUMNS = [ + "HASDUPLICATEID", + "OBJECT_TYPE", "OBJECT_ROWID", + "DUPLICATECOUNT" + ]; + var COLUMN_TYPES = db.getColumnTypes("HASDUPLICATE", COLUMNS); + var values = [ + util.getNewUUID(), + pObjectType, pObjectRowId, + pCount.toFixed(0) + ]; + db.insertData("HASDUPLICATE", COLUMNS, COLUMN_TYPES, values); } -/* - * Scans for duplicates based on the configured pattern and the selected id field<br /> - * All values to the used placeholders have to be present in "pValuesToCheck"<br /> - * First all placeholders in the pattern will be replaced with their respective values. - * Then, the pattern is extended wo exclude the record on which the search is based on.
- *<br /> - * If the usage of an external webservice has been activated, the result will then be given to the pFormatValuesConsumeWebserviceCallback via parameter.<br /> - * To access the values it is advised to run thru the parameter like an array and access its value by key which is the index field name. The entity - * field names can be converted using DuplicateScannerUtils.translateEntityToIndexFields - * <br /> - * <br /> - * Attention!<br /> - * If it's a single scanForDuplicates call it doesn't matter what the callback returns because after the callback, no more modifications follow before - * returning the data.<br /> - * If it's inside the RebuildCache the values have to be in the same format as the parameter - * - * @param {String} pFilterName Name of the filter - * @param {String} pTargetEntity Respective target entity - * @param {{"key", "value"}} pValuesToCheck An object with key value pairs which hold the name of the entity field as key and it's value as value. See the example "valuesToCheck" - * @param {function} pFormatValuesConsumeWebserviceCallback Null if no external service is used otherwise a function with one parameter. - * If the function is called is based on the configuration of the current scanner - * @returns {[["key", "value"]]} Array of Key-Value-Pairs based on the configured resultfields, if an external webservices was used - * the structure is defined by the parameterized function "pFormatValuesConsumeWebserviceCallback" - * - * @example - * var filterName = "PersonDuplicates"; - * let targetEntity = "Person_entity"; - * let valuesToCheck = {}; - * var entityModel = project.getEntityStructure(targetEntity); +/** + * Updates HasDuplicate by entity (used in onInsert and onUpdate) * - * //Read the values of all available entity fields and write the fieldname/value combination - * //as key/value pairs into an object. This is used to trigger the scan for duplicates - * let fieldValue = ""; - * let entityFields = []; - * for (fieldname in entityModel.fields) - * { - * field = entityModel.fields[fieldname]; - * if(field.fieldType == project.ENTITYFIELDTYPE_FIELD) - * { - * fieldValue = vars.get("$field."
+ field.name); - * - * if(fieldValue != null && fieldValue != "") - * { - * valuesToCheck[field.name] = fieldValue; - * } - * } - * } - * - * let formatToJsonAndCallWsCallback = function(pPossibleDuplicatesRay) - * { - * let indexResultFields = DuplicateScannerUtils.TranslateEntityToIndexFields(targetEntity, resultFields) - * - * //Run thru every duplicate result an read out the resultfields - * for (let i = 0; i < pPossibleDuplicatesRay.length; i++) - * { - * for (let b = 0; b < resultFields.length; b++) - * { - * let entityFieldName = resultFields[b]; - * let indexFieldName = indexResultFields[entityFieldName]; - * //format values - * } - * } - * //call webservice - * //reformat results to same structure as before - * return pPossibleDuplicatesRay; - * }; - * - * //The result values can be accessed as seen above in "formatToJsonAndCallWsCallback" - * DuplicateScannerUtils.ScanForDuplicates(filterName, targetEntity, valuesToCheck, - * formatToJsonAndCallWsCallback); + * @param {string} pTargetEntity the target entity e.g.: Organisation_entity */ -DuplicateScannerUtils.scanForDuplicates = function(pFilterName, pTargetEntity, pValuesToCheck, pFormatValuesConsumeWebserviceCallback) +DuplicateScannerUtils.updateHasDuplicateEntry = function(pTargetEntity) { - let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity); - let resultFields = DuplicateScannerUtils.getResultFields(pFilterName, pTargetEntity); - - let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity); - let entityFieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern); - let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity); - - let entityFieldConfigValuesRay = DuplicateScannerUtils.buildEntityFieldConfigs(entityFieldConfigs, pValuesToCheck); - - //The first field in this Array must always be the configured id field. - let idValue = pValuesToCheck[entityIdField]; + var scanner = DuplicateScannerUtils.getScannerByEntity(pTargetEntity); + var indexsearchFilter = IndexsearchFilterUtils.fromFilter(scanner.filter); - return _DuplicateScannerUtils._scanForDuplicates(pTargetEntity, - entityFieldConfigValuesRay, resultFields, idValue, - pFormatValuesConsumeWebserviceCallback, useExternalWebservice, indexPattern) -} + var fields = indexsearchFilter.getFields(); + fields.add(scanner.idField); + fields = Array.from(fields); -/* - * Executes a indexsearch.lookupIndexField for eacht entity field in the parameterized array - * and returns it as Map. 
- * - * @param {String} pEntityName ...Name of the entity - * @param {String[]} pEntityFields Array of the entities Fields to translate to index fields - * @returns Map-like object where (key = entity field) and (value = index field) - * - * @example - * let entityResultFields = ["LASTNAME"]; - * let entityIndexFields = DuplicateScannerUtils.translateEntityToIndexFields("Person_entity", entityResultFields); - * - */ -DuplicateScannerUtils.translateEntityToIndexFields = function(pEntityName, pEntityFields) -{ - let entityIndexFields = {}; - for (let i = 0; i < pEntityFields.length; i++) + var entityObj = {}; + for(let i = 0; i < fields.length; i++) { - let entityFieldName = pEntityFields[i]; - - //todo remove if api has been fixed - if(entityFieldName.startsWith(pEntityName)) - entityFieldName = entityFieldName.replace(pEntityName + ".", ""); - - let indexField = indexsearch.lookupIndexField(pEntityName, entityFieldName); - entityIndexFields[pEntityFields[i]] = indexField; + entityObj[fields[i]] = vars.get("$field." + fields[i]); } - return entityIndexFields; -} - -/* - * - * Merges the source person into the target person. - * This - * - replaces the source's with the target's contactid in a predefined set of tables. - * - resets the standard communications of the source contact and keeps the ones of the target. - * - updates participants of campaigns and removes obsolet ones(which would be duplicates) - * - deletes the source person and contact - * - deletes the duplicate record, if one exists - * - deletes all unrelated-duplicate-relations containing the source contact id - * - * @param {String} pSourceContactId The contact to be integrated into another - * @param {String} pTargetContactId The contact in which the source gets integrated - * @returns {Boolean} if the merge was sucessful - */ -DuplicateScannerUtils.mergePerson = function(pSourceContactId, pTargetContactId) -{ - var sourcePersonId = newSelect("PERSON_ID") - .from("CONTACT") - .where("CONTACT.CONTACTID", pSourceContactId) - .cell(); - var targetPersonId = newSelect("PERSON_ID") - .from("CONTACT") - .where("CONTACT.CONTACTID", pTargetContactId) - .cell(); - - _DuplicateScannerUtils._deleteUniqueAttributes(pSourceContactId, pTargetContactId); - var isLinkedDataUpdated = _DuplicateScannerUtils._migrateLinkedContactData(pSourceContactId, pTargetContactId); - var isParticipantsUpdated = _DuplicateScannerUtils._migrateParticipantsToNewContact("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", - pSourceContactId, pTargetContactId); - _DuplicateScannerUtils._updateOtherContacts(pSourceContactId, sourcePersonId, targetPersonId); - - var deleteStatements = []; - if (sourcePersonId != targetPersonId) - deleteStatements.push(newWhere("PERSON.PERSONID", sourcePersonId).buildDeleteStatement()); - - deleteStatements.push(newWhere("CONTACT.CONTACTID", pSourceContactId).buildDeleteStatement()); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); - - //update binary - var metaData = db.getBinaryMetadata("CONTACT", "DOCUMENT", pSourceContactId, true, SqlUtils.getBinariesAlias()); - metaData.forEach(function (binaryMetaData) - { - db.updateBinaryAssignment(binaryMetaData.id, "CONTACT", "DOCUMENT", pTargetContactId, SqlUtils.getBinariesAlias()); - }); - var deletedRows = db.deletes(deleteStatements) - - DuplicateScannerUtils.deleteCachedDuplicate(pSourceContactId); - - return isLinkedDataUpdated || isParticipantsUpdated || deletedRows > 0; -} - 
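A minimal sketch of how the HASDUPLICATE helpers added above work together; the entity name, the row id and the scan result below are placeholders, not taken from the changeset:

// Sketch: refresh the cached duplicate count for a single record by hand.
var objectType = "Organisation_entity";
var objectRowId = "<someContactId>";
var foundIds = ["<hit1>", "<hit2>", "<hit3>"];   // assumed result of a previous duplicate scan
var remainingIds = DuplicateScannerUtils.filterIgnored(objectType, objectRowId, foundIds);
DuplicateScannerUtils.deleteHasDuplicateEntries(objectType, [objectRowId]);  // drop a possibly stale entry first
if (remainingIds.length > 0)
    DuplicateScannerUtils.insertHasDuplicateEntry(objectType, objectRowId, remainingIds.length);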
-DuplicateScannerUtils.createMergeSuccessActivity = function(pSourceContactId, pTargetContactId, pCurrentContactId, pContext) -{ - var activityDataForInsert = { - subject: translate.withArguments("A %0 record has been merged", [pContext]), - content: translate.withArguments("%0 with ID \"%1\" has been integrated into the %0 with the ID \"%2\"", [pContext, pSourceContactId, pTargetContactId]), - //categoryKeywordId: $KeywordRegistry.ac - directionKeywordId: $KeywordRegistry.activityDirection$internal(), - responsibleContactId: pCurrentContactId - }; - var activityLinks = [[pContext, pTargetContactId]]; - - return ActivityUtils.insertNewActivity(activityDataForInsert, activityLinks, null, db.getCurrentAlias()); -} - -DuplicateScannerUtils.mergeOrganisation = function(pSourceContactId, pTargetContactId) -{ - var sourceOrganisationId = newSelect("ORGANISATION_ID") - .from("CONTACT") - .where("CONTACT.CONTACTID", pSourceContactId) - .cell(); - - var targetOrganisationId = newSelect("ORGANISATION_ID") - .from("CONTACT") - .where("CONTACT.CONTACTID", pTargetContactId) - .cell(); - - _DuplicateScannerUtils._deleteUniqueAttributes(pSourceContactId, pTargetContactId); - _DuplicateScannerUtils._migrateLinkedContactData(pSourceContactId, pTargetContactId); - _DuplicateScannerUtils._migrateParticipantsToNewContact("CAMPAIGNPARTICIPANT", "CONTACT_ID", "CAMPAIGN_ID", - pSourceContactId, pTargetContactId); - _DuplicateScannerUtils._migratePersonsToNewOrganisation(sourceOrganisationId, targetOrganisationId); + var selfId = entityObj[scanner.idField]; - var deleteStatements = []; - deleteStatements.push(newWhere("CONTACT.CONTACTID", pSourceContactId).buildDeleteStatement()); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteOrganisationAndContactQuery(sourceOrganisationId, pSourceContactId)); - deleteStatements = deleteStatements.concat(_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery(pSourceContactId)); + var indexPattern = indexsearchFilter.buildQuery(entityObj); + var ids = DuplicateScannerUtils.getDuplicateIds(pTargetEntity, indexPattern, selfId); - //update binary - var metaData = db.getBinaryMetadata("CONTACT", "DOCUMENT", pSourceContactId, true, SqlUtils.getBinariesAlias()); - metaData.forEach(function (binaryMetaData) - { - db.updateBinaryAssignment(binaryMetaData.id, "CONTACT", "DOCUMENT", pTargetContactId, SqlUtils.getBinariesAlias()); - }); - var deletedRows = db.deletes(deleteStatements) - - DuplicateScannerUtils.deleteCachedDuplicate(pSourceContactId); - - return deletedRows >= 2; -} - -/* - * Creates an array of arrays containing the entity field config paired with it's value. - * - * @param {[]} pDuplicateFieldsConfig An Array with the configured fields in the form of [ENTITY_FIELD, IS_ID, USE_FOR_SEARCH]. @see LoadDuplicateIndexFieldsConfiguration() - * @param {{"key", "value"}} pTargetRecordData One record containing the values for the configured fields. It's in the format of {"key(=EntityFieldName", "Value"} - * @return {[[]]} An array of arrays containing the entity field config an its value. 
["{entityfield: FIRSTNAME}", "PETER"]] - * @example - * pDuplicateFieldsConfig - * ["CONTACTID", true, false] - * ["FIRSTNAME", false, true] - * - * pTargetRecordData - * ["d786045c-8b21-4f22-b6d9-72be9f61c04d", "PETER"] - * - * => ["{entityfield: FIRSTNAME}", "PETER"]] - */ -DuplicateScannerUtils.buildEntityFieldConfigs = function(pDuplicateFieldsConfig, pTargetRecordData) -{ - let INDEX_CONFIG_ENTITY_FIELD = 0; - let entityFieldConfigValuesRay = []; - /* - * Based on the parameterized filter field names and the values loaded via the query, - * an array which contains records in the style of ["FilterFieldName", "FilterFieldValueFromQuery"] gets created. - * This is mandatory to run the scan for this record. - */ - for (let i = 0; i < pDuplicateFieldsConfig.length; i++) + DuplicateScannerUtils.deleteHasDuplicateEntries(pTargetEntity, [selfId]); + if(ids.length > 0) { - let fieldConfig = JSON.parse("{" + pDuplicateFieldsConfig[i] + "}"); - let entityField = fieldConfig.entityfield; - let entityFieldValue = pTargetRecordData[entityField]; - if(entityFieldValue == null) - entityFieldValue = ""; - else - entityFieldValue = indexsearch.escapeString(entityFieldValue).trim(); - - let exclude = fieldConfig.exclude; - if (exclude) - { - exclude.forEach(function (excludeValue) - { - entityFieldValue = " " + entityFieldValue + " "; - entityFieldValue = entityFieldValue.replace(new RegExp( " " + excludeValue + " ", "gi"), ""); - }); - } - - let valuelength = fieldConfig.length; - if ( valuelength ) - entityFieldValue = entityFieldValue.substr(0, parseInt(valuelength)) + "*"; - - let empty = fieldConfig.empty; - if ( !empty && entityFieldValue == "") - entityFieldValue = "*"; - - entityFieldValue = entityFieldValue.trim(); - entityFieldConfigValuesRay.push([pDuplicateFieldsConfig[i], entityFieldValue]); + DuplicateScannerUtils.insertHasDuplicateEntry(pTargetEntity, selfId, ids.length); } - return entityFieldConfigValuesRay.length > 0 ? entityFieldConfigValuesRay : [["", ""]]; -} - -DuplicateScannerUtils.getBlockSize = function() -{ - return project.getPreferenceValue("custom.duplicates.dataBlockSize", "5000"); -} - -DuplicateScannerUtils.getEntityFieldsFromConfig = function(pFilterName, pTargetEntity) -{ - let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity); - if(indexPattern == null || indexPattern == "") - return []; - let fieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern); - if(fieldConfigs == null || fieldConfigs.length < 1) - return []; - let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromFieldConfigs(fieldConfigs); - let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity); - entityFields.push(entityIdField); - return entityFields; } /** - * Loads the configured entity fields required for the given duplicate scanner. 
+ * Returns a DuplicateScanner object by the given entity name + * + * @param {string} pTargetEntity a entity name e.g.: Organisation_entity * - * @param {String} pFilterName the name of the scanner - * @param {String} pTargetEntity the target entity - * @return {Object} an object with two properties: - * <ul> - * <li>entityFields: array of entity fields</li> - * <li>entityIdField: the id field name as string</li> - * </ul> + * @returns {object} the scanner object */ -DuplicateScannerUtils.getEntityFieldObjectFromConfig = function (pFilterName, pTargetEntity) +DuplicateScannerUtils.getScannerByEntity = function(pTargetEntity) { - var indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity); - if (!indexPattern) - return null; - var fieldConfigs = _DuplicateScannerUtils._loadEntityFieldConfigsFromPattern(indexPattern); - if (fieldConfigs == null || fieldConfigs.length === 0) - return null; - + var duplicateScanner = newSelect([ + "DUPLICATESCANNER.ID_FIELD_NAME", + "DUPLICATESCANNER.SCAN_PATTERN" + ]) + .from("DUPLICATESCANNER") + .where("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity).arrayRow(); + var filterObj = JSON.parse(duplicateScanner[1]).filter; return { - entityFields : _DuplicateScannerUtils._loadEntityFieldsFromFieldConfigs(fieldConfigs), - entityIdField : _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity) + idField: duplicateScanner[0], + filter: filterObj }; } -DuplicateScannerUtils.getUnrelatedRelationsForDuplicate = function(pDuplicateId) -{ - let unrelatedIds = []; - let duplicateIds = newSelect("SOURCEDUPLICATEID, UNRELATEDDUPLICATEID") - .from("UNRELATEDDUPLICATES") - .where("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pDuplicateId) - .or("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pDuplicateId) - .table(); - let sourceDuplicateId = ""; - let unrelatedDuplicateId = ""; - for (let i = 0; i < duplicateIds.length; i++) - { - sourceDuplicateId = duplicateIds[i][0]; - unrelatedDuplicateId = duplicateIds[i][1]; - - if(sourceDuplicateId != null && sourceDuplicateId != "" && sourceDuplicateId != pDuplicateId) - { - unrelatedIds.push(sourceDuplicateId); - } - else if(unrelatedDuplicateId != null && unrelatedDuplicateId != "" && unrelatedDuplicateId != pDuplicateId) - { - unrelatedIds.push(unrelatedDuplicateId); - } - } - return unrelatedIds; -} - -function _DuplicateScannerUtils() {} - -_DuplicateScannerUtils._loadEntityFieldsFromFieldConfigs = function(pEntityFieldConfigs) -{ - let fieldNames = []; - for (let i = 0; i < pEntityFieldConfigs.length; i++) - { - let fieldConfig = JSON.parse("{" + pEntityFieldConfigs[i] + "}"); - let entityField = fieldConfig.entityfield; - fieldNames.push(entityField); - } - return fieldNames; -} - -/* - * Gets the Pattern for the scanner - * A pattern usually contains placeholders in the style of "{entityFieldName]" +/** + * Calls the index search and returns the found duplicate ids * - * @param {String} pScannerName Name of the filter to use - * @param {String} pTargetEntity The target entity which has been assigned to the filters configuration - * @returns {String} Scan pattern as string - */ -_DuplicateScannerUtils._loadIndexPattern = function(pScannerName, pTargetEntity) -{ - let scanPattern = _DuplicateScannerUtils._filterToScanPattern( newSelect("SCAN_PATTERN") - .from("DUPLICATESCANNER") - .where("DUPLICATESCANNER.FILTER_NAME", pScannerName) - .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) - .cell()); - scanPattern = scanPattern.trim(); - return scanPattern; -} - - 
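A short sketch of reading a scanner configuration with getScannerByEntity; the entity name is an assumption:

// Sketch: load the scanner configured for Person_entity and inspect it.
var scanner = DuplicateScannerUtils.getScannerByEntity("Person_entity");
var idFieldName = scanner.idField;   // configured ID_FIELD_NAME
var filterTree = scanner.filter;     // filter object parsed from SCAN_PATTERN
// the fields the index pattern will be built from:
var scannedFields = Array.from(IndexsearchFilterUtils.fromFilter(filterTree).getFields());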
-_DuplicateScannerUtils._filterToScanPattern = function(filterString) { - if (filterString == "") - return ""; - try - { - var filter = JSON.parse(filterString); - } - catch (err) - { - return ""; - } - return _DuplicateScannerUtils._filterChildsToScanPattern(filter.filter.childs, filter.filter.operator); -} - -_DuplicateScannerUtils._filterChildsToScanPattern = function(childs, operator) { - let group = ""; - for (let i = 0; i < childs.length; i++) { - if (group != ""){ - group = group + " " + operator+ " "; - } - if (childs[i].type == "row"){ - group = group + _DuplicateScannerUtils._filterChildsRowToScanPattern(childs[i].name, childs[i].operator, childs[i].value); - } else if (childs[i].type == "group") { - group = group + "(" + _DuplicateScannerUtils._filterChildsToScanPattern(childs[i].childs, childs[i].operator) + ")"; - } - } - return group; -} - -_DuplicateScannerUtils._filterChildsRowToScanPattern = function (name, operator, value) { - let row = ""; - row = row + name.toLowerCase() + ':({"entityfield":"' + name + '"'; - switch (operator){ - case "NOT_EQUAL": - case "CONTAINSNOT": - row = row + ', "exclude":' + value; - case "ISNOTNULL": - row = row + ', "empty":' + false; - } - row = row + '})'; - return row; -} - -_DuplicateScannerUtils._loadEntityFieldConfigsFromPattern = function(indexPattern) -{ - return indexPattern.match(/[^{}]+(?=\})/g); -} - -_DuplicateScannerUtils._replacePlaceholderForValuesInPattern = function(pIndexPattern, pEntityFieldValueRays) -{ - let INDEX_ENTITY_FIELD_NAME = 0; - let INDEX_ENTITY_FIELD_VALUE = 1; - - let placeholder = ""; - let fieldValue = ""; - for (let i = 0; i < pEntityFieldValueRays.length; i++) - { - placeholder = "{" + pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_NAME] + "}"; - fieldValue = pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_VALUE]; - pIndexPattern = pIndexPattern.replace(placeholder, fieldValue); - } - return pIndexPattern; -} - -_DuplicateScannerUtils._loadEntityIdField = function(pFilterName, pTargetEntity) -{ - return newSelect("ID_FIELD_NAME") - .from("DUPLICATESCANNER") - .where("DUPLICATESCANNER.FILTER_NAME", pFilterName) - .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) - .cell(); -} - -/* - * @see DuplicateScannerUtils.scanForDuplicates for the documentation - */ -_DuplicateScannerUtils._scanForDuplicates = function(pTargetEntity, pEntityFieldConfigValuesRay, -pResultFields, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern) -{ - //No filterfields/indexpattern => No indexsearch - if(pEntityFieldConfigValuesRay.length < 1 || pIndexPattern == null || pIndexPattern == "") - return null; - - let possibleDuplicates = []; - let ignoreSourceRecordPattern = _DuplicateScannerUtils._getIgnoreSourceRecordPattern(pRecordIdValueToIgnore); - - let indexPatternWithValues = _DuplicateScannerUtils._replacePlaceholderForValuesInPattern(pIndexPattern, pEntityFieldConfigValuesRay); - - possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, indexPatternWithValues, ignoreSourceRecordPattern, pResultFields, 100); - - if(possibleDuplicates == null) - return null; - - possibleDuplicates = possibleDuplicates[indexsearch.HITS]; - - if(pUseExternalWebservice && pFormatValuesConsumeWebserviceCallback != null) - possibleDuplicates = pFormatValuesConsumeWebserviceCallback.apply(this, [possibleDuplicates]); - - return possibleDuplicates; -} - -/* - * Returns a bool which say wether or not an external service should be used - * - * @param {String} pFilterName Name of the 
filter - * @param {String} pTargetEntity Entity which has been configured - * @returns {Bool} True = use, False = no use - */ -_DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetEntity) -{ - let isUseWebservice = newSelect("EXTERNAL_SERVICE_USAGE_ALLOWED") - .from("DUPLICATESCANNER") - .where("DUPLICATESCANNER.FILTER_NAME", pFilterName) - .and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity) - .cell(); - return isUseWebservice != 0; -} - -/* - * Runs the indexsearch based on the given parameters. - * If the "pEntityFieldValueRays" is empty, no search will be executed. + * @param {string} pTargetEntity search for duplicate in this entity e.g.: Organisation_entity + * @param {string} pIndexPattern the index pattern as string + * @param {string=} pSelfId optionally the own id (this will be excluded) * - * @param {String} pTargetEntity Entity which has been configured - * @param {String} pIndexPatternWithValues The pattern used to search. Has to contain the values already. - * @param {String} pIdFilter The filter pattern used to exclude the current record from the result. - * @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.getResultFields" - * @param {String} pResultSetRows todo - * @returns {[["key", "value"]] || null} Array of Key-Value-Pairs based on the configured pResultFields, if no pattern exists null + * @returns {string[]} the duplicate ids */ -_DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pIndexPatternWithValues, pIdFilter, pResultFields, pResultSetRows) +DuplicateScannerUtils.getDuplicateIds = function(pTargetEntity, pIndexPattern, pSelfId) { - //logging.log(pIndexPatternWithValues) - //The indexPattern can't be null because it is required to run the search. - if(pIndexPatternWithValues == null || pIndexPatternWithValues == "") - return null; let indexQuery = indexsearch.createIndexQuery() - .setPattern(pIndexPatternWithValues) - .setEntities([pTargetEntity]) -// .addFilter(pIdFilter); - // .setDefaultOperator(indexsearch.OPERATOR_AND) - //.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME", "Person_entity.CONTACTID") - //.setRows(pResultSetRows); - if(pIdFilter && pIdFilter.length > 0) - indexQuery.addFilter(pIdFilter); - - indexQuery = _DuplicateScannerUtils._setResultFields(indexQuery, pResultFields); - return indexsearch.searchIndex(indexQuery); -} - -/* - * Sets each field of the given array as resultfield on the given query. - * Supports indexsearch internal fields aswell - * (indexsearch.FIELD_ID, indexsearch.FIELD_TITLE, indexsearch.FIELD_TYPE, indexsearch.FIELD_DESCRIPTION) - * - * @param {IndexQuery} pIndexQuery An indexquery created with "indexsearch.createIndexQuery()" - * @param {String[]} pResultFields The result field config. 
Use "DuplicateScannerUtils.getResultFields" - * @returns {IndexQuery} IndexQuery with the resultfields set - */ -_DuplicateScannerUtils._setResultFields = function(pIndexQuery, pResultFields) -{ - let resultIndexFields = []; - let resultFields = []; - for (let i = 0; i < pResultFields.length; i++) + .setPattern(pIndexPattern) + .setEntities([pTargetEntity]); + if(pSelfId) { - if(pResultFields[i] == indexsearch.FIELD_ID - || pResultFields[i] == indexsearch.FIELD_TITLE - || pResultFields[i] == indexsearch.FIELD_TYPE - || pResultFields[i] == indexsearch.FIELD_DESCRIPTION) - { - resultIndexFields.push(pResultFields[i]); - } - else - resultFields.push(pResultFields[i]); + // creates -_local_id_:"<pSelfId>" + var optFilter = indexsearch.buildPattern(indexsearch.createPatternConfig().minus( + indexsearch.createPhraseTerm(pSelfId) + .setIndexField(indexsearch.FIELD_ID) + )); + indexQuery = indexQuery.addFilter(optFilter); } - if(resultIndexFields.length > 0) - pIndexQuery = pIndexQuery.addResultIndexFields(resultIndexFields); - - if(resultFields.length > 0) + var indexResult = indexsearch.searchIndex(indexQuery); + indexResult = indexResult[indexsearch.HITS]; + if(indexResult === null || indexResult.length === 0) { - pIndexQuery = pIndexQuery.addResultFields(resultFields); + return []; } - return pIndexQuery; -} - -/* - * Creates the queries to insert new duplicates into a new cluster based on the pDuplicatesRay - * - * @param {String[]} pDuplicatesRay Array of duplicate ids - * @param {String} pTargetEntity Entity which has been configured - * @returns {String[]} Array of insert queries - */ -_DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicatesRay, pTargetEntity) -{ - return _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(pDuplicatesRay, pTargetEntity, util.getNewUUID()); -} - -/* - * Creates the queries to insert new duplicates into a new cluster based on the pDuplicatesRay - * - * @param {String[]} pDuplicatesRay Array of duplicate ids - * @param {String} pTargetEntity Entity which has been configured - * @param {String} pClusterId Clusters id to which the duplicates are in relation - * @returns {String[]} Array of insert queries - */ -_DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicatesRay, pTargetEntity, pClusterId) -{ - let duplicatesToInsertQueries = []; - let cols = ["ID", "CLUSTERID", "DUPLICATEID", "TARGET_ENTITY"]; - if (!pClusterId) - pClusterId = util.getNewUUID(); - - for (let i = 0; i < pDuplicatesRay.length; i++) + var duplicateIds = []; + for(let i = 0; i < indexResult.length; i++) { - let newId = util.getNewUUID(); - let valuesToInsert = [newId, pClusterId, pDuplicatesRay[i], pTargetEntity]; - - duplicatesToInsertQueries.push(["DUPLICATECLUSTERS", cols, null, valuesToInsert]); + duplicateIds.push(indexResult[i][indexsearch.FIELD_ID]); } - return duplicatesToInsertQueries; + return duplicateIds; } -/* - * Deletes all clusters - * - * @returns {String} Count of records deleted - */ -_DuplicateScannerUtils._deleteDuplicateClusters = function () -{ - return db.deleteData("DUPLICATECLUSTERS"); -} - -/* - * All records with contactId = sourceContactId get updated, which are not assigned to the same "group" as the targetContactId. - * This is because otherwise there would now be in total two "participants" in the same "group" as opposed to one before. - * Also if they already are in the same "group" those records shouldn't be updated because it would lead to the same outcome. 
- * - * @returns {Boolean} If records have been updated +/** + * Does the same as getDuplicateIds but takes in a scanner (by entity) instead of an indexPattern + * + * @param {string} pTargetEntity the entity name used to get the Scanner + * @param {string} pTargetUid all variables will be loaded by this id + * @param {boolean=} pIncludeIgnored if true ignored duplicates will be included in the result array + * + * @returns {string[]} the duplicate ids */ -_DuplicateScannerUtils._migrateParticipantsToNewContact = function (pTableName, pContactIdColumn, pAssignableIdColumn, pSourceContactId, pTargetContactId) +DuplicateScannerUtils.getDuplicateIdsByEntityScanner = function(pTargetEntity, pTargetUid, pIncludeIgnored) { - var excludedIds = newSelect(pAssignableIdColumn) - .from(pTableName) - .where([pTableName, pContactIdColumn], pTargetContactId) - .arrayColumn(); + var scanner = DuplicateScannerUtils.getScannerByEntity(pTargetEntity); + var indexsearchFilter = IndexsearchFilterUtils.fromFilter(scanner.filter); - var updateCount = newWhereIfSet([pTableName, pAssignableIdColumn], excludedIds, SqlBuilder.NOT_IN()) - .and([pTableName, pContactIdColumn], pSourceContactId) - .updateFields(new Map().set(pContactIdColumn, pTargetContactId), pTableName); - - var deleteCount = newWhere([pTableName, pContactIdColumn], pSourceContactId) - .tableName(pTableName) - .deleteData(); + var targetLoadConfig = entities.createConfigForLoadingRows() + .entity(pTargetEntity) + .uid(pTargetUid) // Array.from because getFields returns a Set + .fields(Array.from(indexsearchFilter.getFields())); + var targetRow = entities.getRow(targetLoadConfig); - return updateCount > 0 || deleteCount > 0; -} - -_DuplicateScannerUtils._buildDeleteOrganisationAndContactQuery = function(pSourceOrganisationId, pSourceContactId) -{ - return [ - newWhere("ORGANISATION.ORGANISATIONID", pSourceOrganisationId).buildDeleteStatement(), - newWhere("CONTACT.CONTACTID", pSourceContactId).buildDeleteStatement() - ]; -} - -_DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery = function(pSourceContactId) -{ - return [ - newWhere("UNRELATEDDUPLICATES.SOURCEDUPLICATEID", pSourceContactId).buildDeleteStatement(), - newWhere("UNRELATEDDUPLICATES.UNRELATEDDUPLICATEID", pSourceContactId).buildDeleteStatement() - ]; + var indexPattern = indexsearchFilter.buildQuery(targetRow); + var ids = DuplicateScannerUtils.getDuplicateIds(pTargetEntity, indexPattern, pTargetUid); + return pIncludeIgnored ? ids : DuplicateScannerUtils.filterIgnored(pTargetEntity, pTargetUid, ids); } -/* - * Creates a filter pattern which excludes results with the provided id from the search. - * - * @param {String} pRecordIdValueToIgnore The id value to exclude from the result. - * @returns {String} Filter pattern which excludes hits with the given id (index) from the result. 
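A usage sketch for the scanner-based lookup above; the entity name and the $field variable are assumptions:

// Sketch: non-ignored duplicates of an already stored record.
var currentId = vars.get("$field.CONTACTID");
var duplicateIds = DuplicateScannerUtils.getDuplicateIdsByEntityScanner("Person_entity", currentId, false);
// pass true as the third argument to also include duplicates that were marked as unrelated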
+/** + * Does the same as getDuplicateIdsByEntityScanner but gets the variables via vars.get + * + * @param {string} pTargetEntity the entity name used to get the Scanner + * @param {boolean=} pIncludeIgnored if true ignored duplicates will be included in the result array + * + * @returns {string[]} the duplicate ids */ -_DuplicateScannerUtils._getIgnoreSourceRecordPattern = function(pRecordIdValueToIgnore) +DuplicateScannerUtils.getDuplicateIdsByEntityVars = function(pTargetEntity, pIncludeIgnored) { - // creates -_local_id_:"<pRecordIdValueToIgnore>" - if(pRecordIdValueToIgnore && pRecordIdValueToIgnore.length > 0) - return indexsearch.buildPattern(indexsearch.createPatternConfig().minus( - indexsearch.createPhraseTerm(pRecordIdValueToIgnore) - .setIndexField(indexsearch.FIELD_ID) - )); - return null; -} + var scanner = DuplicateScannerUtils.getScannerByEntity(pTargetEntity); + var indexsearchFilter = IndexsearchFilterUtils.fromFilter(scanner.filter); -_DuplicateScannerUtils._migrateLinkedContactData = function (pSourceContactId, pTargetContactId) -{ - var updateStatements = new Map(); - var currentAlias = db.getCurrentAlias(); - - var setStandardsStatements = []; - var [standardPhone, standardMail, standardAddressId] = newSelect([ - "(" + CommUtil.getStandardSubSqlPhone() + ")", - "(" + CommUtil.getStandardSubSqlMail() + ")", - "CONTACT.ADDRESS_ID" - ]) - .from("CONTACT") - .where("CONTACT.CONTACTID", pTargetContactId) - .arrayRow(); - - //if the targetContact already has a standard phone comm, set the comm from the sourceContact as not standard - if (standardPhone) - { - setStandardsStatements.push( - newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId) - .and("COMMUNICATION.MEDIUM_ID", CommUtil.getMediumIdsByCategory("PHONE"), SqlBuilder.IN()) - .buildUpdateStatement({"ISSTANDARD" : "0"}) - ); - } - if (standardMail) - { - setStandardsStatements.push( - newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId) - .and("COMMUNICATION.MEDIUM_ID", CommUtil.getMediumIdsByCategory("EMAIL"), SqlBuilder.IN()) - .buildUpdateStatement({"ISSTANDARD" : "0"}) - ); - } - //set the standardaddress of the sourceContact as standard of the targetContact if it doesn't have one set yet - if(!standardAddressId) + var fields = indexsearchFilter.getFields(); + fields.add(scanner.idField); + fields = Array.from(fields); + + var entityObj = {}; + for(let i = 0; i < fields.length; i++) { - var sourceStandard = newSelect("CONTACT.ADDRESS_ID") - .from("CONTACT") - .where("CONTACT.CONTACTID", pSourceContactId) - .cell(); - setStandardsStatements.push(newWhere("CONTACT.CONTACTID", pTargetContactId).buildUpdateStatement({"ADDRESS_ID": sourceStandard})); + entityObj[fields[i]] = vars.get("$field." 
+ fields[i]); } - updateStatements.set(currentAlias, setStandardsStatements); - - _DuplicateScannerUtils._getLinkedTableInfos(pTargetContactId).forEach(function ([tableName, columnName, additionalCondition, dbAlias]) - { - if (!dbAlias) - dbAlias = currentAlias; - - if (!updateStatements.has(dbAlias)) - updateStatements.set(dbAlias, []); - var statements = updateStatements.get(dbAlias); - - var updateValues = {}; - updateValues[columnName] = pTargetContactId; - - var updateCondition = new SqlBuilder(dbAlias).where([tableName, columnName], pSourceContactId).andIfSet(additionalCondition); - - //push must be used here to keep the reference - statements.push(updateCondition.buildUpdateStatement(updateValues, tableName)); - }); - - var totalChanges = 0; - - updateStatements.forEach(function (statements, alias) - { - totalChanges += db.updates(statements, alias); - }); - - totalChanges += newWhere("COMMUNICATION.CONTACT_ID", pSourceContactId).deleteData(); //delete leftover communications from the source contact - totalChanges += new AttributeRelationQuery(pSourceContactId).deleteAllAttributes(); //delete leftover attributes - - return totalChanges > 0; -} - -/* - * Contains all Tables and their fields which may contain the contact id to be replaced for the data alias - * - * @returns {String[[]]} Array in the format [TableName, ContactIdColumnName, AdditionalCondition, alias] - */ -_DuplicateScannerUtils._getLinkedTableInfos = function(pTargetContactId) -{ - //don't use communications that the target already has - var targetComms = newSelect("COMMUNICATION.ADDR") - .from("COMMUNICATION") - .where("COMMUNICATION.CONTACT_ID", pTargetContactId) - .arrayColumn(); - - var communicationDedupCondition = targetComms.length > 0 - ? newWhere("COMMUNICATION.ADDR", targetComms, SqlBuilder.NOT_IN()) - : ""; - - return[ - ["AB_APPOINTMENTLINK", "OBJECT_ROWID"], - ["AB_CTILOG", "CONTACT_ID"], - ["AB_OBJECTRELATION", "AB_OBJECTRELATIONID"], - ["AB_OBJECTRELATION", "OBJECT1_ROWID"], - ["AB_OBJECTRELATION", "OBJECT2_ROWID"], - ["AB_LOGHISTORY", "TABLENAMEID"], - ["ADDRESS", "CONTACT_ID"], - ["BULKMAILRECIPIENT", "CONTACT_ID"], - ["BULKMAIL", "TESTING_CONTACT_ID"], - ["CAMPAIGN", "EMPLOYEE_CONTACT_ID"], - ["CAMPAIGNSTEP", "EMPLOYEE_CONTACT_ID"], - ["COMMRESTRICTION", "CONTACT_ID"], - ["COMMRESTRICTION", "EMPLOYEE_INVOLVED"], - ["COMMUNICATION", "CONTACT_ID", communicationDedupCondition], - ["COMPETITION", "CONTACT_ID"], - ["CONTRACT", "CONTACT_ID"], - ["LETTERRECIPIENT", "CONTACT_ID"], - ["OBJECTMEMBER", "CONTACT_ID"], - ["OFFER", "CONTACT_ID"], - ["PRODUCT", "CONTACT_ID"], - ["PRODUCTPRICE", "CONTACT_ID"], - ["SALESORDER", "CONTACT_ID"], - ["SALESPROJECT", "CONTACT_ID"], - ["TASK", "REQUESTOR_CONTACT_ID"], - ["TASK", "EDITOR_CONTACT_ID"], - ["TASKLINK", "OBJECT_ROWID"], - ["ACTIVITY", "RESPONSIBLE"], - ["DSGVO", "CONTACT_ID"], - ["DSGVOINFO", "CONTACT_ID"], - ["TIMETRACKING", "CONTACT_ID"], - ["ACTIVITYLINK", "OBJECT_ROWID"], - ["AB_ATTRIBUTERELATION", "OBJECT_ROWID"], - - ["ASYS_CALENDARLINK", "DBID", "", SqlUtils.getSystemAlias()] - ]; -} - - -/* - * Returns wether or not a value should be substring'd - * - * @return true if pCountCharsOfValueToUse is a number, greater than 0 and smaller than the values length - */ -_DuplicateScannerUtils._isValueLongerThanCharsToUse = function(pValueLength, pCountCharsOfValueToUse) -{ - return !isNaN(pCountCharsOfValueToUse) - && pCountCharsOfValueToUse > 0 - && pValueLength > pCountCharsOfValueToUse; -} - -/* - * Returns wether or not the parameter isnt null and a number or 
not - * - * @param {String} pCountCharsOfValueToUse Hopefully a number - * @returns {String} True if parameter isnt null and a number, False if it's null or no number - */ -_DuplicateScannerUtils._isNotNullAndANumber = function(pCountCharsOfValueToUse) -{ - return pCountCharsOfValueToUse != null && !isNaN(pCountCharsOfValueToUse); + var indexPattern = indexsearchFilter.buildQuery(entityObj); + var ids = DuplicateScannerUtils.getDuplicateIds(pTargetEntity, indexPattern, entityObj[scanner.idField]); + return pIncludeIgnored ? ids : DuplicateScannerUtils.filterIgnored(pTargetEntity, entityObj[scanner.idField], ids); } -/* - * Persons get reassigned to new organisation - * - * @returns {Boolean} If records have been updated - */ -_DuplicateScannerUtils._migratePersonsToNewOrganisation = function (pSourceOrganisationId, pTargetOrganisationId) -{ - var updateCount = newWhereIfSet(["CONTACT", "ORGANISATION_ID"], pSourceOrganisationId) - .and("PERSON_ID is not null") - .updateFields(new Map().set("ORGANISATION_ID", pTargetOrganisationId), "CONTACT"); - - var deleteCount = newWhere(["CONTACT", "ORGANISATION_ID"], pSourceOrganisationId) - .and("PERSON_ID is not null") - .deleteData(); - return updateCount > 0 || deleteCount > 0; -} - -/* - * Person and organisations keep the max_count = 1 attribute of the target and the one from the source is deleted - * - * @returns {Boolean} If records have been deleted +/** + * Does the same as getDuplicateIdsByEntityScanner + * but the variables are directly passed in as an argument + * + * @param {string} pTargetEntity the entity name used to get the Scanner + * @param {object} pEntityObj the variables (should include all variables needed to build the indexPattern) + * @param {boolean=} pIncludeIgnored if true ignored duplicates will be included in the result array + * + * @returns {string[]} the duplicate ids */ -_DuplicateScannerUtils._deleteUniqueAttributes = function (pSourceContactId, pTargetContactId) +DuplicateScannerUtils.getDuplicateIdsByEntityObj = function(pTargetEntity, pEntityObj, pIncludeIgnored) { - var targetAttrUnique = newSelect("AB_ATTRIBUTERELATION.AB_ATTRIBUTE_ID") - .from("AB_ATTRIBUTERELATION") - .join("AB_ATTRIBUTEUSAGE on AB_ATTRIBUTERELATION.AB_ATTRIBUTE_ID = AB_ATTRIBUTEUSAGE.AB_ATTRIBUTE_ID") - .where("AB_ATTRIBUTEUSAGE.MAX_COUNT = 1") - .and("AB_ATTRIBUTERELATION.OBJECT_ROWID", pTargetContactId) - .table(); - var deleteCount = 0; + var scanner = DuplicateScannerUtils.getScannerByEntity(pTargetEntity); + var indexsearchFilter = IndexsearchFilterUtils.fromFilter(scanner.filter); - targetAttrUnique.forEach(function(attribute){ - deleteCount += newWhereIfSet("AB_ATTRIBUTERELATION.AB_ATTRIBUTE_ID", attribute) - .and("AB_ATTRIBUTERELATION.OBJECT_ROWID", pSourceContactId) - .deleteData(); - }); - - return deleteCount > 0; + var indexPattern = indexsearchFilter.buildQuery(pEntityObj); + var ids = DuplicateScannerUtils.getDuplicateIds(pTargetEntity, indexPattern, pEntityObj[scanner.idField]); + return pIncludeIgnored ? 
ids : DuplicateScannerUtils.filterIgnored(pTargetEntity, pEntityObj[scanner.idField], ids); } - -/* - * Update other contacts from the source - * - * @returns {Boolean} If records have been updated - */ - _DuplicateScannerUtils._updateOtherContacts = function (pSourceContactId, sourcePersonId, targetPersonId) -{ - var otherContacts = newSelect("PERSON.PERSONID") - .from("PERSON") - .join("CONTACT", "CONTACT.PERSON_ID = PERSON.PERSONID") - .where("PERSON.PERSONID", sourcePersonId) - .and("CONTACT.CONTACTID", pSourceContactId, SqlBuilder.NOT_EQUAL()) - .table(); - var updateCount = 0; - - otherContacts.forEach(function(person){ - updateCount += newWhere("CONTACT.PERSON_ID", person) - .updateFields({"PERSON_ID" : targetPersonId}, "CONTACT"); - }); - - return updateCount > 0; -} \ No newline at end of file diff --git a/process/IndexSearch_lib/process.js b/process/IndexSearch_lib/process.js index be35ad2cc8c0a24fabe76bd57ba2a64d9ce9b310..7a7b7a0bdf3c29e45a096c7ea29b3f24c2d9e3b7 100644 --- a/process/IndexSearch_lib/process.js +++ b/process/IndexSearch_lib/process.js @@ -160,4 +160,87 @@ IndexsearchUtils.createAffectedInfoContainer = function(changedIdValue, changedT } }); return res; -} \ No newline at end of file +} + + +function IndexsearchFilterUtils() {} +IndexsearchFilterUtils.fromFilter = function(pFilter) +{ + if(pFilter["type"] == "group") + { + return IndexsearchFilterGroup.fromFilter(pFilter["childs"], pFilter["operator"]); + } + else if(pFilter["type"] == "row") + { + return IndexsearchFilterRow.fromFilter(pFilter["name"], pFilter["operator"], pFilter["value"]); + } + throw new Error("Unknown filter node type: " + pFilter["type"]); +} + +function IndexsearchFilterGroup(pChilds, pOperator) +{ + this.childs = pChilds; + this.operator = pOperator; +} +IndexsearchFilterGroup.prototype.getFields = function() +{ + var fields = new Set(); + for(let i = 0; i < this.childs.length; i++) + { + this.childs[i].getFields().forEach(fields.add, fields); + } + return fields; +} +IndexsearchFilterGroup.prototype.buildQuery = function(pFieldValues) +{ + return this.childs.map(function(curr) { return curr.buildQuery(pFieldValues); }) + .filter(function(curr) { return curr != null; }) + .join(" " + this.operator + " "); +} +IndexsearchFilterGroup.fromFilter = function(pChilds, pOperator) +{ + return new IndexsearchFilterGroup( + pChilds.map(function(curr) + { + return IndexsearchFilterUtils.fromFilter(curr); + }), + pOperator + ); +} + +function IndexsearchFilterRow(pName, pEmpty, pExclude) +{ + this.name = pName; + this.empty = pEmpty; + this.exclude = pExclude; +} +IndexsearchFilterRow.prototype.getFields = function() +{ + return new Set([this.name]); +} +IndexsearchFilterRow.prototype.buildQuery = function(pFieldValues) +{ + var fieldValue = pFieldValues[this.name] ? 
pFieldValues[this.name] : ""; + if(!this.empty && fieldValue == "") + { + return null; + } + var valueStr = fieldValue; + for(let i = 0; i < this.exclude.length; i++) + { + valueStr = valueStr.replace(new RegExp(this.exclude[i], "gi"), ""); + } + return this.name.toLowerCase() + ':({"entityfield":"' + valueStr.trim() + '"})'; +} +IndexsearchFilterRow.fromFilter = function(pName, pOperator, pValue) +{ + if(pOperator == "NOT_EQUAL" || pOperator == "CONTAINSNOT") + { + return new IndexsearchFilterRow(pName, true, JSON.parse(pValue)); + } + else if(pOperator == "ISNOTNULL") + { + return new IndexsearchFilterRow(pName, false, []); + } + throw new Error("Unknown filterrow operator: " + pOperator); +} diff --git a/process/Leadimport_lib/process.js b/process/Leadimport_lib/process.js index ea5194df48073894f1d20596c0c477b524b3dd69..da691f5000b2546251c1ec911d0ff2456971c0fd 100644 --- a/process/Leadimport_lib/process.js +++ b/process/Leadimport_lib/process.js @@ -918,19 +918,19 @@ LeadImportUtils.scanLeadDups = function(pAllContactData) if (orgObj != undefined && Object.keys(orgObj).length > 0)//checks if there is an ORGANISATIONDUPLICATE { - let scanResultsOrg = DuplicateScannerUtils.scanForDuplicates("OrganisationDuplicates", "Organisation_entity", orgObj, null); - if (scanResultsOrg != null && scanResultsOrg.length > 0) + let scanResultsOrg = DuplicateScannerUtils.getDuplicateIdsByEntityObj("Organisation_entity", orgObj); + if (scanResultsOrg.length > 0) { - let insertCountOrg = DuplicateScannerUtils.cacheNewScanResults(orgObj["CONTACTID"], scanResultsOrg, "Organisation_entity"); + DuplicateScannerUtils.insertHasDuplicateEntry("Organisation_entity", orgObj["CONTACTID"], scanResultsOrg); dupOrg = true; } } if (persObj != undefined && Object.keys(persObj).length > 0)//checks if there is an PERSONDUPLICATE { - let scanResultsPers = DuplicateScannerUtils.scanForDuplicates("PersonDuplicates", "Person_entity", persObj, null); - if (scanResultsPers != null && scanResultsPers.length > 0) + let scanResultsPers = DuplicateScannerUtils.getDuplicateIdsByEntityObj("Person_entity", persObj); + if (scanResultsPers.length > 0) { - let insertCountPers = DuplicateScannerUtils.cacheNewScanResults(persObj["CONTACTID"], scanResultsPers, "Person_entity"); + DuplicateScannerUtils.insertHasDuplicateEntry("Person_entity", persObj["CONTACTID"], scanResultsPers); dupPers = true; } } diff --git a/process/RebuildAllDuplicateCaches_serverProcess/process.js b/process/RebuildAllDuplicateCaches_serverProcess/process.js deleted file mode 100644 index b08a0abf9efa74df1e609510b0cd09d63a06d1bf..0000000000000000000000000000000000000000 --- a/process/RebuildAllDuplicateCaches_serverProcess/process.js +++ /dev/null @@ -1,55 +0,0 @@ -import("system.logging"); -import("DuplicateScanner_lib"); - -/* - * Serverprocess to reload duplicates - * - * Step #1: - * All Clusters for the specified targetEntity are deleted. - * Relations between records marked as unrelated are not deleted at this time - * - * Step #2 - * The duplicates cache gets rebuilt - * - * Step #3: - * All unrelated relations between duplicate ids hold their respective cluster id. - * If the same combination of duplicate ids exists in the duplicate cache after the rebuild, - * it is assumed that those records remain duplicates. - * Therefore the saved clusterId gets refreshed with the newly created clusters id. 
- * - * Relations between unrelated duplicates are deleted, if no cluster contains the same combination of duplicate ids - */ - -// Build Person duplicate cache - -var filterName = "PersonDuplicates"; -var targetEntity = "Person_entity"; -var recordBlockSize = DuplicateScannerUtils.getBlockSize(); - -logging.log(filterName + ": Delete duplicates -> "); -DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity); - -logging.log(filterName + ": Recalculate duplicates -> "); -DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null); - -logging.log(filterName + ": Refresh unrelated duplicates -> "); -DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - -logging.log(filterName + ": Done rebuilding "); - - -// Build Organisation duplicate cache - -filterName = "OrganisationDuplicates"; -targetEntity = "Organisation_entity"; - -logging.log(filterName + ": Delete duplicates -> "); -DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity); - -logging.log(filterName + ": Recalculate duplicates -> "); -DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null); - -logging.log(filterName + ": Refresh unrelated duplicates -> "); -DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - -logging.log(filterName + ": Done rebuilding "); \ No newline at end of file diff --git a/process/RebuildDuplicatesCache_serverProcess/process.js b/process/RebuildDuplicatesCache_serverProcess/process.js deleted file mode 100644 index 1e7f532f75a8b4a36d1c06de9cc4503bf6f74c62..0000000000000000000000000000000000000000 --- a/process/RebuildDuplicatesCache_serverProcess/process.js +++ /dev/null @@ -1,40 +0,0 @@ -import("system.logging"); -import("system.vars"); -import("DuplicateScanner_lib"); - -/* - * Serverprocess to reload duplicates - * - * Step #1: - * All Clusters for the specified targetEntity are deleted. - * Relations between records marked as unrelated are not deleted at this time - * - * Step #2 - * The duplicates cache gets rebuilt - * - * Step #3: - * All unrelated relations between duplicate ids hold their respective cluster id. - * If the same combination of duplicate ids exists in the duplicate cache after the rebuild, - * it is assumed that those records remain duplicates. - * Therefore the saved clusterId gets refreshed with the newly created clusters id. 
- * - * Relations between unrelated duplicates are deleted, if no cluster contains the same combination of duplicate ids - */ - -// Build Person duplicate cache - -var filterName = vars.get("$local.filterName"); -var targetEntity = vars.get("$local.targetEntity"); -var recordBlockSize = DuplicateScannerUtils.getBlockSize(); - -logging.log(filterName + ": Delete duplicates -> "); -DuplicateScannerUtils.deleteClustersByTargetEntity(targetEntity); - -logging.log(filterName + ": Recalculate duplicates -> "); -DuplicateScannerUtils.rebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null); - -logging.log(filterName + ": Refresh unrelated duplicates -> "); -DuplicateScannerUtils.refreshUnrelatedDuplicateRelations(targetEntity); - -logging.log(filterName + ": Done rebuilding "); - diff --git a/process/Sql_lib/process.js b/process/Sql_lib/process.js index f3708067a34ad01930d3ade922c9520011e1bb91..08022085da3612dc3044cef1351e9709a094d5c1 100644 --- a/process/Sql_lib/process.js +++ b/process/Sql_lib/process.js @@ -4628,4 +4628,33 @@ SqlUtils.escapeVars = function (pValue) if (typeof(pValue) == "string" && pValue.charAt(0) == "$") return "$" + pValue; return pValue; -} \ No newline at end of file +} + +SqlUtils.getSqlConditionalOperator = function(pOperator) +{ + switch(parseInt(pOperator)) + { + case 1: + return SqlBuilder.EQUAL(); + case 2: + return SqlBuilder.NOT_EQUAL(); + + case 4: + return SqlBuilder.LESS(); + case 5: + return SqlBuilder.LESS_OR_EQUAL(); + + case 3: + return SqlBuilder.GREATER(); + case 6: + return SqlBuilder.GREATER_OR_EQUAL(); + + case 11: + return "# is not null"; + case 12: + return "# is null"; + + default: + throw new Error("Unsupported operator " + pOperator); + } +} diff --git a/process/rebuildDuplicates_serverProcess/process.js b/process/rebuildDuplicates_serverProcess/process.js new file mode 100644 index 0000000000000000000000000000000000000000..28a6e21721a1cdf154ed41b1da2a8ec86ea1dd73 --- /dev/null +++ b/process/rebuildDuplicates_serverProcess/process.js @@ -0,0 +1,73 @@ +import("Sql_lib"); +import("system.db"); +import("system.entities"); +import("system.project"); +import("system.util"); +import("system.logging"); +import("system.notification"); +import("system.translate"); +import("system.vars"); +import("IndexSearch_lib"); +import("DuplicateScanner_lib"); + +var pFilterName = vars.get("$local.filterName"); +var pTargetEntity = vars.get("$local.targetEntity"); +var pTargetIdField = vars.get("$local.targetIdField"); +var pFilter = JSON.parse(vars.get("$local.filter")); + +newWhere("HASDUPLICATE.OBJECT_TYPE", pTargetEntity).deleteData(); +var indexsearchFilter = IndexsearchFilterUtils.fromFilter(pFilter); + +var TABLE_NAME = "HASDUPLICATE"; +var COLUMNS = [ + "HASDUPLICATEID", + "OBJECT_TYPE", "OBJECT_ROWID", + "DUPLICATECOUNT" +]; +var COLUMN_TYPES = db.getColumnTypes(TABLE_NAME, COLUMNS); +var inserts = []; + +var batchSize = parseInt(project.getPreferenceValue("custom.duplicates.dataBlockSize", "5000")); +var batchPos = 0; +while(true) +{ + var entityFields = indexsearchFilter.getFields(); + entityFields.add(pTargetIdField); + var entityRowsConfig = entities.createConfigForLoadingRows() + .entity(pTargetEntity) + .fields(Array.from(entityFields)) + .startrow(batchPos).count(batchSize); + var entityRows = entities.getRows(entityRowsConfig); + + for(var i = 0; i < entityRows.length; i++) + { + var currEntityRowId = entityRows[i][pTargetIdField]; + var indexPattern = indexsearchFilter.buildQuery(entityRows[i]); + var duplicates = 
DuplicateScannerUtils.getDuplicateIds(pTargetEntity, indexPattern, currEntityRowId); + + if(duplicates.length > 0) + { + var values = [ + util.getNewUUID(), + pTargetEntity, currEntityRowId, + duplicates.length.toFixed(0) + ]; + inserts.push([TABLE_NAME, COLUMNS, COLUMN_TYPES, values]); + } + } + + if(entityRows.length < batchSize) + { + break; + } + batchPos += batchSize; +} +db.inserts(inserts); + +notification.addNotificationWith(notification.createConfig() +.addUserWithId(vars.get("$sys.user")) +.notificationType("_____SYSTEM_NOTIFICATION_MESSAGE") +.caption(translate.text("Duplicate rows rebuilt")) +.description(translate.withArguments("The duplicate rows corresponding to %0 have been rebuilt", [pFilterName]))); + +logging.log(pFilterName + " has been rebuilt"); diff --git a/process/RebuildAllDuplicateCaches_serverProcess/RebuildAllDuplicateCaches_serverProcess.aod b/process/rebuildDuplicates_serverProcess/rebuildDuplicates_serverProcess.aod similarity index 69% rename from process/RebuildAllDuplicateCaches_serverProcess/RebuildAllDuplicateCaches_serverProcess.aod rename to process/rebuildDuplicates_serverProcess/rebuildDuplicates_serverProcess.aod index 11432054cc7356358ef25e9d7b657ca5973bba2f..96927553d72d36b62dca9dda44a08745e539b03a 100644 --- a/process/RebuildAllDuplicateCaches_serverProcess/RebuildAllDuplicateCaches_serverProcess.aod +++ b/process/rebuildDuplicates_serverProcess/rebuildDuplicates_serverProcess.aod @@ -1,8 +1,9 @@ <?xml version="1.0" encoding="UTF-8"?> <process xmlns="http://www.adito.de/2018/ao/Model" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" VERSION="1.2.2" xsi:schemaLocation="http://www.adito.de/2018/ao/Model adito://models/xsd/process/1.2.2"> - <name>RebuildAllDuplicateCaches_serverProcess</name> + <name>rebuildDuplicates_serverProcess</name> <majorModelMode>DISTRIBUTED</majorModelMode> - <process>%aditoprj%/process/RebuildAllDuplicateCaches_serverProcess/process.js</process> + <process>%aditoprj%/process/rebuildDuplicates_serverProcess/process.js</process> + <alias>Data_alias</alias> <variants> <element>EXECUTABLE</element> </variants>
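Reviewer note, not part of the diff: a minimal usage sketch of the new entity-object based duplicate lookup added in DuplicateScanner_lib, mirroring the call pattern of the Leadimport_lib change above. The contact id and the FIRSTNAME/LASTNAME fields are hypothetical placeholders; which fields are actually required depends on the scanner filter configured for Person_entity.

import("DuplicateScanner_lib");

// Hypothetical values for the record to check. The object should contain every field
// referenced by the scanner's filter plus the scanner's id field (CONTACTID here).
var personObj = {
    "CONTACTID": "00000000-0000-0000-0000-000000000000", // hypothetical id
    "FIRSTNAME": "Jane",
    "LASTNAME": "Doe"
};

// Builds the index pattern from the configured scanner filter and returns the ids of
// possible duplicates; omitting the third parameter (pIncludeIgnored) filters out
// duplicates that were marked as unrelated/ignored.
var duplicateIds = DuplicateScannerUtils.getDuplicateIdsByEntityObj("Person_entity", personObj);

if (duplicateIds.length > 0)
{
    // Cache the result in the new HASDUPLICATE table, as LeadImportUtils.scanLeadDups does.
    DuplicateScannerUtils.insertHasDuplicateEntry("Person_entity", personObj["CONTACTID"], duplicateIds);
}

rebuildDuplicates_serverProcess runs the same lookup in bulk: it loads entity rows block-wise via entities.getRows, builds one index pattern per row through IndexsearchFilterUtils, and inserts a HASDUPLICATE row for every record that has at least one duplicate.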