diff --git a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateClusters.xml b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateClusters.xml
index a89ca7761d3e6f9e0342679853ea3f43b8f3594f..929dc17ce7127c558abc067d33671089b13adb31 100644
--- a/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateClusters.xml
+++ b/.liquibase/Data_alias/basic/2019.2.1/Duplicates/create_duplicateClusters.xml
@@ -11,6 +11,8 @@
             <column name="DUPLICATEID" type="CHAR(36)" >
                 <constraints nullable="false"/>
             </column>
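+            <!-- entity the duplicate records belong to; filled by the duplicate scanner (DuplicateScanner_lib) -->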
+            <column name="TARGET_ENTITY" type="NVARCHAR(200)"/>
         </createTable>
     </changeSet>
 </databaseChangeLog>
\ No newline at end of file
diff --git a/aliasDefinition/Data_alias/Data_alias.aod b/aliasDefinition/Data_alias/Data_alias.aod
index e03194b8fc731a88fcc9f0683fa7bb5d4b0d0540..1c93025030440d90999bffb19dcc5944b912b755 100644
--- a/aliasDefinition/Data_alias/Data_alias.aod
+++ b/aliasDefinition/Data_alias/Data_alias.aod
@@ -11351,6 +11351,21 @@
                 <title></title>
                 <description></description>
               </entityFieldDb>
+              <entityFieldDb>
+                <name>TARGET_ENTITY</name>
+                <dbName></dbName>
+                <primaryKey v="false" />
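+                <!-- columnType 12 = java.sql.Types.VARCHAR -->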
+                <columnType v="12" />
+                <size v="200" />
+                <scale v="0" />
+                <notNull v="false" />
+                <isUnique v="false" />
+                <index v="false" />
+                <documentation>Name of the entity the duplicate record belongs to.</documentation>
+                <title></title>
+                <description></description>
+              </entityFieldDb>
             </entityFields>
           </entityDb>
           <entityDb>
diff --git a/process/DuplicateScanner_lib/process.js b/process/DuplicateScanner_lib/process.js
index f2886b12d7c21629c24d26aac43c9ef940064d21..6b23e51adfce9a8df9d7ec31db6bdd0edb7ce9ad 100644
--- a/process/DuplicateScanner_lib/process.js
+++ b/process/DuplicateScanner_lib/process.js
@@ -168,7 +168,8 @@ pQueryTargetRecords, pFilterFields, pRecordIdFieldToIgnore)
         logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
         logging.log("foundDuplicateIds -> " + JSON.stringify(foundDuplicateIds));
         
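+        // hand the scanned entity over as well so every cluster row records its origin in TARGET_ENTITY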
-        let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds)
+        let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds, pTargetEntity);
         duplicatesToInsertQueries = duplicatesToInsertQueries.concat(insertQueriesRay);
     }
     logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
@@ -218,16 +219,24 @@ _DuplicateScannerUtils._getIdOfIdenticalCachedDuplicatesCluster = function (pRec
     
 }
 
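+/**
+ * Builds the insert definitions for one new duplicate cluster.
+ * A single cluster id is generated and shared by all given duplicate ids.
+ *
+ * @param {String[]} pDuplicatesRay ids of the records that form the cluster
+ * @param {String} pTargetEntity name of the entity the duplicates were found in, stored in TARGET_ENTITY
+ * @return {Array[]} one insert definition per duplicate: ["DUPLICATECLUSTERS", columns, null, values]
+ */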
-_DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicatesRay)
+_DuplicateScannerUtils._createInsertDuplicatesClusterQuery = function (pDuplicatesRay, pTargetEntity)
 {
     let duplicatesToInsertQueries = [];
-    let cols = ["ID", "CLUSTERID", "DUPLICATEID"];
+    let cols = ["ID", "CLUSTERID", "DUPLICATEID", "TARGET_ENTITY"];
     let newClusterUid = util.getNewUUID();
     
-    for (i = 0; i < pDuplicatesRay.length; i++) 
+    for (let i = 0; i < pDuplicatesRay.length; i++)
     {
         let newId = util.getNewUUID();
-        let valuesToInsert = [newId, newClusterUid, pDuplicatesRay[i]];
+        let valuesToInsert = [newId, newClusterUid, pDuplicatesRay[i], pTargetEntity];
 
         duplicatesToInsertQueries.push(["DUPLICATECLUSTERS", cols, null, valuesToInsert]);
     }