Commit 18fe01d2 authored by David Büchler

Pattern is now filled and used to search

Refactorings and Bugfixes
parent 5b0060fc
@@ -7,15 +7,14 @@ var targetEntity = "Person_entity";
var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
logging.log("recordBlockSize -> " + recordBlockSize);
let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
//let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
//let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
logging.log(filterName + ": Delete duplicates -> ");
DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
logging.log(filterName + ": Recalculate duplicates -> ");
DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize,
duplicateFieldsConfig, resultFields, null);
DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
logging.log(filterName + ": Refresh unrelated duplicates -> ");
DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);
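For reference, a minimal sketch of the rebuild sequence after this change; the scanner name "PersonDuplicates" is illustrative. The index field configuration and result fields no longer travel as parameters but are resolved inside RebuildDuplicatesCache from the scanner's pattern:
//Hedged sketch; "PersonDuplicates" is a hypothetical scanner name.
var filterName = "PersonDuplicates";
var targetEntity = "Person_entity";
var recordBlockSize = DuplicateScannerUtils.GetBlockSize();
DuplicateScannerUtils.DeleteDuplicateClustersByTargetEntity(targetEntity);
//Index fields and result fields are now derived from the scan pattern internally,
//so only the block size and an optional webservice callback remain as parameters.
DuplicateScannerUtils.RebuildDuplicatesCache(filterName, targetEntity, recordBlockSize, null);
DuplicateScannerUtils.RefreshUnrelatedDuplicateRelations(targetEntity);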
@@ -297,49 +297,32 @@ DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEnti
pRecordsBlockSize, pFormatValuesConsumeWebserviceCallback)
{
let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
let alreadyIdentifiedIds = [];
let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(filterName, targetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(filterName, targetEntity);
//load all entity fields used in the prefilter
let filterFieldConfigs = _DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
//logging.log("filterFieldConfigs -> " + filterFieldConfigs);
let entityFieldsToLoad = _DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
//logging.log("entityFieldsToLoad initial-> " + entityFieldsToLoad);
//If the fields from the prefilter aren't in the entity fields to load, add them manually
if(filterFieldConfigs.length > 0)
{
let INDEX_FILTER_FIELD_NAME = 0;
for (let i = 0; i < filterFieldConfigs.length; i++)
{
let filterFieldConfig = filterFieldConfigs[i];
let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
//logging.log("filterFieldConfig -> " +filterFieldConfig );
if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
{
//logging.log("Noch nicht vorhanden, hinzufügen -> ");
duplicateFieldsConfig.push(filterFieldConfig);
entityFieldsToLoad.push(filterFieldName);
}
}
}
let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromPattern(indexPattern);
let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity);
logging.log("indexPattern -> " +indexPattern );
logging.log("entityIdField -> " + entityIdField);
//Add the id field to ensure that it's at the first position
entityFields = [entityIdField].concat(entityFields);
//logging.log("entityFieldsToLoad vollsätndig-> " + entityFieldsToLoad);
//logging.log("duplicateFieldsConfig vollsätndig-> " + duplicateFieldsConfig);
logging.log("entityFields -> " + entityFields);
let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFieldsToLoad, 0, pRecordsBlockSize);
let alreadyIdentifiedIds = [];
let targetRecords = DuplicateScannerUtils.LoadEntityRecords(pTargetEntity, entityFields, 0, pRecordsBlockSize);
//logging.log("Initialer Block geladen targetRecords-> " + JSON.stringify(targetRecords));
let currentRecordIndex = pRecordsBlockSize;
while(targetRecords.length > 0)
{
foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pFilterName, pTargetEntity, targetRecords,
duplicateFieldsConfig, resultFields, useExternalWebservice,
pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds);
foundDuplicateIds = DuplicateScannerUtils.ScanRecords(pTargetEntity, targetRecords,
entityFields, resultFields, useExternalWebservice,
pFormatValuesConsumeWebserviceCallback, alreadyIdentifiedIds, indexPattern);
//logging.log("gefundene ids zum ignorieren foundDuplicateIds -> " + foundDuplicateIds);
alreadyIdentifiedIds = alreadyIdentifiedIds.concat(foundDuplicateIds);
//logging.log("Gesamte ignorierListe -> " + alreadyIdentifiedIds);
@@ -359,8 +342,8 @@ DuplicateScannerUtils.RebuildDuplicatesCache = function(pFilterName, pTargetEnti
}
}
DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTargetRecordsData,
pDuplicateFieldsConfig, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds)
DuplicateScannerUtils.ScanRecords = function(pTargetEntity, pTargetRecordsData,
pEntityFields, pResultFields, pUseExternalWebservice, pFormatValuesConsumeWebserviceCallback, pAlreadyIdentifiedIds, pIndexPattern)
{
//logging.log("in ScanRecords -> ");
let foundDuplicateIds = [];
@@ -380,14 +363,14 @@ DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTarget
// logging.log("Neuer Record -> " + pTargetRecordsData[b]);
//logging.log("pTargetRecordsData[b] -> " + JSON.stringify(pTargetRecordsData[b]));
let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(pDuplicateFieldsConfig, pTargetRecordsData[b]);
//logging.log("Werte des Datensatzes entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(pEntityFields, pTargetRecordsData[b]);
logging.log("Werte des Datensatzes entityFieldValuesRay -> " + JSON.stringify(entityFieldValuesRay));
//The first field in this Array must always be the configured id field. This is ensured using onValidation-logic
let idField = entityFieldValuesRay[0][0];
let idValue = entityFieldValuesRay[0][1];
// logging.log("idField -> " + idField);
// logging.log("idValue -> " + idValue);
logging.log("idField -> " + idField);
logging.log("idValue -> " + idValue);
//logging.log("pTargetRecordsData[b][idField] -> " + pTargetRecordsData[b][idField]);
//If the current Id has already been identified, continue
if(pAlreadyIdentifiedIds.indexOf(pTargetRecordsData[b][idField]) > -1)
@@ -396,22 +379,22 @@ DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTarget
continue;
}
let foundDuplicates = _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice)
let foundDuplicates = _DuplicateScannerUtils._scanForDuplicates(pTargetEntity,
entityFieldValuesRay, pResultFields, idField, idValue, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern)
if(foundDuplicates == null || foundDuplicates.length == 0)
{
//logging.log("Keine Datensätze gefunden continue;-> ");
logging.log("Keine Datensätze gefunden continue;-> ");
continue;
}
//logging.log("foundDuplicates.length nach _scanForDuplicates -> " + foundDuplicates.length);
logging.log("foundDuplicates.length nach _scanForDuplicates -> " + foundDuplicates.length);
//Insert all found duplicate ids into a cache array because those ids don't have to be checked again later on.
for (let i = 0; i < foundDuplicates.length; i++)
{
let localId = foundDuplicates[i][indexsearch.FIELD_ID];
//logging.log("foundDuplicates[i] -> " + foundDuplicates[i]);
logging.log("foundDuplicates[i] -> " + foundDuplicates[i]);
foundDuplicateIds.push(localId);
}
@@ -419,14 +402,14 @@ DuplicateScannerUtils.ScanRecords = function(pFilterName, pTargetEntity, pTarget
//The duplicates list contains only the duplicates found for the original id; therefore it gets added manually
foundDuplicateIds.push(pTargetRecordsData[b][idField]);
//logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
//logging.log("foundDuplicateIds -> " + JSON.stringify(foundDuplicateIds));
logging.log("foundDuplicates -> " + JSON.stringify(foundDuplicates));
logging.log("foundDuplicateIds -> " + JSON.stringify(foundDuplicateIds));
let insertQueriesRay = _DuplicateScannerUtils._createInsertDuplicatesClusterQuery(foundDuplicateIds, pTargetEntity)
duplicatesToInsertQueries = duplicatesToInsertQueries.concat(insertQueriesRay);
foundDuplicateIds = [];
}
//logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
logging.log("duplicatesToInsertQueries -> " + JSON.stringify(duplicatesToInsertQueries));
db.inserts(duplicatesToInsertQueries, db.getCurrentAlias(), 10 * datetime.ONE_MINUTE);
return foundDuplicateIds;
}
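Under the new signature, ScanRecords no longer receives the filter name; the already loaded index pattern and the plain field list are passed instead. A hedged sketch of a direct call, assuming the helpers shown further below are used to prepare the inputs (scanner name, id field, and block size are illustrative):
//Hedged sketch; "PersonDuplicates" and "PERSONID" are assumptions.
let indexPattern = _DuplicateScannerUtils._loadIndexPattern("PersonDuplicates", "Person_entity");
let entityFields = ["PERSONID"].concat(_DuplicateScannerUtils._loadEntityFieldsFromPattern(indexPattern));
let targetRecords = DuplicateScannerUtils.LoadEntityRecords("Person_entity", entityFields, 0, 50);
//resultFields as loaded via DuplicateScannerUtils.LoadResultFields(...)
let foundIds = DuplicateScannerUtils.ScanRecords("Person_entity", targetRecords,
    entityFields, resultFields, false, null, [], indexPattern);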
@@ -628,41 +611,24 @@ DuplicateScannerUtils.ScanForDuplicates = function(pFilterName, pTargetEntity, p
{
let useExternalWebservice = _DuplicateScannerUtils._isUseExternalWebservice(pFilterName, pTargetEntity);
let resultFields = DuplicateScannerUtils.LoadResultFields(pFilterName, pTargetEntity);
let duplicateFieldsConfig = DuplicateScannerUtils.LoadIndexFieldsConfiguration(pFilterName, pTargetEntity);
//logging.log("duplicateFieldsConfig -> " + duplicateFieldsConfig);
let entityFieldsToLoad = _DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig(duplicateFieldsConfig);
//logging.log("entityFieldsToLoad " + entityFieldsToLoad)
//If the fields from the prefilter aren't in the entity fields to load, add them manually
let filterFieldConfigs = _DuplicateScannerUtils._getFieldConfigsFromFilterFields(pFilterName, pTargetEntity);
//logging.log("filterFieldConfigs " + filterFieldConfigs)
if(filterFieldConfigs.length > 0)
{
let INDEX_FILTER_FIELD_NAME = 0;
for (let i = 0; i < filterFieldConfigs.length; i++)
{
let filterFieldConfig = filterFieldConfigs[i];
let filterFieldName = filterFieldConfig[INDEX_FILTER_FIELD_NAME];
//logging.log("filterFieldName" + filterFieldName)
//logging.log("filterFieldConfig -> " +filterFieldConfig );
if(entityFieldsToLoad.indexOf(filterFieldName) < 0)
{
//logging.log("Noch nicht vorhanden, hinzufügen -> ");
duplicateFieldsConfig.push(filterFieldConfig);
}
}
}
let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(duplicateFieldsConfig, pValuesToCheck);
//The first field in this Array must always be the configured id field.
let indexPattern = _DuplicateScannerUtils._loadIndexPattern(pFilterName, pTargetEntity);
let entityFields = _DuplicateScannerUtils._loadEntityFieldsFromPattern(indexPattern);
let entityIdField = _DuplicateScannerUtils._loadEntityIdField(pFilterName, pTargetEntity);
//Add the id field to ensure that it's at the first position
entityFields = [entityIdField].concat(entityFields);
let entityFieldValuesRay = DuplicateScannerUtils.BuildEntityFieldNameValueRays(entityFields, pValuesToCheck);
//The first field in this Array must always be the configured id field.
logging.log("ray " + entityFieldValuesRay.toSource())
let idField = entityFieldValuesRay[0][0];
let idValue = entityFieldValuesRay[0][1];
return _DuplicateScannerUtils._scanForDuplicates(pFilterName, pTargetEntity,
return _DuplicateScannerUtils._scanForDuplicates(pTargetEntity,
entityFieldValuesRay, resultFields, idField, idValue,
pFormatValuesConsumeWebserviceCallback, useExternalWebservice)
pFormatValuesConsumeWebserviceCallback, useExternalWebservice, indexPattern)
}
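A hedged usage sketch for the single-record case: the keys of pValuesToCheck have to match the entity fields referenced by the scan pattern. Field names and values here are illustrative:
//Hedged sketch; field names and values are assumptions.
let valuesToCheck = {
    "PERSONID": "1868bd3a-05af-4b7f-a633-e3aec50ac45c",
    "FIRSTNAME": "Peter",
    "LASTNAME": "Mayer"
};
let foundDuplicates = DuplicateScannerUtils.ScanForDuplicates("PersonDuplicates", "Person_entity",
    valuesToCheck, null);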
/*
@@ -818,7 +784,6 @@ DuplicateScannerUtils.MergeOrganisation = function(pSourceContactId, pTargetCont
DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsConfig, pTargetRecordData)
{
let INDEX_CONFIG_ENTITY_FIELD = 0;
let INDEX_CONFIG_USE_FOR_SEARCH = 2;
let entityFieldValuesRay = [];
/*
* Based on the parameterized filter field names and the values loaded via the query,
@@ -829,8 +794,13 @@ DuplicateScannerUtils.BuildEntityFieldNameValueRays = function(pDuplicateFieldsC
{
// logging.log("pDuplicateFieldsConfig[a][1] -> " + pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD]);
// logging.log(" pTargetRecordData[a] -> " + pTargetRecordData[a]);
let entityField = pDuplicateFieldsConfig[a][INDEX_CONFIG_ENTITY_FIELD];
entityFieldValuesRay.push([entityField, pTargetRecordData[entityField], pDuplicateFieldsConfig[a][INDEX_CONFIG_USE_FOR_SEARCH]])
let entityField = pDuplicateFieldsConfig[a];
let entityFieldValue = pTargetRecordData[entityField];
if(entityFieldValue == null)
entityFieldValue = "";
entityFieldValue = entityFieldValue.trim();
entityFieldValuesRay.push([entityField, entityFieldValue]);
}
return entityFieldValuesRay.length > 0 ? entityFieldValuesRay : [["", ""]];
}
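After this refactoring the function expects a plain array of entity field names instead of the former config triples, and normalizes missing values to an empty, trimmed string. A small illustrative run (names and values are assumptions):
let fields = ["PERSONID", "FIRSTNAME", "LASTNAME"];
let record = {"PERSONID": "42", "FIRSTNAME": " Peter ", "LASTNAME": null};
DuplicateScannerUtils.BuildEntityFieldNameValueRays(fields, record);
//returns [["PERSONID", "42"], ["FIRSTNAME", "Peter"], ["LASTNAME", ""]]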
@@ -856,85 +826,85 @@ _DuplicateScannerUtils._buildUpdateResetStandardCommunications = function(pSourc
return [["COMMUNICATION", ["ISSTANDARD"], null, ["0"], "CONTACT_ID = '" + pSourceContactId + "'"]];
}
_DuplicateScannerUtils._getEntityFieldNamesFromFieldsConfig = function(pDuplicateFieldsConfig)
/*
* Gets the Pattern for the scanner
* A pattern usually contains placeholders in the style of "{entityFieldName}"
*
* @param {String} pScannerName Name of the filter to use
* @param {String} pTargetEntity The target entity which has been assigned to the filters configuration
* @returns {String} Scan pattern as string
*/
_DuplicateScannerUtils._loadIndexPattern = function(pScannerName, pTargetEntity)
{
let entityFieldsToLoad = [];
for (field in pDuplicateFieldsConfig)
{
entityFieldsToLoad.push(pDuplicateFieldsConfig[field][0]);
}
return entityFieldsToLoad;
let scanPatternQuery = SqlCondition.begin()
.and("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME = '" + pTargetEntity + "'")
.and("DUPLICATESCANNER.FILTER_NAME = '" + pScannerName + "'")
.buildSql("select SCAN_PATTERN from DUPLICATESCANNER");
return db.cell(scanPatternQuery);
}
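The pattern is read from DUPLICATESCANNER.SCAN_PATTERN for the given scanner and entity. A hypothetical example of a stored value, using the {entityFieldName} placeholder style described above (index field names are assumptions):
//Hypothetical SCAN_PATTERN content:
//(firstname_value:{FIRSTNAME}) AND (lastname_value:{LASTNAME})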
_DuplicateScannerUtils._getFieldConfigsFromFilterFields = function(pFilterName, pTargetEntity)
_DuplicateScannerUtils._loadEntityFieldsFromPattern = function(indexPattern)
{
let resultFields = [];
let filterFields = [];
let filters = DuplicateScannerUtils.loadFilters(pFilterName, pTargetEntity)
return indexPattern.match(/[^{}]+(?=\})/g);
}
for (let i = 0; i < filters.length; i++)
{
let filter = JSON.parse(filters[i][0]).filter;
let fields = JditoFilterUtils.getFilterFields(filter.childs);
filterFields = filterFields.concat(fields);
}
for (let i = 0; i < filterFields.length; i++)
_DuplicateScannerUtils._replacePlaceholderForValuesInPattern = function(pIndexPattern, pEntityFieldValueRays)
{
let INDEX_ENTITY_FIELD_NAME = 0;
let INDEX_ENTITY_FIELD_VALUE = 1;
let placeholder = "";
let fieldValue = "";
for (let i = 0; i < pEntityFieldValueRays.length; i++)
{
let filterField = filterFields[i];
resultFields.push([filterField, 0, 0]);
placeholder = "{" + pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_NAME] + "}";
fieldValue = pEntityFieldValueRays[i][INDEX_ENTITY_FIELD_VALUE];
logging.log("placeholder -> " + placeholder);
logging.log("fieldValue -> " + fieldValue);
pIndexPattern = pIndexPattern.replace(placeholder, fieldValue);
}
return resultFields;
return pIndexPattern;
}
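Taken together, the two new helpers above first extract the placeholder names via the lookahead regex and later substitute the record's values. A small illustrative run (pattern and values are assumptions); note that String.replace with a string argument only replaces the first occurrence, so a placeholder repeated within one pattern would need a global regex instead:
let pattern = "(firstname_value:{FIRSTNAME}) AND (lastname_value:{LASTNAME})";
_DuplicateScannerUtils._loadEntityFieldsFromPattern(pattern);
//returns ["FIRSTNAME", "LASTNAME"]
_DuplicateScannerUtils._replacePlaceholderForValuesInPattern(pattern,
    [["FIRSTNAME", "Peter"], ["LASTNAME", "Mayer"]]);
//returns "(firstname_value:Peter) AND (lastname_value:Mayer)"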
_DuplicateScannerUtils._loadEntityIdField = function(pFilterName, pTargetEntity)
{
let loadEntityIdFieldQuery = SqlCondition.begin()
.andPrepare("DUPLICATESCANNER.FILTER_NAME", pFilterName)
.andPrepare("DUPLICATESCANNER.ENTITY_TO_SCAN_NAME", pTargetEntity)
.buildSql("select ID_FIELD_NAME from DUPLICATESCANNER", "1=2");
return db.cell(loadEntityIdFieldQuery);
}
/*
* @see DuplicateScannerUtils.ScanForDuplicates for the documentation
*/
_DuplicateScannerUtils._scanForDuplicates = function(pFilterName, pTargetEntity, pFilterFieldValueRays,
pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice)
_DuplicateScannerUtils._scanForDuplicates = function(pTargetEntity, pEntityFieldNameValueRays,
pResultFields, pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pFormatValuesConsumeWebserviceCallback, pUseExternalWebservice, pIndexPattern)
{
let possibleDuplicates = [];
let ignoredRecordFilter = _DuplicateScannerUtils._getIgnoreRecordFilter(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity);
let configuredFilters = _DuplicateScannerUtils._loadFilters(pFilterName, pTargetEntity);
//logging.log("Found filters -> " + configuredFilters);
let preFilter = null;
//Only run the prefilter if filters have been configured. If not, run the indexsearch based on the field configuration
if(configuredFilters != null && configuredFilters.length > 0)
{
//To ensure the record which the current search is based on isn't found as a result, the other configured filters get appended to
//the filter of said record to ignore
configuredFilters = [ignoredRecordFilter].concat(configuredFilters);
preFilter =_DuplicateScannerUtils._applyPreFilter(pTargetEntity, configuredFilters, pFilterFieldValueRays);
//logging.log("preFilter welcher Elemente im erlaubten bereich ausgibt -> " + preFilter);
//The scan can be executed even without any prefilters. If a prefilter has been configured but doesn't match the
//threshold criteria no search shall be run.
if(preFilter == null)
return null;
}
//No prefilter and no filterfields => No indexsearch
if(preFilter == null && pFilterFieldValueRays.length < 1)
//No filterfields/indexpattern => No indexsearch
if(pEntityFieldNameValueRays.length < 1 || pIndexPattern == null || pIndexPattern == "")
return null;
//If at this point the prefilter is null but a search has to be executed, add the ignore filter manually so that the search doesn't find the base record as a duplicate of itself.
//This is the case if no prefilter but index fields are configured.
if(preFilter == null)
preFilter = ignoredRecordFilter;
possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, preFilter, pFilterFieldValueRays, pResultFields, 100);
//logging.log("possibleDuplicates -> " + JSON.stringify(possibleDuplicates));
let possibleDuplicates = [];
let ignoreSourceRecordPattern = _DuplicateScannerUtils._getIgnoreSourceRecordPattern(pRecordIdFieldToIgnore, pRecordIdValueToIgnore);
logging.log("ignoreSourceRecordPattern -> " + ignoreSourceRecordPattern);
let indexPatternWithValues = _DuplicateScannerUtils._replacePlaceholderForValuesInPattern(pIndexPattern, pEntityFieldNameValueRays);
logging.log("indexPatternWithValues -> " + indexPatternWithValues);
indexPatternWithValues = ignoreSourceRecordPattern + indexPatternWithValues + ")";
logging.log("indexPatternWithValues -> " + indexPatternWithValues);
possibleDuplicates = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, indexPatternWithValues, pResultFields, 100);
logging.log("possibleDuplicates -> " + JSON.stringify(possibleDuplicates));
if(possibleDuplicates == null)
return null;
possibleDuplicates = possibleDuplicates[indexsearch.HITS];
if(pUseExternalWebservice && possibleDuplicates.length > 0 && pFormatValuesConsumeWebserviceCallback != null)
if(pUseExternalWebservice && pFormatValuesConsumeWebserviceCallback != null)
possibleDuplicates = pFormatValuesConsumeWebserviceCallback.apply(this, [possibleDuplicates]);
// //logging.log("pTargetEntity -> " + pTargetEntity);
@@ -965,161 +935,29 @@ _DuplicateScannerUtils._isUseExternalWebservice = function(pFilterName, pTargetE
return (isUseWebservice == 0) ? false : true;
}
/*
* Applies the configured prefilters. The prefilter is used to narrow the records to be searched by the duplicate scan service.
* The filters get applied sequentially, one after another.
* If the number of results is equal to or lower than the configured threshold and greater than zero, the current filter combination gets returned.
* If no more filters are available and the number of results still exceeds the threshold, null gets returned.
* If the number of results reaches zero while applying filters, null gets returned.
* The reason is that if a huge number of records were to be used, the time to search for duplicates would be substantially longer.
* If the prefilters do not have the desired effect, it is advised to modify the configured filter conditions.
*
* @param {String} pTargetEntity Entity which has been configured
* @param {String[[]]} pFilterCountCharactersToUseRay Array of Arrays containing the configuration of the filters.
* The structure is as follows: [INDEX_FILTER_CONDITION, INDEX_COUNT_CHARS_TO_USE, INDEX_MAX_RESULTS_THRESHOLD]
* @param {String[[]]} pFilterFieldValueRays Array of Arrays containing the name of a used field and its value.
* Use "DuplicateScannerUtils.BuildEntityFieldNameValueRays". The field names and values have to be in the same order
* @returns {String} Null if the record count wasn't inside the threshold, otherwise the combined filter used to achieve a successful prefiltering
*/
_DuplicateScannerUtils._applyPreFilter = function(pTargetEntity, pFilterCountCharactersToUseRay, pFilterFieldValueRays)
{
// logging.log("#pFilterFieldValueRays#" + JSON.stringify(pFilterFieldValueRays))
var combinedFilter = {};
for (i = 0; i < pFilterCountCharactersToUseRay.length; i++)
{
var filter = pFilterCountCharactersToUseRay[i][INDEX_FILTER_CONDITION];
// logging.log("#1#filter>>" + filter)
let countCharsOfValueToUse = pFilterCountCharactersToUseRay[i][INDEX_COUNT_CHARS_TO_USE];
let maxResultsThreshold = pFilterCountCharactersToUseRay[i][INDEX_MAX_RESULTS_THRESHOLD];
if(filter == null || filter == "")
continue;
filter = JSON.parse(filter);
let filterValuesObject = {};
for (a = 0; a < pFilterFieldValueRays.length; a++)
{
filterValuesObject[pFilterFieldValueRays[a][0]] = pFilterFieldValueRays[a][1];
}
//logging.log("filterValuesObject zum füllen des jsons -> " + JSON.stringify(filterValuesObject));
/*
* Insert the values into the current filter. Has to be here so that only the new filter
* and therefore the combinedFilter incrementally gets filled and not always everything multiple times.
*/
//logging.log("1 ###### filter zum befüllen mit werten-> " + JSON.stringify(filter));
filter.filter.childs = _DuplicateScannerUtils._insertValuesInFilterTemplate(filter.filter.childs, filterValuesObject, countCharsOfValueToUse);
//logging.log("2 ###### gefüllter filter prefilter index-> " + JSON.stringify(filter));
if(i == 0)
combinedFilter = filter.filter;
else
{
//logging.log("3 ###### aktueller combinedFilter -> " + JSON.stringify(combinedFilter));
//logging.log("4 ###### gefüllter filter -> " + JSON.stringify(filter));
//Extend the current combined filter with the next filter condition to further refine the results
//It seems to always contain one child element at the root
//combinedFilter.childs.push(filter.filter.childs);
let newCombinedFilterChilds = combinedFilter.childs.concat(filter.filter.childs);
combinedFilter.childs = newCombinedFilterChilds;
}
//logging.log("5 ###### combinedFilter + gefüllter filter vor indexsuche-> " + JSON.stringify(combinedFilter));
filter.filter = combinedFilter;
//logging.log("6 ###### completeFilter -> " + JSON.stringify(filter));
//Workaround to load the smallest possible resultset because only the TOTALHITS are relevant at this time
//Only load "indexsearch.FIELD_ID" and a resultSet size of 1
let searchResult = _DuplicateScannerUtils._callIndexSearch(pTargetEntity, JSON.stringify(filter), [],
[], 1);//todo use again after this has been fixed!! insert the local id after fix
//logging.log("searchResults hits length -> " + searchResult[indexsearch.HITS].length);
// if(searchResult[indexsearch.TOTALHITS] < 80)//todo entfernen?!
// {
// for (let i = 0; i < searchResult[indexsearch.HITS].length; i++)
// {
// logging.log("Treffer Nr -> " + i);
// //searchResults hits 0 -> {#ADITO_SEARCH_ID=1868bd3a-05af-4b7f-a633-e3aec50ac45c, _index_group_=Person, #ADITO_SEARCH_TYPE=Person, firstname_value=Peter, _local_id_=1868bd3a-05af-4b7f-a633-e3aec50ac45c}
// let localId = searchResult[indexsearch.HITS][i]["_local_id_"];
// let firstname = searchResult[indexsearch.HITS][i]["firstname_value"];
// let indexGroup = searchResult[indexsearch.HITS][i]["_index_group_"];
// logging.log("localId -> " + localId);
// logging.log("firstname -> " + firstname);
// logging.log("indexGroup -> " + indexGroup);
// }
// }
let totalHits = searchResult[indexsearch.TOTALHITS]
//logging.log("totalHits -> " + totalHits);
if(totalHits > maxResultsThreshold)
{
//logging.log("zu viele rows gefundenn nächsten Filter anwenden -> totalHits:" + totalHits + " maxResultsThreshold:" + maxResultsThreshold);
//Found more rows than allowed by the threshold, run next filter to narrow the results
continue;
}
else if(totalHits <= 0)
{
return null;
}
else
{
//we're within the threshold, return the valid filter. The filter gets used later on.
return JSON.stringify(filter);
}
}
//logging.log("zu viele rows und keine filter mehr -> ");
return null;
}
/*
* Runs the indexsearch based on the given parameters.
* If the "pEntityFieldValueRays" is empty, only the prefilters get applied as pattern.
* If not, the prefilters will be applied as pattern and the contents of "pEntityFieldValueRays" get applied as filter.
*
* If the "pEntityFieldValueRays" is empty, no search will be executed.
*
* @param {String} pTargetEntity Entity which has been configured
* @param {String} pPreFilterJson The prefilters
* @param {String[[]]} pEntityFieldValueRays Array of Arrays containing the name of a used field and its value.
* Use "DuplicateScannerUtils.BuildEntityFieldNameValueRays". The fieldnames and values have to be in the same order. NotNull!->Empty Array
* @param {String} pIndexPatternWithValues The pattern used to search. Has to contain the values already.
* @param {String} pResultFields The result field config. Use "DuplicateScannerUtils.LoadResultFields"
* @param {String} pResultSetRows todo
* @returns {[["key", "value"]]} Array of Key-Value-Pairs based on the configured pResultFields
* @returns {[["key", "value"]] || null} Array of Key-Value-Pairs based on the configured pResultFields, or null if no pattern exists
*/
_DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pPreFilterJson, pEntityFieldValueRays, pResultFields, pResultSetRows)
_DuplicateScannerUtils._callIndexSearch = function(pTargetEntity, pIndexPatternWithValues, pResultFields, pResultSetRows)
{
let indexPattern = null;
let filterPattern = null;
//The pPreFilterJson is never null because it always contains at least the default ignore record filter
indexPattern = indexsearch.buildQueryFromSearchCondition(pPreFilterJson);
indexPattern = indexPattern.replace(null, "");//todo remove if api has been fixed
let filterPatternConfig = _DuplicateScannerUtils._buildFilterPatternConfig(pEntityFieldValueRays, pTargetEntity);
if(filterPatternConfig != null)
filterPattern = indexsearch.buildPatternString(filterPatternConfig);
//The indexPattern can't be null because it is required to run the search.
if(indexPattern == null)
if(pIndexPatternWithValues == null || pIndexPatternWithValues == "")
return null;
let indexQuery = indexsearch.createIndexQuery()
.setPattern(indexPattern)
.setPattern(pIndexPatternWithValues)
.setEntities([pTargetEntity])
//.addSearchFields("Person_entity.FIRSTNAME", "Person_entity.LASTNAME", "Person_entity.CONTACTID")
//.setRows(pResultSetRows);
indexQuery = _DuplicateScannerUtils._setResultFields(indexQuery, pResultFields);
if(filterPattern != null)
indexQuery = indexQuery.addFilter(filterPattern);
logging.log("indexQuery: PATTERN + FILTERS -> " + indexQuery.getPattern() + " " + indexQuery.getFilters());
return indexsearch.searchIndex(indexQuery);
}
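A hedged sketch of calling the simplified search directly with an already filled pattern (the pattern content and the logged field are illustrative):
//Hedged sketch; pattern content is an assumption.
let searchResult = _DuplicateScannerUtils._callIndexSearch("Person_entity",
    "(+(-personid_value:(42)) (firstname_value:Peter))", resultFields, 100);
if(searchResult != null)
    logging.log("total hits -> " + searchResult[indexsearch.TOTALHITS]);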
@@ -1332,18 +1170,15 @@ _DuplicateScannerUtils._buildDeleteCachedUnrelatedDuplicateQuery = function(pSou
}
/*
* Creates a filter JSON which excludes the field and its value using the operator "NOT_EQUAL"
* Creates a pattern which excludes the field and its value
*
* @param {String} pRecordIdFieldToIgnore Field to be ignored
* @param {String} pRecordIdValueToIgnore The fields value
* @param {String} pTargetEntity Entity which has been configured
* @returns {String[]} Array where 0 = filter, 1 = null(INDEX_COUNT_CHARS_TO_USE), 2 = null(INDEX_MAX_RESULTS_THRESHOLD)
* @returns {String} Pattern which excludes the given field in combination with the value
*/
_DuplicateScannerUtils._getIgnoreRecordFilter = function(pRecordIdFieldToIgnore, pRecordIdValueToIgnore, pTargetEntity)
_DuplicateScannerUtils._getIgnoreSourceRecordPattern = function(pRecordIdFieldToIgnore, pRecordIdValueToIgnore)
{
let ignoreFilterJson = JSON.stringify({"entity":pTargetEntity,"filter":{"type":"group","operator":"AND","childs":[{"type":"row","name":pRecordIdFieldToIgnore,"operator":"NOT_EQUAL","value":pRecordIdValueToIgnore,"key":"","contenttype":"TEXT"}]}});
return [ignoreFilterJson, null, null];
return "(+(-" + pRecordIdFieldToIgnore.toLowerCase() + "_value:(" + pRecordIdValueToIgnore + ")) ";
}
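The returned fragment deliberately opens a group; _scanForDuplicates concatenates it with the filled pattern and appends the closing parenthesis. An illustrative assembly (id field and values are assumptions):
_DuplicateScannerUtils._getIgnoreSourceRecordPattern("PERSONID", "42");
//returns "(+(-personid_value:(42)) "
//after concatenation in _scanForDuplicates:
//(+(-personid_value:(42)) (firstname_value:Peter) AND (lastname_value:Mayer))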
_DuplicateScannerUtils._buildUpdateContactIdStatements = function(pTableInfos, pSourceContactId, pTargetContactId)