Say I have an initial array of objects:

var initialData = [
    {'ID': 1, 'FirstName': 'Sally'},
    {'ID': 2, 'FirstName': 'Jim'},
    {'ID': 3, 'FirstName': 'Bob'}
];
Actually, if you are interested in performance, you could consider changing your initialData structure to something like this:
var initialData = {
    "1": {'FirstName': 'Sally'},
    "2": {'FirstName': 'Jim'},
    "3": {'FirstName': 'Bob'}
};
In other words, we use the IDs as the keys of an object. This gives you O(1) access to the data and an O(1) existence test. You can build this structure from the original array with reduce():
var initialData = [
    {'ID': 1, 'FirstName': 'Sally'},
    {'ID': 2, 'FirstName': 'Jim'},
    {'ID': 3, 'FirstName': 'Bob'}
];

// Accumulate an object keyed by ID from the array of records.
let newInitialData = initialData.reduce((res, {ID, FirstName}) => {
    res[ID] = {FirstName: FirstName};
    return res;
}, {});

console.log(newInitialData);
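With the keyed object in hand, the existence test mentioned above is just a property lookup. A minimal sketch (the IDs 2 and 6 are only illustrative):

// O(1) existence test: no array scan needed.
if (2 in newInitialData) {
    console.log(newInitialData[2].FirstName); // "Jim"
}
console.log(6 in newInitialData); // false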
Using this new structure, you can write an O(n) algorithm that inserts only the data that is not already there:
var initialData = {
    "1": {'FirstName': 'Sally'},
    "2": {'FirstName': 'Jim'},
    "3": {'FirstName': 'Bob'}
};

var newData = [
    {'ID': 2, 'FirstName': 'Jim'},
    {'ID': 4, 'FirstName': 'Tom'},
    {'ID': 5, 'FirstName': 'George'}
];

// Insert each record only if its ID is not already a key (O(1) per check).
newData.forEach(({ID, FirstName}) => {
    initialData[ID] = initialData[ID] || {FirstName: FirstName};
});

console.log(initialData);
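After the loop, initialData keeps its original three entries (the existing entry for ID 2 is not overwritten) and gains Tom and George under the keys "4" and "5". If you later need the array-of-objects shape back, you can rebuild it from the keys. A minimal sketch, assuming the IDs should be numeric again:

// Convert the keyed object back to an array of {ID, FirstName} records.
let backToArray = Object.keys(initialData).map(ID => ({
    ID: Number(ID),
    FirstName: initialData[ID].FirstName
}));
console.log(backToArray);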