One of my friends was asked this question in an interview:
Here's another possibility. Unlike my previous answer, it doesn't modify the arrays passed in, and it has a better asymptotic bound (O(n) instead of O(n^2), assuming constant-time hash lookups), but it uses significantly more memory.
import flash.utils.Dictionary;

/* Returns [valueUniqueToA, valueUniqueToB]. Each array is scanned once
 * against a lookup table built from the other, so the whole thing is
 * O(n), assuming constant-time dictionary lookups. */
function findUnique(a:Array, b:Array):Array {
    var aHash:Dictionary = buildHash(a);
    var bHash:Dictionary = buildHash(b);
    var uniqueFromA:int;
    var uniqueFromB:int;
    for each (var valueA:int in a) {
        if (!(valueA in bHash)) {
            uniqueFromA = valueA;
            break;
        }
    }
    for each (var valueB:int in b) {
        if (!(valueB in aHash)) {
            uniqueFromB = valueB;
            break;
        }
    }
    return [uniqueFromA, uniqueFromB];
}

/* Builds a set-like lookup table mapping each value in the array to true. */
function buildHash(a:Array):Dictionary {
    var h:Dictionary = new Dictionary();
    for each (var value:int in a) {
        h[value] = true;
    }
    return h;
}
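
For what it's worth, a quick hypothetical call, assuming each array holds exactly one value the other lacks:

// Hypothetical usage: 4 appears only in the first array, 5 only in the second.
var result:Array = findUnique([1, 2, 3, 4], [1, 2, 3, 5]);
trace(result); // prints "4,5"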