It seems to me that there are three different ways I can determine whether a given object (e.g. foo) has a given property (e.g. bar) defined:
These are all different:
foo.hasOwnProperty('bar') tells you whether foo has the property and does not perform lookup along the prototype chain.
'bar' in foo checks the prototype chain and returns true when it finds property bar in any object along the chain.
typeof foo.bar != 'undefined' returns true if foo or any object along its prototype chain has property bar and its value is not undefined.
Here is an example that demonstrates these differences:
// Demonstrates the three property-detection techniques on a prototype chain:
// foo2 ---> foo1 (foo1 serves as foo2's prototype).
const foo1 = { 'bar1': 10, 'bar2': undefined };
function ctor() {}
ctor.prototype = foo1;
const foo2 = new ctor();
foo2.bar3 = 20; // own property, set directly on foo2

// hasOwnProperty: true only for properties directly on foo2,
// regardless of their value; the prototype chain is never consulted.
console.log(foo2.hasOwnProperty('bar1')); // false (inherited from foo1)
console.log(foo2.hasOwnProperty('bar2')); // false (inherited from foo1)
console.log(foo2.hasOwnProperty('bar3')); // true  (own property)
console.log(foo2.hasOwnProperty('bar4')); // false (defined nowhere)

// 'in': true if the property exists anywhere on the prototype chain,
// even when its value is undefined.
console.log('bar1' in foo2); // true
console.log('bar2' in foo2); // true  (exists on foo1, value is undefined)
console.log('bar3' in foo2); // true
console.log('bar4' in foo2); // false

// typeof lookup: true only if the resolved value is not undefined —
// cannot distinguish "absent" from "present with value undefined".
// (Strict !== is used; typeof always yields a string, so behavior is
// identical to the loose != form.)
console.log(typeof foo2.bar1 !== 'undefined'); // true
console.log(typeof foo2.bar2 !== 'undefined'); // false (value is undefined)
console.log(typeof foo2.bar3 !== 'undefined'); // true
console.log(typeof foo2.bar4 !== 'undefined'); // false