The July 15, 2014 revision of the MDN guide on working with objects states:
If an object is created with an object initializer in a top-level script, JavaScript interprets the object each time it evaluates an expression containing the object literal.
However, in the snippet below, when objLit2.val2 is read, it does not reflect the most recent value assigned to i (10, then 100); instead it stays at 1001, the value computed from i = 1000 when objLit2 was defined. Why is that?
var i = 1000;

function iPlus3() {
  alert("iPlus3");
  return i + 3;
}

var objLit2 = {
  val: iPlus3,
  val2: i = i + 1
};

function setValue() {
  i = 10;
  console.log("objLit2Val1", objLit2.val(), objLit2.val2); // Outputs 13 1001, not 13 11
  i = 100;
  console.log("objLit2Val2", objLit2.val(), objLit2.val2); // Outputs 103 1001, not 103 101
  // If `val` is interpreted at the time of the call, why is `val2` not also interpreted on each call?
}

setValue();
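
For what it's worth, here is my current understanding, reduced to a minimal sketch (the variable and property names are mine): a property that holds a function reference runs its body on every call and reads the variable's current value, whereas a property initialized with an expression is evaluated exactly once, when the object literal itself is evaluated.

var n = 1;

var obj = {
  // Stores a function; the body runs on every call and reads the current n.
  lazy: function () { return n; },
  // The expression n * 2 is evaluated right now, once, yielding 2.
  eager: n * 2
};

n = 5;
console.log(obj.lazy());  // 5  (reads n at call time)
console.log(obj.eager);   // 2  (frozen when the literal was evaluated)

If that reading is correct, it seems to contradict the MDN sentence quoted above, which suggests the object is interpreted "each time" an expression containing the literal is evaluated. Is MDN describing something else?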