
How much faster is Node.js than Windows Script Host? And what does this tell us?
![[quote]](https://habrastorage.org/storage2/56a/733/99e/56a73399e3eddae293b47c66acc40877.png)
What prompted such praise? After all, long before the Node engine ever saw the light of day, Microsoft had its own tool for running scripts: Windows Script Host (built into Windows starting with Windows 98, and also installed along with Internet Explorer 5).
The answer to this question is obvious: Node.js is much faster. (It also has a simpler API, by the way, but speed is the main thing.)
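To give one concrete illustration of that aside (the comparison is mine, not the article's), here is the same trivial task, reading a text file and printing it, written so that it runs under either host; the file names readfile.js and readme.txt are just placeholders:

```javascript
// readfile.js: the same trivial task (read a text file and print it) written for both hosts.
// The typeof check picks whichever API the engine running the script actually provides.
if (typeof ActiveXObject != "undefined") {
    // Windows Script Host (JScript): a COM object plus a magic IOMode constant.
    var fso = new ActiveXObject("Scripting.FileSystemObject");
    WScript.Echo(fso.OpenTextFile("readme.txt", 1).ReadAll()); // 1 = ForReading
} else {
    // Node.js: one call to the built-in fs module.
    console.log(require("fs").readFileSync("readme.txt", "utf8"));
}
```

Under cscript the work goes through the COM FileSystemObject; under node it is a single call into the built-in fs module.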
But how much faster is Node than WSH?
It is not difficult to find out empirically. Take the script with which evgeniyup compared the performance of WSH against the virtual machine of his ObjectScript language yesterday, and add a dozen lines at the beginning: a "crutch" for Node.js that implements WScript.Echo on top of console.log:
```javascript
// CScript to Node.js:
if (typeof WScript == "undefined") WScript = {};
if (typeof WScript.Echo == "undefined") WScript.Echo = function(){
    var i;
    var log = '';
    for (i = 0; i < arguments.length; i++){
        log += arguments[i];
    }
    console.log(log);
};

var fannkuch = function(n){
    var p = [], q = [], s = [], sign = 1, maxflips = 0, sum = 0;
    var i;
    for (i = 1; i <= n; i++) p[i] = q[i] = s[i] = i;
    for (;;){
        // Copy and flip.
        var q1 = p[1]; // Cache 1st element.
        if (q1 != 1){
            for (i = 2; i <= n; i++) q[i] = p[i]; // Work on a copy.
            var flips = 1;
            for (;;){
                var qq = q[q1];
                if (qq == 1){ // ... until 1st element is 1.
                    sum = sum + sign * flips;
                    if (flips > maxflips){
                        maxflips = flips;
                    } // New maximum?
                    break;
                }
                q[q1] = q1;
                if (q1 >= 4){
                    var i = 2, j = q1 - 1;
                    for (;;){ var tmp = q[i]; q[i] = q[j]; q[j] = tmp; if (++i >= --j) break; }
                }
                q1 = qq; flips++;
            }
        }
        // Permute.
        if (sign == 1){
            var tmp = p[2]; p[2] = p[1]; p[1] = tmp; sign = -1; // Rotate 1<-2.
        } else {
            var tmp = p[2]; p[2] = p[3]; p[3] = tmp; sign = 1; // Rotate 1<-2 and 1<-2<-3.
            for (i = 3;; i++){
                // print "mark 4"
                var sx = s[i];
                if (sx != 1){ s[i] = sx - 1; break; }
                if (i == n) return [sum, maxflips]; // Out of permutations.
                s[i] = i;
                // Rotate 1<-...<-i+1.
                var t = p[1]; for (var j = 1; j <= i; j++){ p[j] = p[j+1]; } p[i+1] = t;
            }
        }
    }
};

function getTimeSec(){
    var d = new Date();
    return (d.getTime() + d.getMilliseconds() / 1000.0) / 1000.0;
}

var n = 10;
var start_time = getTimeSec();
var r = fannkuch(n);
var sum = r[0], flips = r[1];

WScript.Echo(
    sum, "\n",
    "Pfannkuchen(", n, ") = ", flips, "\n",
    "time = ", (getTimeSec() - start_time), "\n"
);
```
After that, it is enough to run this script twice, first in Node.js and then under the Windows Script Host, and compare the results shown in the screenshot below.
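For convenience, the two runs can even be launched from one small Node.js driver. This is only an illustrative sketch of mine, not part of the original experiment; it assumes a Windows machine with cscript.exe on the PATH, a reasonably recent Node.js (for execFileSync), and the listing above saved as fannkuch.js:

```javascript
// runboth.js: run the same benchmark file under both engines, one after the other,
// and print what each of them reports.
var execFileSync = require("child_process").execFileSync;

var script = "fannkuch.js"; // placeholder file name for the listing above

[
    ["Node.js",             process.execPath, [script]],
    ["Windows Script Host", "cscript",        ["//Nologo", script]]
].forEach(function (engine) {
    var output = execFileSync(engine[1], engine[2], { encoding: "utf8" });
    console.log("=== " + engine[0] + " ===");
    console.log(output);
});
```

Running the engines sequentially rather than in parallel keeps the two measurements from competing for the same processor.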
![[screenshot]](https://habrastorage.org/storage2/f40/639/45a/f4063945a9403e5ab38907b83d1a0a00.png)
The difference is two orders of magnitude! A computation that Node.js finishes in about a second keeps the Windows Script Host grinding for more than two minutes, which is well over a hundred times longer.
I measured both runs on the same machine.
The conclusion is also quite simple, and available to anyone: with the advent of Node.js, a JavaScript programmer is in the position of a man who wakes up one morning to discover that he has become a hundred times stronger.
In the real world, this only happens to people in superhero comics. But in programming just such a thing happens quite easily: a developer had been programming in JavaScript, then ran the very same code under Node.js and found it about a hundred times faster.
And this should give rise to further deep reflection on the prospects of running everyday scripts a hundred times faster.
Or at least an order of magnitude faster.