/**
 * Sums all prime numbers up to and including num.
 * @param {number} num - Inclusive upper bound.
 * @returns {number} Sum of every prime <= num; 0 when num < 2.
 */
function sumPrimes(num) {
  var arr = [];
  // Fixed: was `i < num`, which wrongly excluded num itself when num is prime.
  for (var i = 2; i <= num; i++) {
    var isPrime = true;
    for (var j = 2; j < i; j++) {
      if (i % j === 0) {
        isPrime = false;
        break; // one divisor is enough — no need to keep scanning
      }
    }
    if (isPrime) {
      arr.push(i);
    }
  }
  // Initial value 0 prevents a TypeError on an empty array (num < 2)
  // and avoids reassigning the parameter.
  return arr.reduce(function (a, b) {
    return a + b;
  }, 0);
}
Hi @Lijianliang1997 — your code wasn't quite working, so I made a slight modification, and it now works.
/**
 * Sums all prime numbers up to and including num.
 *
 * Uses trial division: a composite i always has a divisor no greater
 * than sqrt(i), so the inner loop only needs to run while j * j <= i.
 *
 * @param {number} num - Inclusive upper bound.
 * @returns {number} Sum of every prime <= num; 0 when num < 2.
 */
function sumPrimes(num) {
  var sum = 0;
  for (var i = 2; i <= num; i++) {
    var divisible = false;
    // The old `i !== j` guard was redundant: j < i always held.
    // Checking only up to sqrt(i) and breaking early is both correct
    // and much faster than scanning every j < i.
    for (var j = 2; j * j <= i; j++) {
      if (i % j === 0) {
        divisible = true;
        break; // found a divisor — i is composite
      }
    }
    if (divisible === false) {
      sum += i; // accumulate directly; no array/reduce needed,
                // which also makes num < 2 safely return 0
    }
  }
  return sum;
}
sumPrimes(1000);
//=> 76127 (sum of all primes up to 1000; 1000 itself is not prime)
The key was this if statement
if (i !== j && i % j === 0) { divisible = true;}
You only test divisibility when you are not dividing a number by itself. Otherwise every candidate would be flagged as divisible, since any number divides itself evenly — e.g. 2 % 2 === 0, which would wrongly mark the prime 2 as composite.