// factorialA returns n! using big.Int.MulRange, which computes the
// product of all integers in the range [1, n] with a balanced
// divide-and-conquer strategy (and yields 1 when n < 1).
func factorialA(n int64) *big.Int {
	result := new(big.Int)
	return result.MulRange(1, n)
}
// factorialB returns n! by straightforward iterated multiplication,
// yielding 1 for n < 2.
//
// A single scratch big.Int is reused for the loop factor instead of
// allocating a fresh big.NewInt on every iteration, and the redundant
// reassignment `result = result.Mul(...)` is dropped (Mul writes into
// and returns its receiver). Note this remains asymptotically slower
// than factorialA for large n: multiplying a huge accumulator by a
// one-word factor n times is O(n) big multiplications whose cost grows
// with the accumulator's size, whereas MulRange keeps operands balanced.
func factorialB(n int) *big.Int {
	result := big.NewInt(1)
	factor := new(big.Int) // scratch value reused each iteration
	for i := 2; i <= n; i++ {
		result.Mul(result, factor.SetInt64(int64(i)))
	}
	return result
}
The second function takes, for n = 1 million, about twice as long as the first. How come? I figured they do roughly the same number of multiplications. Any ideas?