# Approximations for log (n choose r)

This program calculates $\binom{n}{r}$ exactly and compares $\log \binom{n}{r}$ against two approximations of it.
#!/home/ben/software/install/bin/perl
use warnings;
use strict;
use Math::Trig;

# Tabulate ln(n choose r) against its two MacKay approximations for
# every n from 1 to 100 and every r from 0 to n.  Each output line is
# "n;r:" followed by the exact log value, approximation (1.15), their
# ratio, approximation (1.17), and its ratio.
for my $n (1..100) {
    for my $r (0..$n) {
        print "$n;$r: ";
        my $c = choose ($n, $r);
        my $ca = choose_approx ($n, $r);
        my $cb = choose_approx_better ($n, $r);
        my $lc = log ($c);
        # The ratios default to 1; divide only when both the exact log
        # and the approximation are non-zero, to avoid division by zero.
        my $ratio = ($lc && $ca) ? $lc / $ca : 1;
        my $ratiob = ($lc && $cb) ? $lc / $cb : 1;
        printf "%5g %5g %5g %5g %5g", $lc, $ca, $ratio, $cb, $ratiob;
        print "\n";
    }
    print "\n";
}

# This is an approximation of "ln (n choose r)", equation
# (1.15) of MacKay (2003).

sub choose_approx
{
    my ($n, $r) = @_;
    return $n * binary_entropy ($r / $n);
}

# This is the "better approximation" of "ln (n choose r)", equation
# (1.17) of MacKay (2003).

sub choose_approx_better
{
    my ($n, $r) = @_;
    my $approx = $n * binary_entropy ($r / $n);
    if ($r > 0 && $r != $n) {
        # The Stirling-based correction term is only defined for
        # 0 < r < n, since it takes the log of (n - r) * r.
        $approx -= log (2 * pi * ($n - $r) * $r / $n) / 2;
    }
    return $approx;
}

# This calculates the binary entropy function for $x.  At the extreme
# points $x = 0 and$x = 1, it returns 0. From MacKay (1.14).

sub binary_entropy
{
    my ($x) = @_;
    # The limit of x*log(1/x) as x -> 0 (and of the mirror term as
    # x -> 1) is 0, so handle the endpoints explicitly rather than
    # taking log(0).
    if ($x == 0 || $x == 1) {
        return 0;
    }
    # Natural-log entropy, as in MacKay (1.14).
    return $x * log (1/$x) + (1 - $x) * log (1/(1 - $x));
}

# This calculates "n choose r", n!/((n-r)!r!).  The rising product
# (n-r+1) * ... * n equals n!/(n-r)!, so dividing it by r! gives the
# binomial coefficient.

sub choose
{
    my ($n, $r) = @_;
    my $z = 1;
    for (($n - $r + 1)..$n) {
        $z *= $_;
    }
    my $c = $z / factorial ($r);
    return $c;
}

# This calculates "n factorial", n!.  Returns 1 for n = 0.

sub factorial
{
    my ($n) = @_;
    my $f = 1;
    while ($n > 1) {
        $f *= $n;
        $n--;
    }
    # Bug fix: the original ended with "return \$f;", returning a
    # REFERENCE to $f rather than the value, so choose() divided by the
    # reference's numeric address instead of by r!.  Return the number.
    return $f;
}