package Bugzilla::Leak;

use strict;
use warnings;

# Brief instructions: call Bugzilla::Leak::size() every so often.
# Don't call size() too often, though--it's slow.
# When your process is too big, call Bugzilla::Leak::dump_memory()
# and then exit. dump_memory will print out a lot of stuff to
# STDERR that will probably help you figure out what's using RAM.
#
# Warning: dump_memory seems to take quite a bit of RAM itself--
# as much as 10x what your process is already using. I don't know
# why it takes *so much* RAM.
#
# The numbers dump_memory shows for SIZE are not the real size that
# that data is taking up in RAM, most likely. It's just an approximation,
# to give you an idea of what items are taking up the most RAM.

use Data::Dumper;
use Devel::Gladiator;
use Linux::Smaps;
use Scalar::Util qw(blessed reftype);

# Memory figures (in kB, per Linux::Smaps) recorded by the previous
# call to check(), so growth can be reported as a delta.
our $last_rss    = 0;
our $last_shared = 0;

# Dumper() output longer than this is truncated before being used as a
# key in the report hash, to keep dump_memory's own RAM use in check.
use constant MAX_KEY_LEN => 512;

# check($prefix)
#
# Compare the process's current RSS and shared memory against the values
# recorded on the previous call, and warn (tagged with $prefix) about any
# growth of more than 100 kB. Updates the recorded values as a side effect.
sub check {
    my ($prefix) = @_;
    $prefix ||= '';

    my $smaps          = Linux::Smaps->new($$);
    my $current_rss    = $smaps->rss;
    my $current_shared = $smaps->shared_clean + $smaps->shared_dirty;

    my $rss_change = $current_rss - $last_rss;
    if ($rss_change > 100) {
        warn "$prefix: RSS grew by $rss_change\n";
    }

    my $shared_change = $current_shared - $last_shared;
    if ($shared_change > 100) {
        warn "$prefix: Shared grew by $shared_change\n";
    }

    $last_rss    = $current_rss;
    $last_shared = $current_shared;
}

# size()
#
# Return the process's current memory footprint in kB: RSS plus shared
# (clean + dirty), as reported by /proc/$$/smaps. Slow--don't call in
# a tight loop.
sub size {
    my $smaps          = Linux::Smaps->new($$);
    my $current_rss    = $smaps->rss;
    my $current_shared = $smaps->shared_clean + $smaps->shared_dirty;
    return $current_rss + $current_shared;
}

# dump_memory()
#
# Walk the entire Perl arena and print two reports to STDERR: every
# hash "shape" (sorted key list) and scalar value, first by approximate
# size (length of its Dumper output), then by count. Intended to be
# called just before exiting an oversized process.
sub dump_memory {
    print STDERR Devel::Gladiator::arena_table();
    my $all = Devel::Gladiator::walk_arena();
    my %info_table;
    local $Data::Dumper::Terse = 1;

    foreach my $sv (@$all) {
        # reftype() sees through blessing, so this catches plain hashes
        # and blessed-hash objects alike.
        my $type = reftype($sv);
        if (defined $type and $type eq 'HASH') {
            my @keys;
            # Restricted or tied hashes can die on "keys"; skip them.
            eval { @keys = sort keys %$sv };    # || warn "HASH ERROR: $sv";
            if (@keys) {
                # Don't track %info_table itself (its keys are our own
                # "HASH:{...}" markers). The "{" must be escaped--an
                # unescaped literal brace here is fatal in modern perls.
                next if grep { $_ =~ /^HASH:\{/ } @keys;
                my $joined_keys = join(",", @keys);
                my $key = "HASH:{$joined_keys}";
                $info_table{$key}->{count}++;
                # Don't size %INC and similar vars--they crash Dumper.
                next if grep { $_ =~ m{Bugzilla/Leak\.pm} } @keys;
                my $full_string = eval { Dumper($sv) };
                if (defined $full_string) {
                    $info_table{$key}->{size} += length($full_string);
                }
            }
            else {
                #warn "HASH ERROR: $sv";
            }
        }
        elsif (!ref $sv or ref $sv eq 'SCALAR') {
            my $as_string = eval { Dumper($sv) };
            if (defined $as_string) {
                my $str_len = length($as_string);
                # Truncate huge values to head ... tail so the report
                # keys stay a manageable size.
                if ($str_len > MAX_KEY_LEN) {
                    my $start = substr($as_string, 0, MAX_KEY_LEN / 2);
                    my $end   = substr($as_string, -(MAX_KEY_LEN / 2));
                    $as_string = "$start ... $end";
                }
                $info_table{$as_string}->{count}++;
                $info_table{$as_string}->{size} += $str_len;
            }
            else {
                #warn "SV ERROR: $sv\n";
            }
        }
    }

    # Unbuffer STDERR so a crash mid-report loses as little output as
    # possible; restore whatever handle was previously selected.
    my $previous_fh = select STDERR;
    $| = 1;
    select $previous_fh;

    print STDERR "\n\n----\nSIZE\n----\n\n";
    my @sorted_size = grep {
        defined $info_table{$_}->{size} and $info_table{$_}->{size} > 100
    } keys %info_table;
    @sorted_size = sort {
        $info_table{$a}->{size} <=> $info_table{$b}->{size}
    } @sorted_size;
    # Pop from the ascending sort to print largest first.
    while (my $key = pop @sorted_size) {
        my $size = $info_table{$key}->{size};
        $key =~ s/\n+$//s;
        print STDERR sprintf("%10d: %s\n", $size, $key);
    }

    print STDERR "\n\n------\nCOUNT\n------\n\n";
    my @sorted_count = grep {
        $info_table{$_}->{count} >= 2
    } keys %info_table;
    @sorted_count = sort {
        $info_table{$a}->{count} <=> $info_table{$b}->{count}
    } @sorted_count;
    while (my $key = pop @sorted_count) {
        my $count = delete $info_table{$key}->{count};
        $key =~ s/\n+$//s;
        print STDERR sprintf("%10d: %s\n", $count, $key);
    }
}

1;