
[orabos] Tune vm.watermark_scale_factor
The value scales with the amount of RAM in the system,
but we take away a large chunk for huge pages.
fwiesel committed Jul 18, 2024
1 parent 9414d64 commit 5967a01
Showing 1 changed file with 21 additions and 5 deletions.
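For context, the kernel expresses vm.watermark_scale_factor in fractions of 10,000: the default of 10 keeps a gap of 0.1% of memory between the kswapd watermarks, which grows with total RAM even when most of that RAM is reserved as huge pages. The arithmetic behind the factor 500 in the change below can be sketched with hypothetical numbers, assuming the 3TiB host from the code comment used the script's default 32 GiB non-hugepage reservation:

    # Hypothetical worked example: 3 TiB host, default 32 GiB outside huge pages.
    mem_total_mb=$((3 * 1024 * 1024))    # 3145728 MiB
    non_hugepages_mb=32768               # script default for rd.non_hugepages_mb
    echo $(($non_hugepages_mb * 500 / $mem_total_mb))   # prints 5 (integer division)

That reproduces the value 5 which, per the comment, stopped kswapd0 from running permanently on that host.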
@@ -4,17 +4,33 @@
 
 hugepages=$(getarg hugepages=) || hugepages=0
 
+mem_total_mb=$(($(sed -rn 's/MemTotal:\s+(.*) kB/\1/p' /proc/meminfo) / 1024 ))
+hugepagesize_mb=$(($(sed -rn 's/Hugepagesize:\s+(.*) kB/\1/p' /proc/meminfo) / 1024 ))
+
+function adopt_watermark_scale_factor() {
+    # On a 3TiB host, the default watermark_scale_factor=10 was just enough
+    # that kswapd0 was running permanently. Setting it to 5 solved the issue,
+    # but is likely a suboptimal value and only a first start.
+    # The factor 500 reproduces exactly that value at this scale, and
+    # hopefully also holds for larger hosts.
+    max_watermark_scale_factor=$(($non_hugepages_mb * 500 / $mem_total_mb))
+    watermark_scale_factor=$(</proc/sys/vm/watermark_scale_factor)
+    if [ $max_watermark_scale_factor -lt $watermark_scale_factor ]; then
+        echo $max_watermark_scale_factor > /proc/sys/vm/watermark_scale_factor
+    fi
+}
+
+if [ $hugepages -gt 0 ]; then
+    hugepages_mb=$(($hugepages * $hugepagesize_mb))
+    non_hugepages_mb=$(($mem_total_mb - $hugepages_mb))
+    adopt_watermark_scale_factor
+    exit 0
+fi
+
 non_hugepages_mb=$(getarg rd.non_hugepages_mb=) || non_hugepages_mb=32768
+hugepages=$((($mem_total_mb - $non_hugepages_mb) / $hugepagesize_mb))
 
-mem_total_mb=$(($(sed -rn 's/MemTotal:\s+(.*) kB/\1/p' /proc/meminfo) / 1024 ))
-hugepagesize_mb=$(($(sed -rn 's/Hugepagesize:\s+(.*) kB/\1/p' /proc/meminfo) / 1024 ))
-nr_hugepages=$((($mem_total_mb - $non_hugepages_mb) / $hugepagesize_mb))
-
-if [ $nr_hugepages -le 0 ]; then
+if [ $hugepages -le 0 ]; then
 exit 0
 fi
 
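To verify the effect on a booted host, a quick check might look like this (a sketch, not part of the commit; it assumes the hook above has already run):

    # Current watermark gap setting; should be at most the computed cap.
    cat /proc/sys/vm/watermark_scale_factor
    # Inputs the script derived its cap from.
    grep -E 'MemTotal|Hugepagesize|HugePages_Total' /proc/meminfo
    # kswapd0 should now be mostly idle instead of running permanently.
    ps -o pid,stat,pcpu,comm -C kswapd0

Note that adopt_watermark_scale_factor only ever lowers the setting: when the computed cap is not below the current value, the kernel default is left untouched.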
