sh: Gather statistics from Linux
#!/bin/bash
#
# Gather statistics from Linux systems
#
# Forked from https://github.com/reyjrar/graphite-scripts/blob/master/bin/linux_basic_performance_data.sh.
#
# Usage: sys.sh https://login:password@opentsdb.com/api/put
export LANGUAGE=en_US

JSON_CACHE=$(mktemp)
trap 'rm -f "$JSON_CACHE"' INT TERM EXIT
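
# Cache helpers: metrics are appended to a temp file as "name:value" lines
# and read back once collection is finished.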
add_metric() {
    echo "$1:$2" >> "$JSON_CACHE"
}

get_metrics() {
    cat "$JSON_CACHE"
}
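
# Load averages: the first three fields of /proc/loadavg are the 1/5/15-minute values.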
grab_load_metrics() {
    load=$(cat /proc/loadavg)
    set -- $load
    add_metric "system.load.1min" "$1"
    add_metric "system.load.5min" "$2"
    add_metric "system.load.15min" "$3"
}
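
# CPU usage percentages (user/nice/system/iowait) parsed from mpstat output.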
grab_cpu_metrics() {
/usr/bin/mpstat |grep '^[0-9]' | grep -v CPU | while read line; do
set -- $line
cpu=$2
add_metric "system.cpu.${cpu}.user" "$3"
add_metric "system.cpu.${cpu}.nice" "$4"
add_metric "system.cpu.${cpu}.system" "$5"
add_metric "system.cpu.${cpu}.iowait" "$6"
done
}
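
# Overall CPU percentages taken from the avg-cpu values line (4th line) of iostat.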
grab_iostat_metrics() {
    iostat_line=$(iostat | awk 'FNR==4')
    rc=$?
    if [ $rc -eq 0 ]; then
        set -- $iostat_line
        add_metric "system.iostat.user" "$1"
        add_metric "system.iostat.nice" "$2"
        add_metric "system.iostat.system" "$3"
        add_metric "system.iostat.iowait" "$4"
    fi
}
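
# Memory and swap usage in bytes, parsed from free.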
grab_memory_metrics() {
    /usr/bin/free -b | grep -v cache | tail -2 | while read line; do
        set -- $line
        k=$(echo "$1" | tr '[:upper:]' '[:lower:]' | sed -e 's/://')
        add_metric "system.memory.$k.total" "$2"
        add_metric "system.memory.$k.used" "$3"
        add_metric "system.memory.$k.free" "$4"
        [ -n "$5" ] && add_metric "system.memory.$k.shared" "$5"
        [ -n "$6" ] && add_metric "system.memory.$k.buffers" "$6"
        [ -n "$7" ] && add_metric "system.memory.$k.cached" "$7"
    done
}
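
# Disk I/O counters: discover block devices from /proc/partitions, then read
# their per-device counters from /proc/diskstats.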
grab_disks_metrics() {
    declare -a disks=()
    declare -r disk_prefixes=( 'sd' 'hd' 'c0d' 'c1d' )
    # Disk Discovery
    if [ -f /proc/partitions ]; then
        while read line; do
            disk=$(echo $line | awk '{print $4}')
            for prefix in "${disk_prefixes[@]}"; do
                [ -z "$disk" ] && continue
                if [[ "$disk" =~ "$prefix" ]]; then
                    disks+=( "$disk" )
                    break
                fi
            done
        done < /proc/partitions
    fi
    if [ ${#disks[@]} -gt 0 ]; then
        if [ -f /proc/diskstats ]; then
            while read line; do
                set -- $line
                if [[ "${disks[@]}" =~ "$3" ]]; then
                    disk=$3
                    disk=${disk/\//_}
                    add_metric "system.disks.$disk.read.issued" "$4"
                    add_metric "system.disks.$disk.read.ms" "$7"
                    add_metric "system.disks.$disk.write.complete" "$8"
                    add_metric "system.disks.$disk.write.ms" "${11}"
                    add_metric "system.disks.$disk.io.current" "${12}"
                    add_metric "system.disks.$disk.io.ms" "${13}"
                fi
            done < /proc/diskstats
        fi
    fi
}
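
# Filesystem usage for local /dev-backed mounts, parsed from df.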
grab_fs_metrics() {
    df -Pl -x tmpfs | while read line; do
        set -- $line
        dev=$1
        total=$2
        used=$3
        available=$4
        percentage=$5
        path_orig=$6
        if [[ "$dev" =~ ^/dev ]]; then
            case "$path_orig" in
                "/")     spath="slash" ;;
                "/boot") spath="boot" ;;
                *)       tmp=${path_orig:1}; spath=${tmp//\//_} ;;
            esac
            add_metric "system.fs.$spath.total" "$total"
            add_metric "system.fs.$spath.used" "$used"
            add_metric "system.fs.$spath.available" "$available"
        fi
    done
}
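
# TCP connection counters from netstat (both English and French output are matched).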
grab_tcp_connections_metrics() {
    /bin/netstat -s --tcp | egrep "(connections.* opening|connexions* ouvertes)" | while read line; do
        set -- $line
        add_metric "system.tcp.connections.$2" "$1"
    done
    tcp_failed=$(/bin/netstat -s --tcp | egrep "(failed connection attempts|tentatives de connexion.*hou)" | awk '{print $1}')
    add_metric "system.tcp.connections.failed" "$tcp_failed"
}
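
# TCP reset packet counters from netstat.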
grab_tcp_reset_packets_metrics() {
    /bin/netstat -s --tcp | grep reset | grep -v due | awk '{print $1 " " $NF}' | while read line; do
        set -- $line
        add_metric "system.tcp.resets.$2" "$1"
    done
}
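
# UDP packet counters from netstat.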
grab_udp_packets_metrics() {
    /bin/netstat -s --udp | grep packets | grep -v unknown | while read line; do
        set -- $line
        add_metric "system.udp.packets.$3" "$1"
    done
}
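
# Convert cached "name:value" lines into an OpenTSDB /api/put JSON array,
# tagging every data point with the supplied tag object.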
kv_to_json() {
    local empty="{}"
    local tag=${1:-$empty}
    local now=$(date +%s)
    local comma=''
    local tags='"tags":'"$tag"
    echo '['
    while read line; do
        sed "s/,/./g" <<< "$line" | sed -r \
            -e 's|^(.*):([0-9\.]*)$|'$comma'{"metric":"\1","timestamp":'$now',"value":\2,'$tags'}|'
        comma=','
    done < /dev/stdin
    echo ']'
}
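
# POST the JSON body read from stdin to the given URL and report the HTTP status and timing.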
put() {
    declare url=$1
    curl -s -XPOST "$url" -d @- -w '{"status":%{http_code},"time":%{time_total}}\n'
}
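
# Collect all metrics, then either print them (dry run) or push them to OpenTSDB.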
main() {
    declare url=${1:-"dry"}
    grab_load_metrics
    grab_cpu_metrics
    grab_iostat_metrics
    grab_memory_metrics
    grab_fs_metrics
    grab_disks_metrics
    grab_tcp_connections_metrics
    grab_tcp_reset_packets_metrics
    grab_udp_packets_metrics
    if [[ "$url" == "dry" ]]; then
        get_metrics
    else
        get_metrics \
            | kv_to_json '{"hostname":"'$(hostname)'"}' \
            | put "$url"
    fi
}

main "$@"