rrd数据导入mysql库
时间:2010-05-04 来源:fly_fisher
#!/bin/sh
# Import one day of Nagios perfdata (RRD files) into MySQL as CSV rows.
# Required directories must exist: tmp/, list/, log/ under rrd-mysql/.
Dir=/monitor/nagios/share/perfdata
Tmpfile=/monitor/nagios/shell/rrd-mysql/tmp/tempfile.txt
Cpulistfile=/monitor/nagios/shell/rrd-mysql/list/Cpulistfile.txt
Connlistfile=/monitor/nagios/shell/rrd-mysql/list/Connlistfile.txt
Resultdir=/monitor/nagios/shell/rrd-mysql/log
tmptdatefile=/monitor/nagios/shell/rrd-mysql/tmp/tmptdatefile.txt
tmpresultfile=/monitor/nagios/shell/rrd-mysql/tmp/tmpresultfile.txt
mailfile=/monitor/nagios/shell/rrd-mysql/tmp/mail.txt
Savedate=$(date +%Y%m%d)
Nowdate=$(date +%s)
# Snap the fetch end time to a fixed 86400-second grid anchored at epoch
# 1269964500, so consecutive daily runs always end on the same boundary and
# never export overlapping rows. Plain shell arithmetic replaces the original
# expr/let/bc chain (three external processes for integer math).
a=$(( Nowdate - 1269964500 ))
b=$(( a / 86400 ))
enddate=$(( 1269964500 + b * 86400 ))
# Getrrddate PATH
#   Fetch one day of 5-minute AVERAGE samples from the RRD file PATH and
#   append CSV rows "host,service,channel,YYYY-mm-dd HH:MM:SS,value" to
#   ${Resultdir}/<service><yyyymmdd>.log for later LOAD DATA INFILE.
#   Accepts the RRD path as $1, falling back to the loop global $i so the
#   original no-argument callers keep working.
#   Globals read: i, enddate, Tmpfile, tmptdatefile, tmpresultfile,
#                 Resultdir, Savedate.
# (POSIX function syntax — the old `function` keyword is a bashism that
# conflicts with the #!/bin/sh shebang.)
Getrrddate()
{
rrd=${1:-$i}
/usr/local/rrdtool-1.2.6/bin/rrdtool fetch "$rrd" --start end-1day --end "$enddate" AVERAGE -r 300 > "$Tmpfile"
# Path layout: /monitor/nagios/share/perfdata/<host>/<service>.rrd
# (fields 6 and 7 when split on "/").
Hostlist=$(echo "$rrd" | awk -F "/" '{print $6}')
# Anchor .rrd at end of string; the original 's/.rrd//g' had an unescaped
# dot, so it would also strip e.g. "Xrrd" anywhere in the service name.
Servicelist=$(echo "$rrd" | awk -F "/" '{print $7}' | sed -e 's/\.rrd$//')
# Channel = host name with all digits removed (e.g. web01 -> web).
Channel=$(echo "$rrd" | awk -F "/" '{print $6}' | sed -e 's/[0-9]//g')
# rrdtool fetch prints a header (2 lines), then "epoch: value" rows.
# Column 1: epoch seconds -> "YYYY-mm-dd HH:MM:SS" for the DATETIME column.
sed -n '3,$p' "$Tmpfile" | awk -F ":" '{print $1}' \
  | xargs -I{} date -d '1970-01-01 UTC {} seconds' +"%Y-%m-%d %T" > "$tmptdatefile"
# Column 2: value in scientific notation -> fixed-point with 2 decimals.
sed -n '3,$p' "$Tmpfile" | awk '{print $2}' \
  | xargs -I{} printf "%.2f\n" {} > "$tmpresultfile"
/usr/bin/paste "$tmptdatefile" "$tmpresultfile" > "$Tmpfile"
# NOTE(review): the '+'->',' substitution looks like a leftover from when raw
# exponent values (e.g. 1.2e+05) reached this stage; kept as a no-op safeguard.
# awk -v passes the shell values safely instead of splicing quotes by hand.
sed 's/+/,/g' "$Tmpfile" \
  | awk -v h="$Hostlist" -v s="$Servicelist" -v c="$Channel" \
        '{print h "," s "," c "," $1 " " $2 "," $3}' >> "${Resultdir}/${Servicelist}${Savedate}.log"
}
# Build the CPU and IP-connection RRD file lists. Let the shell glob the
# paths and print one per line — parsing `ls -l` field 9 breaks on names
# with spaces and on different ls formats.
printf '%s\n' /monitor/nagios/share/perfdata/*/*.rrd | grep "CPU" > "$Cpulistfile"
printf '%s\n' /monitor/nagios/share/perfdata/*/*.rrd | grep -i "IP_conn" > "$Connlistfile"
# --- disabled: skip RRDs not modified within the last 24h -----------------
# (kept for reference; superseded by the nan-filter and the fixed end time.
#  BUG in the original: the test read "$c", which was never assigned — it
#  should compare $Tmpdate, fixed below in case this is ever re-enabled.)
#while IFS= read -r i; do
#  Modifydate=$(/usr/bin/stat "$i" | grep Modify | awk '{print $2}')
#  log_time=$(date -d "${Modifydate}" +%s)
#  Tmpdate=$(( Nowdate - log_time ))
#  Tmphost=$(echo "$i" | awk -F "/" '{print $6}')
#  if [ "$Tmpdate" -gt "86400" ]; then
#    /bin/sed -i "/$Tmphost/d" "${Cpulistfile}"
#  fi
#done < "${Cpulistfile}"
#while IFS= read -r i; do
#  Modifydate=$(/usr/bin/stat "$i" | grep Modify | awk '{print $2}')
#  log_time=$(date -d "${Modifydate}" +%s)
#  Tmpdate=$(( Nowdate - log_time ))
#  Tmphost=$(echo "$i" | awk -F "/" '{print $6}')
#  if [ "$Tmpdate" -gt "86400" ]; then
#    /bin/sed -i "/$Tmphost/d" "${Connlistfile}"
#  fi
#done < "${Connlistfile}"
# --------------------------------------------------------------------------
# Export every listed RRD. `while read -r` replaces the backtick-cat loop,
# which word-split paths; $i stays set for the function's global fallback.
while IFS= read -r i; do
  Getrrddate "$i"
done < "${Cpulistfile}"
while IFS= read -r i; do
  Getrrddate "$i"
done < "${Connlistfile}"
# Drop rows where rrdtool reported "nan" (intervals with no data).
/bin/sed -i "/nan/d" "${Resultdir}"/*"${Savedate}".log
# SECURITY NOTE(review): the password on the command line is visible to every
# user via `ps`; prefer --defaults-extra-file or ~/.my.cnf in production.
/usr/local/mysql/bin/mysql -u root -D rrddate -pbj7n48dM <<EOF
load data infile '${Resultdir}/CPU_Load${Savedate}.log' into table daily_cpu_load FIELDS TERMINATED BY ',';
load data infile '${Resultdir}/IP_conns${Savedate}.log' into table daily_ip_conn FIELDS TERMINATED BY ',';
EOF
脚本想法:
由于rrd是个循环(环形)数据库,旧数据会被覆盖,因此希望把数据导入MySQL库,以便长期保留精确数据。
解决问题:
1.时间格式转换 原格式为1269964500是据1970年1月1日的秒数要转换成2010-03-30 23:55格式(数据库里定义)
date -d '1970-01-01 UTC 1269964500 seconds' +"%Y-%m-%d %T"
2.数字格式转换 原格式为指数格式转换为小数点10进制格式
printf "%.2f\n"
3.数据重复问题
定义一个时间1269964500,采用据当前时间的天数*一天的秒数86400,每日执行时依据此规则和定义时间 相加。
4.错误数据 nan清除
/bin/sed -i "/nan/d"
5.判断更新时间(注释掉部分),由于清除了nan数据和定义了时间,暂时没必要做了。
# Import one day of Nagios perfdata (RRD files) into MySQL as CSV rows.
# Required directories must exist: tmp/, list/, log/ under rrd-mysql/.
Dir=/monitor/nagios/share/perfdata
Tmpfile=/monitor/nagios/shell/rrd-mysql/tmp/tempfile.txt
Cpulistfile=/monitor/nagios/shell/rrd-mysql/list/Cpulistfile.txt
Connlistfile=/monitor/nagios/shell/rrd-mysql/list/Connlistfile.txt
Resultdir=/monitor/nagios/shell/rrd-mysql/log
tmptdatefile=/monitor/nagios/shell/rrd-mysql/tmp/tmptdatefile.txt
tmpresultfile=/monitor/nagios/shell/rrd-mysql/tmp/tmpresultfile.txt
mailfile=/monitor/nagios/shell/rrd-mysql/tmp/mail.txt
Savedate=$(date +%Y%m%d)
Nowdate=$(date +%s)
# Snap the fetch end time to a fixed 86400-second grid anchored at epoch
# 1269964500, so consecutive daily runs always end on the same boundary and
# never export overlapping rows. Plain shell arithmetic replaces the original
# expr/let/bc chain (three external processes for integer math).
a=$(( Nowdate - 1269964500 ))
b=$(( a / 86400 ))
enddate=$(( 1269964500 + b * 86400 ))
# Getrrddate PATH
#   Fetch one day of 5-minute AVERAGE samples from the RRD file PATH and
#   append CSV rows "host,service,channel,YYYY-mm-dd HH:MM:SS,value" to
#   ${Resultdir}/<service><yyyymmdd>.log for later LOAD DATA INFILE.
#   Accepts the RRD path as $1, falling back to the loop global $i so the
#   original no-argument callers keep working.
#   Globals read: i, enddate, Tmpfile, tmptdatefile, tmpresultfile,
#                 Resultdir, Savedate.
# (POSIX function syntax — the old `function` keyword is a bashism that
# conflicts with the #!/bin/sh shebang.)
Getrrddate()
{
rrd=${1:-$i}
/usr/local/rrdtool-1.2.6/bin/rrdtool fetch "$rrd" --start end-1day --end "$enddate" AVERAGE -r 300 > "$Tmpfile"
# Path layout: /monitor/nagios/share/perfdata/<host>/<service>.rrd
# (fields 6 and 7 when split on "/").
Hostlist=$(echo "$rrd" | awk -F "/" '{print $6}')
# Anchor .rrd at end of string; the original 's/.rrd//g' had an unescaped
# dot, so it would also strip e.g. "Xrrd" anywhere in the service name.
Servicelist=$(echo "$rrd" | awk -F "/" '{print $7}' | sed -e 's/\.rrd$//')
# Channel = host name with all digits removed (e.g. web01 -> web).
Channel=$(echo "$rrd" | awk -F "/" '{print $6}' | sed -e 's/[0-9]//g')
# rrdtool fetch prints a header (2 lines), then "epoch: value" rows.
# Column 1: epoch seconds -> "YYYY-mm-dd HH:MM:SS" for the DATETIME column.
sed -n '3,$p' "$Tmpfile" | awk -F ":" '{print $1}' \
  | xargs -I{} date -d '1970-01-01 UTC {} seconds' +"%Y-%m-%d %T" > "$tmptdatefile"
# Column 2: value in scientific notation -> fixed-point with 2 decimals.
sed -n '3,$p' "$Tmpfile" | awk '{print $2}' \
  | xargs -I{} printf "%.2f\n" {} > "$tmpresultfile"
/usr/bin/paste "$tmptdatefile" "$tmpresultfile" > "$Tmpfile"
# NOTE(review): the '+'->',' substitution looks like a leftover from when raw
# exponent values (e.g. 1.2e+05) reached this stage; kept as a no-op safeguard.
# awk -v passes the shell values safely instead of splicing quotes by hand.
sed 's/+/,/g' "$Tmpfile" \
  | awk -v h="$Hostlist" -v s="$Servicelist" -v c="$Channel" \
        '{print h "," s "," c "," $1 " " $2 "," $3}' >> "${Resultdir}/${Servicelist}${Savedate}.log"
}
# Build the CPU and IP-connection RRD file lists. Let the shell glob the
# paths and print one per line — parsing `ls -l` field 9 breaks on names
# with spaces and on different ls formats.
printf '%s\n' /monitor/nagios/share/perfdata/*/*.rrd | grep "CPU" > "$Cpulistfile"
printf '%s\n' /monitor/nagios/share/perfdata/*/*.rrd | grep -i "IP_conn" > "$Connlistfile"
# --- disabled: skip RRDs not modified within the last 24h -----------------
# (kept for reference; superseded by the nan-filter and the fixed end time.
#  BUG in the original: the test read "$c", which was never assigned — it
#  should compare $Tmpdate, fixed below in case this is ever re-enabled.)
#while IFS= read -r i; do
#  Modifydate=$(/usr/bin/stat "$i" | grep Modify | awk '{print $2}')
#  log_time=$(date -d "${Modifydate}" +%s)
#  Tmpdate=$(( Nowdate - log_time ))
#  Tmphost=$(echo "$i" | awk -F "/" '{print $6}')
#  if [ "$Tmpdate" -gt "86400" ]; then
#    /bin/sed -i "/$Tmphost/d" "${Cpulistfile}"
#  fi
#done < "${Cpulistfile}"
#while IFS= read -r i; do
#  Modifydate=$(/usr/bin/stat "$i" | grep Modify | awk '{print $2}')
#  log_time=$(date -d "${Modifydate}" +%s)
#  Tmpdate=$(( Nowdate - log_time ))
#  Tmphost=$(echo "$i" | awk -F "/" '{print $6}')
#  if [ "$Tmpdate" -gt "86400" ]; then
#    /bin/sed -i "/$Tmphost/d" "${Connlistfile}"
#  fi
#done < "${Connlistfile}"
# --------------------------------------------------------------------------
# Export every listed RRD. `while read -r` replaces the backtick-cat loop,
# which word-split paths; $i stays set for the function's global fallback.
while IFS= read -r i; do
  Getrrddate "$i"
done < "${Cpulistfile}"
while IFS= read -r i; do
  Getrrddate "$i"
done < "${Connlistfile}"
# Drop rows where rrdtool reported "nan" (intervals with no data).
/bin/sed -i "/nan/d" "${Resultdir}"/*"${Savedate}".log
# SECURITY NOTE(review): the password on the command line is visible to every
# user via `ps`; prefer --defaults-extra-file or ~/.my.cnf in production.
/usr/local/mysql/bin/mysql -u root -D rrddate -pbj7n48dM <<EOF
load data infile '${Resultdir}/CPU_Load${Savedate}.log' into table daily_cpu_load FIELDS TERMINATED BY ',';
load data infile '${Resultdir}/IP_conns${Savedate}.log' into table daily_ip_conn FIELDS TERMINATED BY ',';
EOF
脚本想法:
由于rrd是个循环(环形)数据库,旧数据会被覆盖,因此希望把数据导入MySQL库,以便长期保留精确数据。
解决问题:
1.时间格式转换 原格式为1269964500是据1970年1月1日的秒数要转换成2010-03-30 23:55格式(数据库里定义)
date -d '1970-01-01 UTC 1269964500 seconds' +"%Y-%m-%d %T"
2.数字格式转换 原格式为指数格式转换为小数点10进制格式
printf "%.2f\n"
3.数据重复问题
定义一个时间1269964500,采用据当前时间的天数*一天的秒数86400,每日执行时依据此规则和定义时间 相加。
4.错误数据 nan清除
/bin/sed -i "/nan/d"
5.判断更新时间(注释掉部分),由于清除了nan数据和定义了时间,暂时没必要做了。
相关阅读 更多 +