#!/bin/bash
# Allan Parsons (allan.parsons@gmail.com)
# This was a rewrite by Allan Parsons
#
# Original Author: Pasha "p01nt" Klets <pasha@klets.name>
#
##DEBUGDEBUG
#MUNIN_LIBDIR=/usr/share/munin
#. $MUNIN_LIBDIR/plugins/plugin.sh
#socket="/var/run/haproxy.sock"
# Munin wildcard plugin setup: the part of the (symlinked) script name
# after the last underscore selects which graph/backend this instance is.
name=$(basename "$0")   # quote $0: paths with spaces broke `basename $0`
title=${name##*_}       # last "_"-separated field (was: echo | awk -F_ '{print $NF}')

# haproxy reports the per-backend aggregate row under svname "BACKEND".
SVNAME='BACKEND'

# Space-separated list of backends to graph; $backend is presumably set by
# munin's plugin environment configuration (env.backend) — TODO confirm.
LIST=$backend
#######################################
# Look up one field from haproxy's CSV statistics.
#
# Stats are fetched from $url (via curl) or $socket (via socat); if both
# are configured, the socket wins (same precedence as before: the socket
# result overwrote the curl result).  The CSV header line is used to
# locate the requested column by name.
#
# Globals:   url, socket (read only)
# Arguments: $1 - proxy name (pxname) to match
#            $2 - svname whose "PXNAME,SVNAME" row is excluded (the
#                 backend aggregate row)
#            $3 - CSV column name to extract (e.g. svname, scur)
#            $4 - optional server name; when given, print that server's
#                 value of column $3; otherwise print column 2 (svname)
#                 of every matching row
# Outputs:   the value(s) on stdout, space separated
#######################################
parse_url() {
    local PXNAME="$1"
    local SVNAME="$2"
    local VALUE="$3"
    # SERVERNAME is local on purpose: the original kept it global, so a
    # 3-argument call made after a 4-argument call silently reused the
    # stale server name and returned a truncated server list.
    local SERVERNAME=""
    if [ ! -z "$4" ]; then
        SERVERNAME="$4"
    fi

    # Fetch the statistics exactly once (the original ran curl/socat
    # twice per call: once for the header, once for the data rows).
    local stats=""
    if [ ! -z "$socket" ]; then
        stats=$(echo "show stat" | socat unix-connect:"$socket" stdio)
    elif [ ! -z "$url" ]; then
        stats=$(curl -s "$url")
    fi

    # Header (without the leading "# ") and the matching data rows with
    # commas turned into spaces.
    local LINE1 LINE2
    LINE1=$(echo "$stats" | head -1 | sed 's/# //')
    LINE2=$(echo "$stats" | grep "$PXNAME" | grep -v "$PXNAME,$SVNAME" | tr ',' ' ')

    # Split the CSV header on commas to get the column names.
    local OIFS=$IFS
    IFS=","
    local ARRAY1
    ARRAY1=($LINE1)
    IFS=$OIFS

    local RVAL=""
    if [ -n "$SERVERNAME" ]; then
        # Find the requested column by name in the header.
        local i
        for ((i = 0; i < ${#ARRAY1[@]}; ++i)); do
            if [[ "${ARRAY1[$i]}" == "${VALUE}" ]]; then
                # Column i (0-based) is field i+1 for cut.
                # NOTE(review): substring match — "web1" also matches
                # "web10"; consider awk '$2 == name' for exactness.
                RVAL=$(echo "$LINE2" | grep "$SERVERNAME" | cut -d" " -f $((i + 1)))
            fi
        done
    else
        # No server given: print column 2 (svname) of every matching row.
        # The original re-queried the socket unconditionally here, which
        # broke URL-only setups; reuse the rows we already fetched.
        RVAL=$(echo "$LINE2" | awk '{print $2}')
    fi

    # Unquoted on purpose: collapses the newline-separated values into a
    # single space-separated line, which is what callers iterate over.
    echo $RVAL
}
##
## Main
##

# Graph labels for munin, derived from the plugin instance name.
graph_title="${title} sessions by servers"
graph_vlabel="${title}"
# munin capability probe: this plugin always reports itself as usable.
case "$1" in
    autoconf)
        echo yes
        exit 0
        ;;
esac
#######################################
# Build the munin field id for one backend/server pair.
#
# The id embeds an md5 of "<backend>_<server>" so the same server name
# behind two different backends gets two distinct fields.  (The original
# wrote `$s_` and `$i_$s` inside double quotes, which expand the *unset*
# variables "s_" and "i_": the hash covered only the server name, so
# identically named servers in different backends collided.  The literal
# server name is deliberately left out of the id itself, since names may
# contain characters munin forbids in field names.)
# Arguments: $1 - backend, $2 - server
# Outputs:   field id on stdout
#######################################
field_id() {
    echo "hsessionsbyservers_$(echo "${1}_${2}" | md5sum | cut -d - -f1 | sed 's/ //g')"
}

if [ "$1" = "config" ]; then

    echo "graph_category loadbalancer"
    echo "graph_title ${graph_title}"
    echo "graph_vlabel ${graph_vlabel}"
    #echo "graph_printf %.0f"

    # One data series per server of each configured backend.
    for i in ${LIST}; do
        SERVERLIST=$(parse_url ${i} ${SVNAME} svname)
        for s in $SERVERLIST; do
            f=$(field_id "$i" "$s")   # compute the md5 once, not 4 times
            echo "${f}.label $s"
            echo "${f}.type DERIVE"
            echo "${f}.info Active Sessions for $s"
            echo "${f}.min 0"
            #echo "${f}.draw AREASTACK"
        done
    done

    exit 0
fi

# Fetch mode: report the current session count (scur) for every server
# of every configured backend.
for i in ${LIST}; do
    SERVERLIST=$(parse_url ${i} ${SVNAME} svname)
    for s in $SERVERLIST; do
        val=$(parse_url ${i} ${SVNAME} scur ${s})
        echo "$(field_id "$i" "$s").value ${val}"
    done
done