Nagios scripts for monitoring Hadoop

1. [root@localhost libexec]# cat check_hadoop_dfs.py
#!/usr/bin/env python
'''
Created on 20131101

@author: yangyang.feng
'''

import sys
from optparse import OptionParser
import urllib
import re


def get_dfs_free_percent():
    urlItem = urllib.urlopen("http://192.168.20.201:50070/dfshealth.jsp")
    html = urlItem.read()
    urlItem.close()
    return float(re.findall('.+<td id="col1"> DFS Remaining%<td id="col2"> :<td id="col3">\\s+(.+)%<tr class="rowNormal">.+', html)[0])


if __name__ == '__main__':


    parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
    parser.add_option("-w", "--warning", type="int", dest="w", default=30, help="total dfs used percent")
    parser.add_option("-c", "--critical", type="int", dest="c", default=20, help="total dfs used percent")
    (options, args) = parser.parse_args()


    if(options.c >= options.w):
        print '-w must be greater than -c'
        sys.exit(1)


    dfs_free_percent = get_dfs_free_percent()


    if(dfs_free_percent <= options.c):
        print 'CRITICAL HADOOP DFS. free %d%%' %(dfs_free_percent)
        sys.exit(2)
    elif(dfs_free_percent <= options.w):
        print 'WARNING HADOOP DFS. free %d%%' %(dfs_free_percent)
        sys.exit(1)
    else:
        print 'OK HADOOP DFS. free %d%%' %(dfs_free_percent)
        sys.exit(0)

[root@localhost libexec]# 
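These plugins are written for Python 2 (print statements, urllib.urlopen). For reference, the sketch below is a Python 3 port of the DFS check; it is not part of the original post, and the NameNode address and regular expression are copied from the script above, so it still depends on this Hadoop release's dfshealth.jsp layout.

#!/usr/bin/env python3
'''
Sketch only: a Python 3 port of check_hadoop_dfs.py. The URL and the regular
expression are taken from the Python 2 script above and assume the
dfshealth.jsp page layout of this Hadoop release.
'''
import re
import sys
import urllib.request
from optparse import OptionParser

def get_dfs_free_percent():
    # Fetch the NameNode status page and pull out the "DFS Remaining%" cell.
    html = urllib.request.urlopen("http://192.168.20.201:50070/dfshealth.jsp").read().decode("utf-8", "replace")
    pattern = r'.+<td id="col1"> DFS Remaining%<td id="col2"> :<td id="col3">\s+(.+)%<tr class="rowNormal">.+'
    return float(re.findall(pattern, html)[0])

if __name__ == '__main__':
    parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
    parser.add_option("-w", "--warning", type="int", dest="w", default=30, help="warning threshold for DFS free percent")
    parser.add_option("-c", "--critical", type="int", dest="c", default=20, help="critical threshold for DFS free percent")
    (options, args) = parser.parse_args()

    if options.c >= options.w:
        print('-w must be greater than -c')
        sys.exit(1)

    free = get_dfs_free_percent()
    if free <= options.c:
        print('CRITICAL HADOOP DFS. free %d%%' % free)
        sys.exit(2)
    elif free <= options.w:
        print('WARNING HADOOP DFS. free %d%%' % free)
        sys.exit(1)
    else:
        print('OK HADOOP DFS. free %d%%' % free)
        sys.exit(0)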


2. [root@localhost libexec]# cat check_hadoop_datanode.py
#!/usr/bin/env python
'''
Created on 20131101

@author: yangyang.feng
'''

import sys
from optparse import OptionParser
import urllib
import re


def get_value():
    urlItem = urllib.urlopen("http://192.168.20.201:50070/dfshealth.jsp")
    html = urlItem.read()
    urlItem.close()
    return float(re.findall('.+Live Nodes</a> <td id="col2"> :<td id="col3">\\s+(\d+)<tr class="rowAlt">.+', html)[0])


if __name__ == '__main__':


    parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
    parser.add_option("-w", "--warning", type="int", dest="w", default=1)
    parser.add_option("-c", "--critical", type="int", dest="c", default=2)
    (options, args) = parser.parse_args()


    if(options.c >= options.w):
        print '-w must be greater than -c'
        sys.exit(1)


    value = get_value()


    if(value <= options.c):
        print 'CRITICAL - Live Nodes %d' %(value)
        sys.exit(2)
    elif(value <= options.w):
        print 'WARNING - Live Nodes %d' %(value)
        sys.exit(1)
    else:
        print 'OK - Live Nodes %d' %(value)
        sys.exit(0)
[root@localhost libexec]#
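Each plugin can be run by hand before it is wired into Nagios; the exit status (0, 1, 2) is what Nagios maps to OK, WARNING, and CRITICAL. The session below is only illustrative and assumes a cluster that is currently reporting four live DataNodes:

[root@localhost libexec]# ./check_hadoop_datanode.py -w 3 -c 1
OK - Live Nodes 4
[root@localhost libexec]# echo $?
0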


3. [root@localhost libexec]# cat check_hadoop_tasktracker.py
#!/usr/bin/env python
'''
Created on 20131101

@author: yangyang.feng
'''

import sys
from optparse import OptionParser
import urllib
import re


def get_value():
    urlItem = urllib.urlopen("http://192.168.20.201:50030/jobtracker.jsp")
    html = urlItem.read()
    urlItem.close()
    return float(re.findall('.+<a href="machines.jsp\?type=active">(\d+)</a>.+', html)[0])


if __name__ == '__main__':


    parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
    parser.add_option("-w", "--warning", type="int", dest="w", default=2)
    parser.add_option("-c", "--critical", type="int", dest="c", default=1)
    (options, args) = parser.parse_args()


    if(options.c >= options.w):
        print '-w must be greater than -c'
        sys.exit(1)


    value = get_value()


    if(value <= options.c):
        print 'CRITICAL HADOOP Live Tasktracker: %d' %(value)
        sys.exit(2)
    elif(value <= options.w):
        print 'WARNING HADOOP Live Tasktracker: %d' %(value)
        sys.exit(1)
    else:
        print 'OK HADOOP Live Tasktracker: %d' %(value)
        sys.exit(0)
[root@localhost libexec]#
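To have Nagios actually schedule these checks, the plugins still need command and service definitions. The snippet below is a minimal sketch, not part of the original post: the $USER1$ plugin directory, the hadoop-namenode host name, and the 30/20 thresholds are assumptions to be adapted to the local setup.

define command{
        command_name            check_hadoop_dfs
        command_line            $USER1$/check_hadoop_dfs.py -w $ARG1$ -c $ARG2$
        }

define service{
        use                     generic-service
        host_name               hadoop-namenode
        service_description     HADOOP DFS Free
        check_command           check_hadoop_dfs!30!20
        }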

Reposted from blog.csdn.net/u011648187/article/details/14000705