#!/usr/bin/env python
"""=cut
=head1 NAME
celery_tasks - Munin plugin to monitor the number of Celery tasks with specified names.
=head1 REQUIREMENTS
- Python
- celery (http://celeryproject.org/)
- celerymon (http://github.com/ask/celerymon)
Note: don't forget to enable sending of the events on the celery daemon - run it with the --events option
=head1 CONFIGURATION
Default configuration:
None

You must set the name of at least one task you want to monitor (multiple names are separated by a comma).

For example:
[celery_tasks]
env.tasks myapp.tasks.SendEmailTask,myapp2.tasks.FetchUserDataTask

This would monitor the number of tasks for the tasks named "myapp.tasks.SendEmailTask" and "myapp2.tasks.FetchUserDataTask".
=head1 MAGIC MARKERS
#%# family=manual
#%# capabilities=autoconf
=head1 AUTHOR
Tomaz Muraus (http://github.com/Kami/munin-celery)
=head1 LICENSE
GPLv2
=cut"""
import os
import sys
import urllib

# json is in the standard library since Python 2.6; fall back to the
# external simplejson package on older interpreters.  Catch only
# ImportError -- a bare except would also swallow e.g. KeyboardInterrupt.
try:
    import json
except ImportError:
    import simplejson as json
# Base URL of the celerymon web server (overridable via env.api_url).
API_URL = 'http://localhost:8989'

# celerymon REST endpoints, keyed by the logical name passed to get_data().
# Entries containing %s are filled in with positional arguments.
URL_ENDPOINTS = {
    'workers': '/api/worker/',
    'worker_tasks': '/api/worker/%s/tasks',
    'tasks': '/api/task/',
    'task_names': '/api/task/name/',
    'task_details': '/api/task/name/%s',
}

# All task states reported by Celery.
TASK_STATES = (
    'PENDING',
    'RECEIVED',
    'STARTED',
    'SUCCESS',
    'FAILURE',
    'REVOKED',
    'RETRY',
)
def get_data(what, api_url, *args):
    """Fetch and JSON-decode data from a celerymon endpoint.

    what    -- key into URL_ENDPOINTS selecting the endpoint
    api_url -- base URL of the celerymon web server
    args    -- values interpolated into the endpoint path (e.g. a task name)

    Prints an error and exits the process if the server is unreachable.
    """
    try:
        # Parenthesized call instead of backslash continuation; print as a
        # single-argument function call so the file parses under Python 3 too.
        request = urllib.urlopen('%s%s' % (api_url,
                                           URL_ENDPOINTS[what] % (args)))
        response = request.read()
        return json.loads(response)
    except IOError:
        print('Could not connect to the celerymon webserver')
        sys.exit(-1)
def check_web_server_status(api_url):
    """Verify the celerymon web server at *api_url* is reachable.

    Prints an error and exits the process on connection failure; the
    response body itself is read but ignored.
    """
    try:
        request = urllib.urlopen(api_url)
        response = request.read()
    except IOError:
        # print as a function call so the file parses under Python 3 too
        print('Could not connect to the celerymon webserver')
        sys.exit(-1)
def clean_task_name(task_name):
    """Return *task_name* with every dot replaced by an underscore.

    Munin field names may not contain dots, so a task name such as
    'myapp.tasks.SendEmailTask' becomes 'myapp_tasks_SendEmailTask'.
    """
    return '_'.join(task_name.split('.'))
# Config
def print_config(task_names):
    """Print the Munin graph configuration and one data series per task.

    task_names -- iterable of fully qualified Celery task names
    """
    print('graph_title Celery tasks')
    print('graph_args --lower-limit 0')
    print('graph_scale no')
    print('graph_vlabel tasks per ${graph_period}')
    print('graph_category celery')

    for name in task_names:
        # Hoist the sanitized field name -- it is used in all four lines.
        field = clean_task_name(name)
        print('%s.label %s' % (field, name))
        print('%s.type DERIVE' % (field))
        print('%s.min 0' % (field))
        print('%s.info number of %s tasks' % (field, name))
# Values
def print_values(task_names = None, api_url = None):
    """Print the current task count for each monitored task name.

    task_names -- list of task names to report on
    api_url    -- base URL of the celerymon web server
    """
    for task_name in task_names:
        # Each entry returned for a task name counts as one task instance.
        count = len(get_data('task_details', api_url, task_name))
        print('%s.value %d' % (clean_task_name(task_name), count))
if __name__ == '__main__':
    # Comma-separated task names to monitor; must be set in plugin config.
    task_names = os.environ.get('tasks', None)
    # celerymon base URL, defaulting to the local instance.
    api_url = os.environ.get('api_url', API_URL)
    check_web_server_status(api_url)

    if not task_names:
        print('You need to define at least one task name')
        sys.exit(-1)

    task_names = task_names.split(',')

    # Munin invokes the plugin with 'config'/'autoconf' arguments, or with
    # no argument at all to fetch the current values.
    if len(sys.argv) > 1:
        if sys.argv[1] == 'config':
            print_config(task_names)
        elif sys.argv[1] == 'autoconf':
            print('yes')
    else:
        print_values(task_names, api_url)