diff --git a/apache_status/python_modules/apache_status.py b/apache_status/python_modules/apache_status.py
index 5d2ad522..d81a6299 100755
--- a/apache_status/python_modules/apache_status.py
+++ b/apache_status/python_modules/apache_status.py
@@ -68,7 +68,7 @@ def get_metrics():
         req = urllib2.Request(SERVER_STATUS_URL + "?auto")
 
         # Download the status file
-        res = urllib2.urlopen(req)
+        res = urllib2.urlopen(req, None, 2)
 
         for line in res:
             split_line = line.rstrip().split(": ")
@@ -93,7 +93,7 @@ def get_metrics():
             req2 = urllib2.Request(SERVER_STATUS_URL)
 
             # Download the status file
-            res = urllib2.urlopen(req2)
+            res = urllib2.urlopen(req2, None, 2)
 
             for line in res:
                 regMatch = SSL_REGEX.match(line)
diff --git a/couchdb/python_modules/couchdb.py b/couchdb/python_modules/couchdb.py
index a24f21f4..21589f8b 100644
--- a/couchdb/python_modules/couchdb.py
+++ b/couchdb/python_modules/couchdb.py
@@ -54,7 +54,8 @@ def _get_couchdb_stats(url, refresh_rate):
         logging.warning('The specified refresh_rate of %d is invalid and has been substituted with 60!' % refresh_rate)
         url += '?range=60'
 
-    c = urllib2.urlopen(url)
+    # Set time out for urlopen to 2 seconds otherwise we run into the possibility of hosing gmond
+    c = urllib2.urlopen(url, None, 2)
     json_data = c.read()
     c.close()
 
diff --git a/elasticsearch/python_modules/elasticsearch.py b/elasticsearch/python_modules/elasticsearch.py
index 1f1444da..b94f78c1 100755
--- a/elasticsearch/python_modules/elasticsearch.py
+++ b/elasticsearch/python_modules/elasticsearch.py
@@ -127,7 +127,7 @@ def update_result(result, url):
     diff = now - last_update
     if diff > 20:
         print '[elasticsearch] ' + str(diff) + ' seconds passed - Fetching ' + url
-        result = json.load(urllib.urlopen(url))
+        result = json.load(urllib.urlopen(url, None, 2))
         last_update = now
     return result
 
@@ -199,7 +199,7 @@ def metric_init(params):
 
     # First iteration - Grab statistics
     print('[elasticsearch] Fetching ' + url_cluster)
-    result = json.load(urllib.urlopen(url_cluster))
+    result = json.load(urllib.urlopen(url_cluster, None, 2))
 
     metric_group = params.get('metric_group', 'elasticsearch')
 
@@ -220,7 +220,7 @@ def metric_init(params):
         url_indices = '{0}{1}/_stats'.format(host, index)
         print('[elasticsearch] Fetching ' + url_indices)
 
-        r_indices = json.load(urllib.urlopen(url_indices))
+        r_indices = json.load(urllib.urlopen(url_indices, None, 2))
         descriptors += get_indices_descriptors(index,
                                                Desc_Skel,
                                                r_indices,
diff --git a/httpd/python_modules/httpd.py b/httpd/python_modules/httpd.py
index 10845e65..a6b44570 100644
--- a/httpd/python_modules/httpd.py
+++ b/httpd/python_modules/httpd.py
@@ -74,7 +74,7 @@ def update_stats():
     try:
         httpd_stats = {}
         logging.debug(' opening URL: ' + str(STATUS_URL))
-        f = urllib.urlopen(STATUS_URL)
+        f = urllib.urlopen(STATUS_URL, None, 2)
 
         for line in f.readlines():
             diff = False
diff --git a/jenkins/python_modules/jenkins.py b/jenkins/python_modules/jenkins.py
index e7cef064..ac4fffbf 100644
--- a/jenkins/python_modules/jenkins.py
+++ b/jenkins/python_modules/jenkins.py
@@ -52,7 +52,7 @@ def _get_jenkins_statistics(url):
     url += '/api/json'
     url += '?tree=jobs[color],overallLoad[busyExecutors[min[latest]],queueLength[min[latest]],totalExecutors[min[latest]]]'
 
-    c = urllib2.urlopen(url)
+    c = urllib2.urlopen(url, None, 2)
     json_data = c.read()
     c.close()
 
diff --git a/network/netstats/conf.d/netstats.pyconf b/network/netstats/conf.d/netstats.pyconf
index f3dec863..d1abc76a 100644
--- a/network/netstats/conf.d/netstats.pyconf
+++ b/network/netstats/conf.d/netstats.pyconf
@@ -25,6 +25,11 @@ collection_group {
     name_match = "ip_(.+)"
     value_threshold = 1.0
   }
+  metric {
+    name_match = "icmpmsg_(.+)"
+    value_threshold = 1.0
+  }
+
   metric {
     name_match = "icmp_(.+)"
     value_threshold = 1.0
diff --git a/nginx_status/python_modules/nginx_status.py b/nginx_status/python_modules/nginx_status.py
index 12839b36..08e70dd8 100644
--- a/nginx_status/python_modules/nginx_status.py
+++ b/nginx_status/python_modules/nginx_status.py
@@ -49,7 +49,7 @@ def run(self):
 
     @staticmethod
     def _get_nginx_status_stub_response(url):
-        c = urllib2.urlopen(url)
+        c = urllib2.urlopen(url, None, 2)
         data = c.read()
         c.close()
 
diff --git a/rabbit/python_modules/rabbitmq.py b/rabbit/python_modules/rabbitmq.py
index e8976af6..2d89eaae 100644
--- a/rabbit/python_modules/rabbitmq.py
+++ b/rabbit/python_modules/rabbitmq.py
@@ -99,7 +99,7 @@ def refreshStats(stats = ('nodes', 'queues'), vhosts = ['/']):
             result_dict = {}
             urlstring = url_template.safe_substitute(stats = stat, vhost = vhost)
             print urlstring
-            result = json.load(urllib.urlopen(urlstring))
+            result = json.load(urllib.urlopen(urlstring, None, 2))
             # Rearrange results so entry is held in a dict keyed by name - queue name, host name, etc.
             if stat in ("queues", "nodes", "exchanges"):
                 for entry in result:
diff --git a/riak/riak.py b/riak/riak.py
index f61a882f..2543802e 100644
--- a/riak/riak.py
+++ b/riak/riak.py
@@ -63,7 +63,7 @@ def run(self):
     def update_metric(self):
         try:
             req = urllib2.Request(url = self.url)
-            res = urllib2.urlopen(req)
+            res = urllib2.urlopen(req, None, 2)
             stats = res.read()
             dprint("%s", stats)
             json_stats = json.loads(stats)
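
Review note: the urllib2-based hunks above are sound; since Python 2.6, urllib2.urlopen(url, data, timeout) takes a timeout in seconds as its third positional argument. The hunks in elasticsearch.py, httpd.py, and rabbitmq.py, however, call urllib.urlopen(url, None, 2), and in Python 2 the third positional parameter of urllib.urlopen is proxies, not a timeout; passing 2 there raises "AssertionError: proxies must be a mapping" rather than bounding the request. A minimal sketch of a workaround for those modules, assuming Python 2 and no switch to urllib2 (fetch_with_timeout is a hypothetical helper, not part of this patch):

import socket
import urllib

def fetch_with_timeout(url, timeout=2.0):
    # Hypothetical helper, not part of the patch above.
    # urllib.urlopen() has no timeout parameter, so bound the request by
    # temporarily lowering the process-wide default socket timeout.
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        f = urllib.urlopen(url)
        try:
            return f.read()
        finally:
            f.close()
    finally:
        # setdefaulttimeout() affects every socket created while it is in
        # effect, so restore the previous value once the fetch completes.
        socket.setdefaulttimeout(old_timeout)

Alternatively, those three modules could import urllib2 and use the same urllib2.urlopen(url, None, 2) call the other hunks use.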