Commit 246721e
Add timeouts to urllib2 calls, otherwise we run into situations where gmond is hosed
vvuksan committed Nov 7, 2012
1 parent 4828cb4 commit 246721e
Showing 9 changed files with 17 additions and 11 deletions.
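
Every change below follows the same pattern: since Python 2.6, urllib2.urlopen accepts an optional third argument, a timeout in seconds, so urllib2.urlopen(req, None, 2) is the same as passing timeout=2 and makes the call give up after two seconds instead of blocking gmond's collector thread indefinitely. A minimal sketch of the pattern (the URL is illustrative):

    import urllib2

    # The third positional argument is the timeout in seconds (Python >= 2.6);
    # equivalent to urllib2.urlopen(url, data=None, timeout=2).
    res = urllib2.urlopen('http://localhost/server-status?auto', None, 2)
    print res.read()

On a timeout the call raises urllib2.URLError (or socket.timeout during a read) instead of hanging, which is exactly what a monitoring agent wants.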
4 changes: 2 additions & 2 deletions apache_status/python_modules/apache_status.py

@@ -68,7 +68,7 @@ def get_metrics():
     req = urllib2.Request(SERVER_STATUS_URL + "?auto")
 
     # Download the status file
-    res = urllib2.urlopen(req)
+    res = urllib2.urlopen(req, None, 2)
 
     for line in res:
         split_line = line.rstrip().split(": ")
@@ -93,7 +93,7 @@ def get_metrics():
     req2 = urllib2.Request(SERVER_STATUS_URL)
 
     # Download the status file
-    res = urllib2.urlopen(req2)
+    res = urllib2.urlopen(req2, None, 2)
 
     for line in res:
         regMatch = SSL_REGEX.match(line)
3 changes: 2 additions & 1 deletion couchdb/python_modules/couchdb.py

@@ -54,7 +54,8 @@ def _get_couchdb_stats(url, refresh_rate):
         logging.warning('The specified refresh_rate of %d is invalid and has been substituted with 60!' % refresh_rate)
         url += '?range=60'
 
-    c = urllib2.urlopen(url)
+    # Set the urlopen timeout to 2 seconds, otherwise we run into the possibility of hosing gmond
+    c = urllib2.urlopen(url, None, 2)
     json_data = c.read()
     c.close()
 
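A timeout converts a hung server into an exception, so a caller such as _get_couchdb_stats still has to handle the failure if the metric callback is to survive an outage. A sketch of that idea with illustrative names (this wrapper is not part of the commit):

    import socket
    import urllib2

    def fetch_body(url, timeout=2):
        # Return the raw response body, or None if the server is slow or down.
        try:
            c = urllib2.urlopen(url, None, timeout)
            try:
                return c.read()
            finally:
                c.close()
        except (urllib2.URLError, socket.timeout):
            return None
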
6 changes: 3 additions & 3 deletions elasticsearch/python_modules/elasticsearch.py

@@ -127,7 +127,7 @@ def update_result(result, url):
     diff = now - last_update
     if diff > 20:
         print '[elasticsearch] ' + str(diff) + ' seconds passed - Fetching ' + url
-        result = json.load(urllib.urlopen(url))
+        result = json.load(urllib.urlopen(url, None, 2))
         last_update = now
 
     return result
@@ -199,7 +199,7 @@ def metric_init(params):
 
     # First iteration - Grab statistics
     print('[elasticsearch] Fetching ' + url_cluster)
-    result = json.load(urllib.urlopen(url_cluster))
+    result = json.load(urllib.urlopen(url_cluster, None, 2))
 
     metric_group = params.get('metric_group', 'elasticsearch')
 
@@ -220,7 +220,7 @@ def metric_init(params):
         url_indices = '{0}{1}/_stats'.format(host, index)
         print('[elasticsearch] Fetching ' + url_indices)
 
-        r_indices = json.load(urllib.urlopen(url_indices))
+        r_indices = json.load(urllib.urlopen(url_indices, None, 2))
         descriptors += get_indices_descriptors(index,
                                                Desc_Skel,
                                                r_indices,
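One caveat: this module, httpd.py, and rabbitmq.py call urllib.urlopen rather than urllib2.urlopen, and in Python 2 the third positional argument of urllib.urlopen(url, data, proxies) is a proxies mapping, not a timeout, so the added 2 does not act as a timeout at these call sites. For urllib-based code, one way to get the same two-second cap is the process-wide socket default; a sketch (the URL is illustrative):

    import socket
    import urllib

    # urllib.urlopen has no timeout parameter in Python 2; this default
    # applies to every socket opened afterwards, including urllib's.
    socket.setdefaulttimeout(2)

    health = urllib.urlopen('http://localhost:9200/_cluster/health').read()
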
2 changes: 1 addition & 1 deletion httpd/python_modules/httpd.py

@@ -74,7 +74,7 @@ def update_stats():
     try:
         httpd_stats = {}
         logging.debug(' opening URL: ' + str(STATUS_URL))
-        f = urllib.urlopen(STATUS_URL)
+        f = urllib.urlopen(STATUS_URL, None, 2)
 
         for line in f.readlines():
             diff = False
2 changes: 1 addition & 1 deletion jenkins/python_modules/jenkins.py

@@ -52,7 +52,7 @@ def _get_jenkins_statistics(url):
     url += '/api/json'
     url += '?tree=jobs[color],overallLoad[busyExecutors[min[latest]],queueLength[min[latest]],totalExecutors[min[latest]]]'
 
-    c = urllib2.urlopen(url)
+    c = urllib2.urlopen(url, None, 2)
     json_data = c.read()
     c.close()
 
5 changes: 5 additions & 0 deletions network/netstats/conf.d/netstats.pyconf

@@ -25,6 +25,11 @@ collection_group {
     name_match = "ip_(.+)"
     value_threshold = 1.0
   }
+  metric {
+    name_match = "icmpmsg_(.+)"
+    value_threshold = 1.0
+  }
+
   metric {
     name_match = "icmp_(.+)"
     value_threshold = 1.0
2 changes: 1 addition & 1 deletion nginx_status/python_modules/nginx_status.py

@@ -49,7 +49,7 @@ def run(self):
 
     @staticmethod
    def _get_nginx_status_stub_response(url):
-        c = urllib2.urlopen(url)
+        c = urllib2.urlopen(url, None, 2)
         data = c.read()
         c.close()
 
2 changes: 1 addition & 1 deletion rabbit/python_modules/rabbitmq.py

@@ -99,7 +99,7 @@ def refreshStats(stats = ('nodes', 'queues'), vhosts = ['/']):
             result_dict = {}
             urlstring = url_template.safe_substitute(stats = stat, vhost = vhost)
             print urlstring
-            result = json.load(urllib.urlopen(urlstring))
+            result = json.load(urllib.urlopen(urlstring, None, 2))
             # Rearrange results so entry is held in a dict keyed by name - queue name, host name, etc.
             if stat in ("queues", "nodes", "exchanges"):
                 for entry in result:
2 changes: 1 addition & 1 deletion riak/riak.py

@@ -63,7 +63,7 @@ def run(self):
     def update_metric(self):
         try:
             req = urllib2.Request(url = self.url)
-            res = urllib2.urlopen(req)
+            res = urllib2.urlopen(req, None, 2)
             stats = res.read()
             dprint("%s", stats)
             json_stats = json.loads(stats)
