@irom77
Forked from sourceperl/th_pinger.py
Last active December 1, 2020 16:47
Python script for multi-threaded ping
th_pinger.py

#!/usr/bin/env python
# ping a list of hosts with threads to increase speed
# uses the standard linux /bin/ping utility
from threading import Thread
import subprocess
import Queue
import re

# some global vars
num_threads = 15
ips_q = Queue.Queue()
out_q = Queue.Queue()

# build IP array
ips = []
for i in range(1, 200):
    ips.append("163.111.168." + str(i))

# thread code : wraps system ping command
def thread_pinger(i, q):
    """Pings hosts in queue"""
    while True:
        # get an IP item from the queue
        ip = q.get()
        # ping it
        args = ['/bin/ping', '-c', '1', '-W', '1', str(ip)]
        # p_ping = subprocess.Popen(args, shell=False, stdout=subprocess.PIPE)
        p_ping = subprocess.Popen(args, shell=False,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        # save ping stdout
        p_ping_out = p_ping.communicate()[0]
        # ping returns 0 if the host answered
        if p_ping.wait() == 0:
            # rtt min/avg/max/mdev = 22.293/22.293/22.293/0.000 ms
            search = re.search(r'rtt min/avg/max/mdev = (.*)/(.*)/(.*)/(.*) ms',
                               p_ping_out, re.M | re.I)
            ping_rtt = search.group(2)
            out_q.put("OK " + str(ip) + " rtt= " + ping_rtt)
        # update queue : this ip is processed
        q.task_done()

# start the thread pool
for i in range(num_threads):
    worker = Thread(target=thread_pinger, args=(i, ips_q))
    worker.setDaemon(True)
    worker.start()

# fill queue
for ip in ips:
    ips_q.put(ip)

# wait until worker threads are done to exit
ips_q.join()

# print result
while True:
    try:
        msg = out_q.get_nowait()
    except Queue.Empty:
        break
    print msg
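The script above targets Python 2 (the Queue module and the print statement). Purely as an illustration, not part of the gist, the same approach under Python 3 might look roughly like this; the main differences are the renamed queue module, print() as a function, and decoding the bytes that subprocess returns:

#!/usr/bin/env python3
# rough Python 3 sketch of the same multi-threaded ping approach (not from the original gist)
from threading import Thread
from queue import Queue, Empty
import subprocess
import re

num_threads = 15
ips_q = Queue()
out_q = Queue()

def thread_pinger(i, q):
    """Pings hosts taken from the queue."""
    while True:
        ip = q.get()
        args = ['/bin/ping', '-c', '1', '-W', '1', str(ip)]
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out = p.communicate()[0].decode('utf-8', 'replace')  # bytes -> str in Python 3
        if p.returncode == 0:
            m = re.search(r'rtt min/avg/max/mdev = (.*)/(.*)/(.*)/(.*) ms', out, re.M | re.I)
            if m:
                out_q.put("OK %s rtt= %s" % (ip, m.group(2)))
        q.task_done()

# start the worker pool, then queue the addresses to ping
for i in range(num_threads):
    Thread(target=thread_pinger, args=(i, ips_q), daemon=True).start()
for i in range(1, 200):
    ips_q.put("163.111.168.%d" % i)

# wait for all pings, then drain the results
ips_q.join()
while True:
    try:
        print(out_q.get_nowait())
    except Empty:
        break

The second file of the gist, below, extends the same worker pattern so its data can come from and go back to a SQL database.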
#!/usr/bin/env python
# ping a list of hosts with threads to increase speed
# designed to read its data from / write results to a SQL database
# uses the standard linux /bin/ping utility
from threading import Thread
import mysql.connector
import subprocess
import Queue
import time
import re

# some global vars
num_threads = 30
ips_q = Queue.Queue()
out_q = Queue.Queue()

# thread code : wraps system ping command
def thread_pinger(i, q):
    """Pings hosts in queue"""
    while True:
        # get an IP item from the queue
        item = q.get()
        # ping it
        args = ['/bin/ping', '-c', '1', '-W', str(item['timeout']),
                str(item['ip'])]
        p_ping = subprocess.Popen(args,
                                  shell=False,
                                  stdout=subprocess.PIPE)
        # save ping stdout
        p_ping_out = p_ping.communicate()[0]
        # ping returns 0 if the host answered
        if p_ping.wait() == 0:
            # rtt min/avg/max/mdev = 22.293/22.293/22.293/0.000 ms
            search = re.search(r'rtt min/avg/max/mdev = (.*)/(.*)/(.*)/(.*) ms',
                               p_ping_out, re.M | re.I)
            item['up'] = True
            item['rtt'] = search.group(2)
        else:
            item['up'] = False
        # update output queue
        out_q.put(item)
        # update queue : this ip is processed
        q.task_done()

# start the thread pool
for i in range(num_threads):
    worker = Thread(target=thread_pinger, args=(i, ips_q))
    worker.setDaemon(True)
    worker.start()

# build IP array
ips = []
for i in range(1, 200):
    ips.append("192.168.1." + str(i))

# main loop
while True:
    # retrieve data from DB
    # add SQL here
    # test start time
    start = time.time()
    # fill queue
    for ip in ips:
        ips_q.put({'ip': ip, 'timeout': 1})
    # wait until worker threads are done
    ips_q.join()
    # display result
    print("next:")
    while True:
        try:
            msg = out_q.get_nowait()
        except Queue.Empty:
            break
        if msg['up']:
            print(msg)
    # test end time
    end = time.time()
    loop_time = round(end - start, 2)
    print("loop time: %s" % (loop_time))
    # update DB
    # add SQL here
    # wait 5s before next cycle
    time.sleep(5.0)
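The two "# add SQL here" placeholders in the main loop are left open in the gist. Below is a minimal sketch of how they might be filled in with mysql.connector (already imported by the script); the hosts table and its ip, timeout, up and rtt columns are illustrative assumptions, not anything defined by the original code:

# illustrative sketch only: hypothetical 'hosts' table with ip, timeout, up and rtt columns
db = mysql.connector.connect(host='localhost', user='pinger',
                             password='secret', database='monitoring')
cursor = db.cursor()

# "retrieve data from DB": read the IP list and per-host timeout
cursor.execute("SELECT ip, timeout FROM hosts")
items = [{'ip': row[0], 'timeout': row[1]} for row in cursor.fetchall()]

# ... run the ping queue as in the main loop above ...

# "update DB": write back the up/down state and last rtt for each result
for msg in results:  # results = the items drained from out_q
    cursor.execute("UPDATE hosts SET up = %s, rtt = %s WHERE ip = %s",
                   (msg['up'], msg.get('rtt'), msg['ip']))
db.commit()

Parameterized queries (the %s placeholders) pass the IPs and results as values instead of splicing them into the SQL string.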
@3msoft-3mh8r

I searched for a solution to the problem and found it in adding stderr=subprocess.STDOUT at line (30) of th_pinger.py.

The fixed code looks like this:
p_ping = subprocess.Popen(args, shell=False, stdout=subprocess.PIPE , stderr=subprocess.STDOUT )
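Another option, if ping's error messages should simply be discarded rather than captured, is to send stderr to /dev/null; a sketch, not part of the gist:

import os
devnull = open(os.devnull, 'w')
p_ping = subprocess.Popen(args, shell=False, stdout=subprocess.PIPE, stderr=devnull)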

In conclusion, I want to thank you for this wonderful code 👍

@irom77 (Author) commented Dec 1, 2020

Thank you!
