Last active
August 29, 2015 14:04
-
-
Save Tantas/694c771c4b6f656841be to your computer and use it in GitHub Desktop.
Performs the identical speed test to the speed test widget found at http://www.acanac.ca/speedtest/.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
#===============================================================================
# FILE: acanac_speed_test.sh
#
# USAGE: acanac_speed_test.sh (-c)
#
# DESCRIPTION: Performs the identical speed test to the speed test widget found
# at http://www.acanac.ca/speedtest/.
#
# OPTIONS: -c changes the output to be 'epoch-timestamp, down, up'
# REQUIREMENTS: cURL, bc, parallel
# BUGS: ---
# NOTES: Make sure you are getting what you paid for. We will even use
# _their_ speed test service. Example cron job for producing a
# CSV for graphing is:
# acanac_speed_test.sh -c >> /var/log/acanac_speed_test.log.csv
#
# AUTHOR: Joseph Preiss <[email protected]>
# COMPANY: ---
# VERSION: 1.0
# CREATED: 08.06.2014
# REVISION: ---
#===============================================================================
# Base URL of the Acanac speed-test service used for all transfers below.
ACANAC_SPEED_TEST_URL='http://www.acanac.ca/speedtest/speedtest/'
# Verify dependencies exist; abort with a message on stderr if any is missing.
for dependency in curl bc parallel
do
  command -v "${dependency}" >/dev/null 2>&1 || \
    { echo >&2 "Missing dependency ${dependency}. Aborting."; exit 1; }
done
# Check the arguments to see if the output will be a CSV row.
# FIX: the original collapsed all arguments into a single string (ARGV="$@")
# and matched it unquoted, so any invocation with more than one argument, or
# an argument containing whitespace or glob characters, was mishandled.
# Only the first argument is meaningful, so inspect exactly that (quoted,
# with an unset-safe default so `set -u` environments also work).
CSV_MODE=false
case "${1:-}" in
  -c)
    CSV_MODE=true
    ;;
esac
#===============================================================================
# Download Speed Test
#===============================================================================
# cURL arguments for producing average download speed.
# Great article http://archive09.linux.com/feature/57715
# NOTE(review): the single quotes embedded INSIDE this string look like a bug
# but appear deliberate: ${CURL_ARGS} is expanded unquoted below and handed to
# GNU parallel, which re-evaluates the words through a shell, stripping the
# quotes so curl receives the -w format "%{speed_download} " (the trailing
# space separates the two results). Confirm against `parallel` quoting docs
# before "fixing" this.
CURL_ARGS="-s -o /dev/null -w '%{speed_download} '"
# Download two copies of the small image in parallel; the epoch timestamp in
# the query string acts as a cache-buster.
EPOCH_TIME=$(date +%s)
SMALL_DOWNLOAD_AVERAGE_SPEEDS=$(parallel --no-notice curl ${CURL_ARGS} ::: \
"${ACANAC_SPEED_TEST_URL}random350x350.jpg?x=${EPOCH_TIME}-1" \
"${ACANAC_SPEED_TEST_URL}random350x350.jpg?x=${EPOCH_TIME}-2")
# Split the space-separated speeds into an array (relies on word-splitting of
# the unquoted expansion). Despite the "AVERAGE" name, the bc expression is a
# SUM: the two parallel streams are added to approximate the line's aggregate
# throughput in bytes/sec.
SPEEDS=(${SMALL_DOWNLOAD_AVERAGE_SPEEDS//\ / })
SMALL_AVERAGE=$(bc -l <<< "scale=2;(${SPEEDS[0]}+${SPEEDS[1]})")
# Repeat with the large image, again two copies in parallel.
EPOCH_TIME=$(date +%s)
LARGE_DOWNLOAD_AVERAGE_SPEEDS=$(parallel --no-notice curl ${CURL_ARGS} ::: \
"${ACANAC_SPEED_TEST_URL}random1500x1500.jpg?x=${EPOCH_TIME}-1" \
"${ACANAC_SPEED_TEST_URL}random1500x1500.jpg?x=${EPOCH_TIME}-2")
# Sum of the two parallel large-image streams (see note above).
SPEEDS=(${LARGE_DOWNLOAD_AVERAGE_SPEEDS//\ / })
LARGE_AVERAGE=$(bc -l <<< "scale=2;(${SPEEDS[0]}+${SPEEDS[1]})")
# Average the small-image and large-image aggregate throughputs.
TOTAL_DOWNLOAD_AVERAGE=$(bc -l <<< "scale=2;(${SMALL_AVERAGE}+${LARGE_AVERAGE})/2")
# Bytes->bits, bits->kilobits->megabits.
SPEED_DOWNLOAD=$(bc -l <<< "scale=2;(${TOTAL_DOWNLOAD_AVERAGE}*8/(1000*1000))")
# Display the download speed in a human readable format (suppressed in -c mode;
# the CSV row is emitted at the bottom of the script instead).
if [ "$CSV_MODE" == "false" ] ; then
echo "${SPEED_DOWNLOAD}Mbps down"
fi
#===============================================================================
# Upload Speed Test
#===============================================================================
# A temporary file holds the upload payload because it is too long for argv.
# SECURITY FIX: the original used the predictable path /tmp/post_data, which
# is vulnerable to symlink attacks and collisions between concurrent runs.
# Use mktemp and clean up on every exit path (the later explicit rm remains
# harmless because the trap uses rm -f).
TEMP_FILE=$(mktemp) || { echo >&2 "mktemp failed. Aborting."; exit 1; }
trap 'rm -f -- "${TEMP_FILE}"' EXIT
# cURL arguments for producing average upload speed.  The single quotes
# embedded in the string are deliberate: the variable is expanded unquoted and
# re-evaluated by GNU parallel's shell, so curl sees "%{speed_upload} " with a
# trailing-space separator (same trick as CURL_ARGS above).
CURL_UPLOAD_ARGS="-s -w '%{speed_upload} ' -o /dev/null --data @${TEMP_FILE}"
# Payload length extracted from the raw POST during a speed test.
UPLOAD_PAYLOAD_SIZE=201758
# Accumulates the per-round upload speeds as a space-separated list.
LIST_UPLOAD_SPEEDS=""
# Run six upload rounds; each round uploads two payload copies in parallel.
for i in 0 1 2 3 4 5
do
# Build a fresh random payload of uppercase letters in the temp file; the
# "content1:" prefix mimics the form field the web widget POSTs.
echo "content1:$(LC_ALL=C tr -dc 'A-Z' < /dev/urandom | head -c ${UPLOAD_PAYLOAD_SIZE})" > ${TEMP_FILE}
# Upload two copies at the same time (epoch timestamp = cache-buster) and
# capture both upload speeds from curl's -w output.
EPOCH_TIME=$(date +%s)
SPEED_UPLOADS=$(parallel --no-notice curl ${CURL_UPLOAD_ARGS} ::: \
"${ACANAC_SPEED_TEST_URL}upload.php?x=${EPOCH_TIME}-1" \
"${ACANAC_SPEED_TEST_URL}upload.php?x=${EPOCH_TIME}-2")
# Split on spaces and SUM the pair: two parallel streams added together
# approximate the aggregate upstream throughput (bytes/sec) for this round.
SPEEDS=(${SPEED_UPLOADS//\ / })
UPLOAD_SPEEDS=$(bc -l <<< "scale=2;(${SPEEDS[0]}+${SPEEDS[1]})")
# Append this round's figure to the space-separated list for averaging later.
LIST_UPLOAD_SPEEDS="${LIST_UPLOAD_SPEEDS} ${UPLOAD_SPEEDS} "
done
# Remove the temporary file. | |
rm ${TEMP_FILE} | |
# Add each upload speed together and divide by the total number for the average. | |
SPEED_UPLOAD=0 | |
LIST_UPLOAD_SPEEDS_SPLIT=(${LIST_UPLOAD_SPEEDS//\ / }) | |
for i in 0 1 2 3 4 5 | |
do | |
SPEED_UPLOAD=$(bc -l <<< "scale=2;(${SPEED_UPLOAD} + ${LIST_UPLOAD_SPEEDS_SPLIT[i]})") | |
done | |
SPEED_UPLOAD=$(bc -l <<< "scale=2;(${SPEED_UPLOAD}/6)") | |
# Bytes->bits, bits->kilobits->megabits. | |
SPEED_UPLOAD=$(bc -l <<< "scale=2;(${SPEED_UPLOAD}*8/(1000*1000))") | |
# Display the upload speed in a human readable format. | |
if [ "$CSV_MODE" == "false" ] ; then | |
echo "${SPEED_UPLOAD}Mbps up" | |
fi | |
#===============================================================================
# CSV Output
#===============================================================================
# In -c mode, emit one CSV row: epoch timestamp, download Mbps, upload Mbps.
if [ "$CSV_MODE" == "true" ] ; then
  printf '%s, %s, %s\n' "$(date +%s)" "${SPEED_DOWNLOAD}" "${SPEED_UPLOAD}"
fi
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment