Created August 15, 2014 17:37
An automated Avamar perf test thing that ended up not being needed and was never finished.
#!/usr/bin/python
#
# Automate the testing for Avamar POC
# script should be able to log activity and useful status.
# TODO
# check error on all subprocess.Popen() and file open() functions
# Error handling: i.e. catch/log errors before dying
# make sure all the function defs check inputs
#
#
## pull in the necessary modules ##
import os
import sys
import errno
import subprocess
from xml.dom import minidom
import logging
import datetime
###################################
#
## set up our logging ###########
today = datetime.date.today()
LOGDIR = "./log"
# make our logdir
if not os.path.exists(LOGDIR):
    try:
        os.makedirs(LOGDIR)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
LOGFILE = LOGDIR + "/" + today.strftime("%Y%m%d") + "_backup_logfile.log"
logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%Y%m%d %I:%M:%S %p', filename=LOGFILE, level=logging.INFO)
#################################
######################################################################################
#
#
def ssh_command(remote_user="root", remote_host="localhost", remote_command="hostname"):
    """ run an arbitrary ssh command and return the output. No output checking or ssh error catching """
    # this would better be done with something like paramiko.  This way requires no additional software
    ssh_cmd = ["ssh", remote_user + "@" + remote_host, remote_command]
    sshraw = subprocess.Popen(ssh_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    sshout, ssherr = sshraw.communicate()
    if sshraw.returncode:
        logging.warning(" Error in ssh_command \"%s\":\n%s", ssh_cmd, ssherr)
        raise Exception(ssherr)
    else:
        logging.info(" ssh command '%s' ran", ssh_cmd)
        logging.info(" ssh output: %s", sshout)
        return sshout
## end ssh_command
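# A minimal sketch of the paramiko approach mentioned above -- an assumption,
# not part of the original plan: it requires the paramiko package and working
# key-based auth, and the name ssh_command_paramiko is hypothetical.
def ssh_command_paramiko(remote_user="root", remote_host="localhost", remote_command="hostname"):
    """ run a remote command over SSH with paramiko instead of shelling out """
    import paramiko
    client = paramiko.SSHClient()
    # accept unknown host keys for the POC; tighten this for real use
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(remote_host, username=remote_user)
    stdin, stdout, stderr = client.exec_command(remote_command)
    out = stdout.read()
    err = stderr.read()
    client.close()
    if err:
        logging.warning(" ssh (paramiko) stderr: %s", err)
    return out
## end ssh_command_paramiko (sketch)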
def run_backup(testcli, testgrp):
    """ Run backup on Avamar via mccli - testcli and testgrp take full client and group paths """
    # mccli client backup-group-dataset --name="/test01/testclient01" --group-name="/test01/group01"
    mccli_run_backup = ["mccli", "client", "backup-group-dataset", "--name=" + testcli, "--group-name=" + testgrp]
    runraw = subprocess.Popen(mccli_run_backup, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    runout, runerr = runraw.communicate()
    if runraw.returncode:
        logging.warning(" Error in run_backup: %s", runerr)
        raise Exception(runerr)
    else:
        runxml = minidom.parseString(runout)
        # select activity ID and return
        activity_id = runxml.getElementsByTagName('ID')[0]
        return activity_id.firstChild.nodeValue
## end run_backup
def show_status(act_id=0, csvfileout='avamar_status_out.csv'):
    """ read in the status in XML format and stick it in a minidom object """
    # get the status line from mccli
    mccli_show_status = ["mccli", "activity", "show", "--xml", "--id=" + str(act_id)]
    actraw = subprocess.Popen(mccli_show_status, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    actout, acterr = actraw.communicate()
    if actraw.returncode:
        logging.warning(" Error in show_status: %s", acterr)
        raise Exception(acterr)
    else:
        # Step 2: XML processing
        actxml = minidom.parseString(actout)
        status = actxml.getElementsByTagName('Status')[0]
        elapsed = actxml.getElementsByTagName('Elapsed')[0]
        processed = actxml.getElementsByTagName('ProgressBytes')[0]
        start = actxml.getElementsByTagName('StartTime')[0]
        end = actxml.getElementsByTagName('EndTime')[0]
        # return the relevant info.
        logging.info(" Backup Status for %s: %s ------", act_id, status.firstChild.nodeValue)
        logging.info(" Elapsed time: %s Data processed: %s", elapsed.firstChild.nodeValue, processed.firstChild.nodeValue)
        logging.info(" Start time: %s End time: %s", start.firstChild.nodeValue, end.firstChild.nodeValue)
        # write out a plain csv
        # activity ID, Status, Elapsed time, Processed, Start time, End time
        with open(csvfileout, 'w') as csvfile:
            csvfile.write(",".join([str(act_id), status.firstChild.nodeValue, elapsed.firstChild.nodeValue,
                                    processed.firstChild.nodeValue, start.firstChild.nodeValue,
                                    end.firstChild.nodeValue]))
        logging.info(" status line written to %s ----------------------", csvfileout)
        return [status.firstChild.nodeValue]
## end show_status
def show_log(act_id=0, logfileout='avamar_log_out'):
    """show the log and save to logfile"""
    # get the logfile for the completed activity
    mccli_show_log = ["mccli", "activity", "get-log", "--completed=true", "--id=" + str(act_id)]
    lograw = subprocess.Popen(mccli_show_log, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    log_o, log_err = lograw.communicate()
    # error check
    if lograw.returncode:
        logging.warning(" Error in show_log: %s", log_err)
        raise Exception(log_err)
    else:
        # save returned log to $LOCALDIR/log/YYYYMMDD_HHMM_$actid.log
        with open(logfileout, 'w') as l:
            l.write(log_o)
        logging.info(" Backup job log written to %s", logfileout)
        return logfileout
## end show_log
def cpu_pc_ed(pc=100, plugin=1001, client="guest-2"):
    """Edit CPU throttling """
    # call to mccli to edit the --cpu-throttle value
    mccli_edit_cpu = ["mccli", "dataset", "edit-option", "--plugin=" + str(plugin), "--name=/test01/t01-linux-ds",
                      "--option=cpu-throttle", "--value=" + str(pc)]
    ecpuraw = subprocess.Popen(mccli_edit_cpu, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    ecpuout, ecpuerr = ecpuraw.communicate()
    # error check
    if ecpuraw.returncode:
        logging.warning(" Error in cpu_pc_ed: %s", ecpuerr)
        raise Exception(ecpuerr)
    elif ecpuout.find("modified") == -1:
        logging.warning(" Unable to modify CPU throttle value: %s", ecpuout)
        return -1
    else:
        logging.info(" CPU throttle value set to %s", pc)
        logging.info(" mccli dataset edit-option output: %s", ecpuout)
        return 0
## end cpu_pc_ed
def create_workload(gb_workload=10, client="lguest-2", wl_dir="~/work"):
    """Create workload on client"""
    pass  # not ready yet
    # run workload creator gb_workload in wl_dir on client
    # return error if fails
    #
    #
## end create_workload
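# A rough sketch of what create_workload() could do, reusing ssh_command()
# above.  This is an assumption, not the original design: the dd-based file
# layout, the helper name create_workload_sketch, and the remote_user
# parameter are all hypothetical.
def create_workload_sketch(gb_workload=10, client="lguest-2", wl_dir="~/work", remote_user="root"):
    """Create roughly gb_workload GB of test data in wl_dir on the client"""
    for i in range(gb_workload):
        # one 1GB file of pseudo-random data per loop so dedupe can't shrink it
        cmd = "mkdir -p %s && dd if=/dev/urandom of=%s/wl_%03d.dat bs=1M count=1024" % (wl_dir, wl_dir, i)
        ssh_command(remote_user, client, cmd)
    logging.info(" created ~%dGB of workload in %s on %s", gb_workload, wl_dir, client)
    return 0
## end create_workload_sketch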
# Initial backup load
# gb_workload = [50, 100, 200, 500]
# cpu_allow = [100, 10, 5]
# for wl in gb_workload:
#     for cpu in cpu_allow:
#         create_workload(wl)
#         cpu_pc_ed(cpu)
#         act_id = run_backup(testcli, testgrp)
#         while show_status(act_id)[0] == "Running":
#             sleep(120)
#         rm_workload()
#
# Post initial backup load
# for wl in [500, 1000, 2000]:
#     create_workload(wl)
#     run_backup(testcli, testgrp)
#     for cpu in [100, 10, 5]:
#         # monitorVM()
#         run_backup(testcli, testgrp)
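# A hedged sketch of how the initial-backup loop above might be wired to the
# functions in this file.  The client/group paths and the 2-minute poll
# interval are assumptions; create_workload() and rm_workload() are still
# unimplemented, so those steps stay commented out.
if __name__ == "__main__":
    import time
    testcli = "/test01/testclient01"   # assumed client path, taken from the comment in run_backup()
    testgrp = "/test01/group01"        # assumed group path, taken from the comment in run_backup()
    gb_workload = [50, 100, 200, 500]
    cpu_allow = [100, 10, 5]
    for wl in gb_workload:
        for cpu in cpu_allow:
            # create_workload(wl)      # not implemented yet
            cpu_pc_ed(cpu)
            act_id = run_backup(testcli, testgrp)
            # poll until the backup activity leaves the "Running" state
            while show_status(act_id)[0] == "Running":
                time.sleep(120)
            show_log(act_id)
            # rm_workload()            # not implemented yet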