Created
March 18, 2011 13:34
-
-
Save jamesbrink/876070 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Cron-driven importer: parses backup notification emails (from NTBackup and
# Zmanda Cloud Backup) out of a local mbox and inserts the results into MySQL
# for later use in monitoring and RRD graphs.
require 'rmail'
require 'active_record'
require 'logger'
require 'date'
require 'time'
# Mbox file containing the messages to be parsed.
mailbox="/var/spool/mail/monitor"
# Import activity is appended to this log file.
log_file="/var/log/monitor_import"
log = Logger.new(log_file)
log.level = Logger::INFO
log.datetime_format = "%Y-%m-%d %H:%M "
# Database settings; the ActiveRecord models are defined below.
# NOTE(review): credentials are hard-coded here -- consider moving them to an
# external config file with restricted permissions.
ActiveRecord::Base.establish_connection(:adapter=>"mysql",:host=>"localhost",:database=>"sciens_internal_dev",:username=>"nope",:password=>"and hell no")
# A customer organization; the top of the customer -> site -> host hierarchy.
class Customer < ActiveRecord::Base
  has_many :customer_sites
end
# A site belonging to a customer; hosts are grouped under sites.
class CustomerSite < ActiveRecord::Base
  # Inverse of Customer#customer_sites. Declared for consistency with the
  # other models in this file (e.g. CustomerHost declares belongs_to
  # :customer_site); the customer_id foreign key is already being set by
  # customer.customer_sites.create in the import loop.
  belongs_to :customer
  has_many :customer_hosts
end
# A machine at a customer site; owns both NTBackup and Zmanda datasets.
class CustomerHost < ActiveRecord::Base
  belongs_to :customer_site
  has_many :customer_ntb_datasets
  has_many :customer_zcb_datasets
end
# A raw imported email, keyed by its Message-ID header so the importer can
# skip messages it has already processed; jobs link back to their message.
class CustomerMessage < ActiveRecord::Base
  has_many :customer_ntb_jobs
  has_many :customer_zcb_jobs
end
# An NTBackup backup set (e.g. a drive letter) on a given host.
class CustomerNtbDataset < ActiveRecord::Base
  belongs_to :customer_host
  has_many :customer_ntb_jobs
end
# One NTBackup run for a dataset, parsed from a single notification message.
class CustomerNtbJob < ActiveRecord::Base
  belongs_to :customer_ntb_dataset
  belongs_to :customer_message
end
# A Zmanda Cloud Backup backup set on a given host.
class CustomerZcbDataset < ActiveRecord::Base
  belongs_to :customer_host
  has_many :customer_zcb_jobs
end
# One Zmanda job (a "Backup" or an "Upload" task) for a dataset, parsed from
# a single notification message.
class CustomerZcbJob < ActiveRecord::Base
  belongs_to :customer_zcb_dataset
  belongs_to :customer_message
end
# Slurp every raw message out of the mbox into an in-memory array so the
# main loop below can iterate over them.
log.info "loading messages from #{mailbox}"
messages = []
File.open(mailbox) do |mbox|
  RMail::Mailbox.parse_mbox(mbox) { |raw| messages << raw }
end
# Main loop: for each message, skip anything already imported (matched by its
# Message-ID header), otherwise record the raw message, create the
# customer/site/host rows as needed, and parse the job details out of the
# body according to which product sent the notification.
messages.each do |message|
  if( (message_id = /^Message-ID:\s\<(.*)\>/i.match(message) ) && (customer_message = CustomerMessage.find_by_message_id(message_id[1])) )
    # Already imported: this Message-ID exists in customer_messages; skip.
  elsif(message_id = /^Message-ID:\s\<(.*)\>/i.match(message))
    customer_message = CustomerMessage.create(:message_id=>message_id[1],:body=>message)
    # New message: derive the site name from the From address domain and the
    # host name from the Subject line (either product's subject format).
    if( (site = /^From\s*(?:[0-9a-z\-_]*\.)?([0-9a-z\-_]*)\@/i.match(message)) && (host = /^Subject:\s*(?:nt-backup log host:\s*|\[zcb:\s*)([0-9a-z_\-]*)/i.match(message)) )
      # Does this customer site exist?
      # NOTE(review): this site lookup is global, not scoped to a customer --
      # two customers with the same site name would collide.
      if (!customer_site = CustomerSite.find_by_name(site[1]))
        if(!customer=Customer.find_by_name(site[1]))
          # Customer does not exist; create it first, then the site under it.
          customer=Customer.create(:name=>site[1],:description=>"Created by monitor_import")
          log.info "Created customer: #{site[1]}"
          customer_site = customer.customer_sites.create(:name=>site[1],:description=>"Created by monitor_import")
          log.info "Created customer-site: #{site[1]}"
        else
          # Customer exists; place the new site under this customer.
          customer_site = customer.customer_sites.create(:name=>site[1],:description=>"Created by monitor_import")
          log.info "Created customer: #{site[1]}"
        end
      end
      # Does the host exist? If not, create it under the site.
      if(!customer_host = customer_site.customer_hosts.find_by_name(host[1]))
        customer_host = customer_site.customer_hosts.create(:name=>host[1], :description=>"Created by monitor_import")
        log.info "Created customer host #{host[1]}"
      else
        log.info "Customer host exists #{host[1]}"
      end
    end
    # We should now have customer, customer site and customer host.
    # NOTE(review): if the site/host regexes above did NOT match,
    # customer_host is nil here and the branches below raise NoMethodError
    # the first time they call a method on it.
    # Decide what to do with this message by looking at the subject line.
    # Zmanda Cloud Backup notification:
    if(/^Subject:\s*\[zcb:/i.match(message))
      # Pick the dataset name, task (Backup/Upload) and status from the body.
      if (job_info = /(^Backup|^Upload) of backup set "(.*)" (?:is )?(failed|successful)/.match(message))
        dataset=job_info[2]
        task=job_info[1]
        status=job_info[3]
        # Check to see if this dataset is already in customer_zcb_datasets.
        if(customer_dataset = customer_host.customer_zcb_datasets.find_by_name(dataset))
          # Dataset already known; nothing to create.
        else
          # Create this dataset.
          customer_dataset = customer_host.customer_zcb_datasets.create(:name=>dataset,:description=>"Created by zmanda_import")
          log.info "Created dataset: #{customer_dataset[:name]} withd id: #{customer_dataset[:id]}, for host #{customer_host[:name]}"
        end
      else
        # NOTE(review): from_address is never defined anywhere in this
        # script, so reaching this branch raises NameError instead of
        # logging the warning.
        log.warn "Something went wrong, unable to find dataset in message from #{from_address}"
      end
      # Determine if this is an Upload or a Backup job.
      # NOTE(review): if job_info failed to match above, task and
      # customer_dataset are nil here; the create calls below would fail.
      if (task == "Backup")
        # Gather backup job details from the fixed-format report lines.
        start_time = /^Backup start time : (\d{4}\/\d\d\/\d\d) (\d\d:\d\d:\d\d)/.match(message)
        end_time = /^Backup end time : (\d{4}\/\d\d\/\d\d) (\d\d:\d\d:\d\d)/.match(message)
        files_on_disk = /^Files on disk : (\d*)/.match(message)
        files_in_backup = /^Files in backup : (\d*)/.match(message)
        details = /^Error Details : (.*)/.match(message)
        backup_size = /^Total Bytes backed up : (\d.*) (KB|MB|GB)/.match(message)
        # Convert all sizes to KB.
        size = 0
        if (backup_size && backup_size.length > 1)
          size = backup_size[1]
          size = size.to_f
          size = size * 1024 if backup_size[2] == "MB"
          size = size * 1024 * 1024 if backup_size[2] == "GB"
          size = size.to_i
        end
        # Format time stamps for the DB; format is YYYY/MM/DD HH:MM:SS.
        start_datetime = start_time[1] << " " << start_time[2]
        end_datetime = end_time[1] << " " << end_time[2]
        # TODO: calculate backup rate -- may not be possible without
        # compression sizes.
        zcb_job = customer_dataset.customer_zcb_jobs.create(
          :task=>"Backup",
          :size=>size,
          :start_time=>start_datetime,
          :end_time=>end_datetime,
          :files_on_disk=>files_on_disk[1],
          :files_in_backup=>files_in_backup[1],
          :details=>details[1],
          :customer_message_id => customer_message[:id],
          :status=>status
        )
        log.info "Imported Zmanda backup job with for #{customer_dataset[:name]}, Status: #{zcb_job[:status]}, for host: #{customer_host[:name]}"
      elsif (task == "Upload")
        # Gather upload job details.
        start_time = /^Upload start time : (\d{4}\/\d\d\/\d\d) (\d\d:\d\d:\d\d)/.match(message)
        end_time = /^Upload end time : (\d{4}\/\d\d\/\d\d) (\d\d:\d\d:\d\d)/.match(message)
        details = /^Error Details : (.*)/.match(message)
        upload_rate = /^Upload Rate : (\d*) Kbps/.match(message)
        upload_size = /^Bytes uploaded : ([\d\.]*) (KB|MB|GB)/.match(message)
        # Convert all sizes to KB.
        if (upload_size && upload_size.length > 1)
          size = upload_size[1]
          size = size.to_f
          size = size * 1024 if upload_size[2] == "MB"
          size = size * 1024 * 1024 if upload_size[2] == "GB"
          size = size.to_i
        end
        # Format time stamps for the DB; format is YYYY/MM/DD HH:MM:SS.
        start_datetime = start_time[1] << " " << start_time[2]
        end_datetime = end_time[1] << " " << end_time[2]
        # NOTE(review): :size is passed twice in this hash; the second value
        # wins, which is harmless here but worth cleaning up.
        zcb_job = customer_dataset.customer_zcb_jobs.create(
          :task=>"Upload",
          :size=>size,
          :start_time=>start_datetime,
          :end_time=>end_datetime,
          :rate=>upload_rate[1],
          :size=>size,
          :details=>details[1],
          :customer_message_id => customer_message[:id],
          :status=>status
        )
        log.info "Imported Zmanda upload job with for #{customer_dataset[:name]}, Status: #{zcb_job[:status]}, for host: #{customer_host[:name]}"
        # TODO: match the upload with its backup job; for now assume it was
        # the previous backup for this dataset.
      else
        # Unknown job type; nothing to import for this message.
        log.warn "Unkown job type - #{task}"
      end
    # NTBackup notification:
    elsif(/^Subject:\s*nt-backup/i.match(message))
      # Pick the dataset names from the message.
      datasets=message.scan(/^Backup .* of \"(.*)\"/)
      backups=message.scan(/^Backup .* of \"(.*)\"\n^Backup set (.*)\n^Backup description: \"(.*)\"\n^Media name: \"(.*)\"\n\n^Backup Type: (.*)\n\n^Backup started on (.*) at (.*) (AM|PM).\n^Backup completed on (.*) at (.*) (AM|PM).\n^Directories: (.*)\n^Files: (.*)\n^Bytes: (.*)/)
      # Did this job finish without error? One status applies to every backup
      # job found in this message.
      if (status = /^NTBackup finished the backup with no errors./.match(message))
        job_status="successful"
      else
        job_status="failed"
      end
      # backups array layout, format: index) description - example
      # 0) Dataset name - E:
      # 1) Backup set - #3 on media #1
      # 2) Backup Description - SBS Backup created on 1/26/2011 at 11:00 PM
      # 3) Media Name - Media created 1/26/2011 at 11:00 PM
      # 4) Backup Type - Normal
      # 5) Backup Start date - 1/27/2011
      # 6) Backup Start time - 1:41
      # 7) Backup Start time am or pm? - AM
      # 8) Backup End date - 1/27/2011
      # 9) Backup End time - 1:43
      #10) Backup End time am or pm? - AM
      #11) Directories - 71
      #12) Files - 895
      #13) Bytes - 281,723,426
      # Check to see if the datasets already exist; if not, create them.
      if(datasets)
        0.upto((datasets.count)-1) {|i|
          # Does the dataset already exist?
          if(customer_dataset = customer_host.customer_ntb_datasets.find_by_name(datasets[i][0]))
            # Already present -- nothing to do.
          elsif(customer_dataset = customer_host.customer_ntb_datasets.create(:name=>datasets[i][0]))
            log.info "Created NT-Backup dataset #{customer_dataset[:name]} with id of : #{customer_dataset[:id]} "
          else
            # NOTE(review): creation failure is silently ignored here.
          end
        }
      end
      # Insert each ntbackup job.
      backups.each do |job|
        # FIXME: the brittle job.size == 14 field-count guard should be
        # replaced with named captures (original author flagged this too).
        if(job.size ==14 && customer_dataset=customer_host.customer_ntb_datasets.find_by_name(job[0]))
          backupset=job[1]
          description=job[2]
          media_name=job[3]
          type=job[4]
          # NOTE(review): date, time and AM/PM are concatenated with no
          # separators (e.g. "1/27/20111:41AM") -- verify DateTime.parse
          # actually yields the intended timestamps.
          start_time=DateTime.parse(job[5] + job[6] + job[7])
          end_time=DateTime.parse(job[8]+job[9]+job[10])
          # Strip thousands separators; byte count is stored as KB.
          directories=job[11].gsub(",","").to_i
          files=job[12].gsub(",","").to_i
          size=job[13].gsub(",","").to_i/1024
          customer_job = customer_dataset.customer_ntb_jobs.create(
            :start_time=>start_time,
            :end_time=>end_time,
            :size=>size,
            :directories=>directories,
            :files=>files,
            :status=>job_status,
            :backupset=>backupset,
            :description=>description,
            :media_name=>media_name,
            :customer_message_id=>customer_message[:id],
            :type=>type
          )
        end
      end
    end
  end
end #end of Main Loop
log.info "Application Clean Exit"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment