Skip to content

Instantly share code, notes, and snippets.

@srgrn
Last active April 5, 2016 09:12
Show Gist options
  • Select an option

  • Save srgrn/71f0786ca4779e3178b9f5d9c6d1ee80 to your computer and use it in GitHub Desktop.

Select an option

Save srgrn/71f0786ca4779e3178b9f5d9c6d1ee80 to your computer and use it in GitHub Desktop.
Azure SQL Server metrics gatherer
{
"sqlservers":{
"servername": {
"hostname":"servername.database.windows.net",
"db_user":"username@servername",
"db_password":"password"
}
}
}
"""Azure Test SQL Module"""
import json
import argparse
import sys
import logging
import pymssql
def connect(server, database_name):
    """Open a pymssql connection to *database_name* on *server*.

    server: dict with 'hostname', 'db_user' and 'db_password' keys
            (see the sample config at the top of this gist).
    Returns the open connection, or None when the connection fails —
    callers are expected to check for None and skip the database.
    """
    try:
        return pymssql.connect(server=server['hostname'],
                               user=server['db_user'],
                               password=server['db_password'],
                               database=database_name)
    # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still propagate
    except Exception:
        logging.warning("connection Error will continue")
        logging.exception('Got exception on connect to db {}, {}'.format(server, database_name))
        return None
def check_azure_sql_db(server):
    """Gather size and resource metrics for every user database on *server*.

    Returns a dict mapping database name -> metrics dict with keys
    'Size', 'Size_Percent', 'Historic_Metrics' and 'Current_Metrics';
    a database that could not be reached maps to None.
    """
    metrics = {}
    # get_all_databases can yield None on failure; normalize so the loop is safe
    for db in (get_all_databases(server) or []):
        metrics[db] = {}
        logging.info(db)
        conn = connect(server, db)
        if conn is None:
            logging.warning("Could not connect to DB {}".format(db))
            metrics[db] = None
            continue
        try:
            # the helpers may return None on failure; coerce to [] so the
            # len()/indexing below cannot raise TypeError
            historic_metrics = get_historic_metrics_from_master(server, db) or []
            max_size = get_max_size(conn, db)
            logging.info(max_size)
            curr_size = get_current_size(conn)
            logging.info(curr_size)
            current_metrics = get_time_based_metrics_from_database(conn) or []
            if len(current_metrics) > 0:
                logging.info("got database metrics ({} to {})".format(current_metrics[0]['end_time'], current_metrics[-1]['end_time']))
            else:
                logging.info("missing database current metrics")
            if len(historic_metrics) > 0:
                logging.info("got database historic metrics ({} to {})".format(historic_metrics[0]['start_time'], historic_metrics[-1]['start_time']))
            else:
                logging.info("missing database historic metrics")
            metrics[db]['Size'] = curr_size
            # guard against ZeroDivisionError when the max size is unknown (0)
            metrics[db]['Size_Percent'] = (curr_size / max_size) * 100 if max_size else None
            metrics[db]['Historic_Metrics'] = historic_metrics
            metrics[db]['Current_Metrics'] = current_metrics
        finally:
            # close the connection even when one of the metric queries raises
            conn.close()
    return metrics
def get_time_based_metrics_from_database(conn):
    """Return the recent resource stats rows from sys.dm_db_resource_stats.

    conn: an open pymssql connection to the database being inspected.
    Returns a list of dict rows, one per sampled interval.
    """
    query = 'SELECT * from sys.dm_db_resource_stats'
    with conn.cursor(as_dict=True) as cursor:
        cursor.execute(query)
        # NOTE: the original had an unreachable `return None` after this
        # `with` block — the body always returns (or raises); removed.
        return cursor.fetchall()
def get_historic_metrics_from_master(server, name):
    """Return historic resource stats for database *name* from master.

    Queries sys.resource_stats on the server's master database.
    Returns a list of dict rows, or None when master is unreachable.
    """
    # The original did `with connect(...) as conn` — that raises when
    # connect() returns None, before the None check inside ever ran.
    conn = connect(server, 'master')
    if conn is None:
        logging.warning("Could not connect to DB")
        return None
    try:
        # parameterized (%s) instead of str.format to avoid SQL injection
        # through the database name
        query = 'SELECT * from sys.resource_stats where database_name = %s'
        with conn.cursor(as_dict=True) as cursor:
            cursor.execute(query, (name,))
            return cursor.fetchall()
    finally:
        conn.close()
def get_max_size(conn, name):
    """Return the configured maximum size of database *name* in MB.

    conn: an open pymssql connection to the server.
    Returns 0 when the property cannot be read.
    """
    # parameterized (%s) instead of str.format to avoid SQL injection
    query = ("SELECT CONVERT(BIGINT,DATABASEPROPERTYEX ( %s , 'MAXSIZEINBYTES'))"
             "/1024/1024 AS 'MAXSIZE_IN_MB'")
    with conn.cursor(as_dict=True) as cursor:
        cursor.execute(query, (name,))
        row = cursor.fetchone()
        return row['MAXSIZE_IN_MB'] if row else 0
def get_current_size(conn):
    """Return the currently allocated size of the connected database in MB.

    Sums reserved pages from sys.dm_db_partition_stats.
    Returns -1 when the query fails or produces no row.
    """
    query = "SELECT (SUM(reserved_page_count) * 8192) / 1024 / 1024 AS DbSize_IN_MB FROM sys.dm_db_partition_stats;"
    try:
        with conn.cursor(as_dict=True) as cursor:
            cursor.execute(query)
            row = cursor.fetchone()
            if row:
                return row['DbSize_IN_MB']
            return -1
    # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still propagate
    except Exception:
        logging.exception('Got exception during query for current size')
        return -1
def get_all_databases(server):
    """Return the names of all user databases on *server* (everything but master).

    Returns [] when master cannot be reached (the original fell through
    and returned None, which crashed the caller's iteration).
    Re-raises any query error after logging it.
    """
    conn = connect(server, 'master')
    if conn is None:
        return []
    try:
        db_names = []
        with conn.cursor(as_dict=True) as cursor:
            query = "SELECT name FROM sys.databases where name != 'master' ;"
            cursor.execute(query)
            for row in cursor:
                logging.debug('row = %r' % (row,))
                db_names.append(row['name'])
        return db_names
    # narrowed from a bare `except:`; still logged and re-raised as before
    except Exception:
        logging.warning("Had an error getting databases")
        raise
    finally:
        # close even when the query raises (the original leaked it on error)
        conn.close()
"""
----------------------------------------------------------------
BOILERPLATE
----------------------------------------------------------------
"""
CONFIG = None
def setup(args):
log_level = 'WARNING'
if args.debug:
import pdb
log_level = 'DEBUG'
log_format = '%(asctime)-15s - %(levelname)s - %(message)s'
logging.basicConfig(format=log_format, level=log_level)
logging.debug('Setup logging configuration')
if args.config:
logging.debug('loading config file')
try:
with open(args.config) as configfile:
config = json.load(configfile)
except IOError:
logging.critical("Failed to load the file")
sys.exit(1)
except ValueError:
logging.critical("Failed to read the config file probably not proper json")
sys.exit(1)
except Exception, e:
raise e
return config
else:
logging.debug('no config file specified')
return None
def main():
    """Entry point: parse CLI args, load the config, and gather metrics.

    With -s/--server, checks only that server; otherwise checks every
    server listed under the config's 'sqlservers' key.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-c', '--config', help='configuration file to load', required=True)
    parser.add_argument('-D', '--debug', help='enable debug mode', action='store_true')
    parser.add_argument('-s', '--server', help='Server Name')
    args = parser.parse_args()
    global CONFIG
    CONFIG = setup(args)
    # default is a dict (the original used [] — wrong type for the lookups below)
    servers = CONFIG.get('sqlservers', {})
    if args.server is not None:
        # .get() instead of [] — the original raised KeyError before the
        # `if server is None` check could ever fire
        server = servers.get(args.server)
        if server is None:
            raise ValueError("Cannot find server")
        check_azure_sql_db(server)
    else:
        for name in servers:
            # py3 print() function (was a py2 print statement)
            print(name, servers[name])
            check_azure_sql_db(servers[name])


if __name__ == '__main__':
    main()
"""
----------------------------------------------------------------
END BOILERPLATE
----------------------------------------------------------------
"""
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment