Manage an infrastructure that is impossible to outgrow!
Enjoy the DevOps lifestyle; automate everything. Have fun scripting as well as programming.
# List unique values in a DataFrame column
pd.unique(df['column_name'])

# Convert Series datatype to numeric, coercing any non-numeric values to NaN
df['col'] = pd.to_numeric(df['col'], errors='coerce')

# Grab DataFrame rows where a column has certain values
valuelist = ['value1', 'value2', 'value3']
df = df[df.column.isin(valuelist)]
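
The three recipes above can be sanity-checked against a throwaway DataFrame;
the frame, column names, and values below are made up purely for illustration.

import pandas as pd

df = pd.DataFrame({'column': ['value1', 'value2', 'other', 'value3'],
                   'col': ['1', '2', 'x', '4']})

print(pd.unique(df['column']))                          # all four distinct values
df['col'] = pd.to_numeric(df['col'], errors='coerce')   # 'x' becomes NaN
valuelist = ['value1', 'value2', 'value3']
print(df[df['column'].isin(valuelist)])                 # drops the 'other' row
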
#!/usr/bin/env python
import hashlib
import optparse

import paramiko
from Crypto.PublicKey import RSA


def insert_char_every_n_chars(string, char='\n', every=64):
    # Break the string into chunks of `every` characters joined by `char`,
    # e.g. to wrap a base64-encoded key at 64 columns.
    return char.join(string[i:i + every] for i in range(0, len(string), every))
{
  "AWSTemplateFormatVersion": "2010-09-09",
  "Description": "Core ec2 example: http://coreos.com/docs/ec2/",
  "Parameters": {
    "InstanceType": {
      "Description": "EC2 instance type",
      "Type": "String",
      "Default": "t1.micro",
      "AllowedValues": [
        "t1.micro", "m1.small", "m1.medium", "m1.large", "m1.xlarge",
        "m3.xlarge", "m3.2xlarge", "m2.xlarge", "m2.2xlarge", "m2.4xlarge",
        "c1.medium", "c1.xlarge", "cc1.4xlarge", "cc2.8xlarge", "cg1.4xlarge",
        "hi1.4xlarge", "hs1.8xlarge"
      ],
#!/usr/bin/env ruby
# check_aws_status.rb
# A Nagios plugin for fetching RSS feeds from http://status.aws.amazon.com.
# Source: https://gist.github.com/1604786
# Written by Aaron Suggs: https://github.com/ktheory
require 'rubygems'
require 'nokogiri'
require 'net/http'
#!/usr/bin/env ruby
# Dump every key in the local Redis instance along with its value.
require "redis"

redis = Redis.new
redis.keys("*").each do |key|
  val = case redis.type(key)
        when "string" then redis.get(key)
        when "list"   then redis.lrange(key, 0, -1)
        when "set"    then redis.smembers(key)
        when "hash"   then redis.hgetall(key)
        end
  puts "#{key}: #{val.inspect}"
end
A good commit message looks like this:

Header line: explaining the commit in one line

Body of commit message is a few lines of text, explaining things
in more detail, possibly giving some background about the issue
being fixed, etc etc.

The body of the commit message can be several paragraphs, and
please do proper word-wrap and keep columns shorter than about
74 characters or so.
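
For instance, a commit message following that shape might read as below; the
subject and details here are invented purely as an illustration.

Fix race condition in session cleanup

The cleanup job could delete a session row while a request handler was
still refreshing it, which occasionally logged users out for no visible
reason. Take the per-session lock before expiring rows so the two code
paths can no longer interleave.
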
#!/bin/bash
(/usr/local/bin/db2log | \
  mk-query-digest --fingerprints \
    --filter '$event->{user} !~ m/^(bi|memonic)$/') 2>&1 | \
  mail -s "MySQL slow logs" root

# Rotate slow logs. Will move them into the backup table slow_log_backup. If
# that table exists it's overwritten with the primary slow log.
# So with this strategy we can still access yesterday's slow log by querying
# slow_log_backup.
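
The rotation those comments describe could be done roughly as sketched below,
assuming the slow log is written to the mysql.slow_log table (log_output=TABLE).
The exact statements are an assumption for illustration, not the original script.

#!/bin/bash
# Hypothetical rotation: swap mysql.slow_log with mysql.slow_log_backup.
# MySQL refuses to rename a log table while logging to it is enabled,
# so the slow query log is switched off around the swap.
mysql --batch <<'SQL'
SET GLOBAL slow_query_log = 'OFF';
DROP TABLE IF EXISTS mysql.slow_log_backup;
CREATE TABLE mysql.slow_log_backup LIKE mysql.slow_log;
RENAME TABLE mysql.slow_log        TO mysql.slow_log_tmp,
             mysql.slow_log_backup TO mysql.slow_log,
             mysql.slow_log_tmp    TO mysql.slow_log_backup;
SET GLOBAL slow_query_log = 'ON';
SQL
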
require 'strscan'
require 'forwardable'

# Stupid JSON parser. Only handles well-formed JSON;
# otherwise it may go into an endless loop.
class Parser
  WSP = /\s+/
  OBJ = /[{\[]/
  NUM = /-?\d+(\.\d+)?([eE][+-]?\d+)?/
  BOL = /(?:true|false)\b/