Skip to content

Instantly share code, notes, and snippets.

import sys
import pandas as pd
# Function to retrieve a list of Pandas DataFrames and their sizes in memory
# Call the function:
# pandas_dfs_in_memory(mydir=dir(),parent_vars=globals())
def pandas_dfs_in_memory(mydir,parent_vars):
    """List pandas DataFrames present in the caller's namespace with their memory sizes.

    Intended call pattern (from the header comment above):
        pandas_dfs_in_memory(mydir=dir(), parent_vars=globals())

    Parameters
    ----------
    mydir : list of str
        Names in the caller's scope, i.e. the result of ``dir()``.
    parent_vars : dict
        The caller's ``globals()`` mapping, used to resolve each name.

    NOTE(review): this fragment is truncated in this view — only the
    accumulator initialization is visible; the scan/sizing logic and the
    return value are missing. Confirm against the original gist.
    """
    # get a list of all the local objects with sizes
    objects=[]
#!/usr/bin/env python
"""
Credit: https://gist.github.com/omnidan/1456674
Copyright (c) 2011, Daniel Bugl
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
import ipinfo
from opencensus.ext.azure.log_exporter import AzureLogHandler
def main(host, port):
    """Start the honeypot: set up App Insights logging and bind a TCP socket.

    Parameters
    ----------
    host : str
        Interface address to bind the listening socket to.
    port : int
        TCP port to bind.

    NOTE(review): truncated in this view — after bind() the listen/accept
    loop and connection handling are not visible. `socket` must be imported
    elsewhere in the original file.
    """
    print ('Starting honeypot!')
    # initialize Azure Monitoring logger object
    logger = startLogger()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((host, port))
# Write connection attempts to App Insights
# https://docs.microsoft.com/en-us/azure/azure-monitor/app/opencensus-python
def writeAppInsights(logger,address,data):
    """Record a connection attempt in Azure Application Insights.

    Builds an OpenCensus ``custom_dimensions`` payload from the peer address
    (see https://docs.microsoft.com/en-us/azure/azure-monitor/app/opencensus-python).

    Parameters
    ----------
    logger : logging.Logger
        Logger configured with an AzureLogHandler (presumably the object
        returned by startLogger() — TODO confirm).
    address : tuple
        Peer address; address[0] is read as the IP and address[1] as the
        port, consistent with a ``socket.accept()`` pair.
    data : bytes or str
        Payload received from the connection — not used in the visible
        lines; presumably logged further down. TODO confirm.

    NOTE(review): truncated in this view — the custom_dimensions dict
    literal is not closed and the actual logger call is missing.
    """
    # create the custom_dimensions dictionary
    logData = {'custom_dimensions': {
        'Time': time.ctime(),
        'IP': address[0],
        'Port': address[1],
import ipinfo
def ipData(address):
    """Look up geolocation details for the connecting IP via the ipinfo service.

    Parameters
    ----------
    address : tuple
        Peer address; only address[0] (the IP string) is read here.
        Presumably the ``socket.accept()`` address pair — TODO confirm.

    NOTE(review): truncated in this view — the ipinfo client construction
    and lookup call are missing. The token below is a placeholder and
    appears to be missing its closing '>'; replace it with a real ipinfo
    access token before use (left unchanged here as it is a runtime string).
    """
    # https://stackoverflow.com/questions/24678308/how-to-find-location-with-ip-address-in-python
    ip_address = address[0]
    # ipinfo package - free up to 50k requests
    # https://github.com/ipinfo/python
    # login to ipinfo account: https://ipinfo.io/account?welcome=true
    access_token = '<your-access-token'
from opencensus.ext.azure.log_exporter import AzureLogHandler
# function to initialize App Insights logger object
def startLogger():
    """Create a logger for sending telemetry to Azure Application Insights.

    Reads the instrumentation key from the ``InstrumentationKey``
    environment variable (``None`` if unset, since ``os.environ.get`` is
    used without a default).

    NOTE(review): truncated in this view — AzureLogHandler is imported but
    never attached in the visible lines, ``instrumentation_key`` is unused
    so far, and there is no return statement in view; the caller in main()
    expects a logger object back. `os` and `logging` must be imported
    elsewhere in the original file.
    """
    from opencensus.ext.azure.log_exporter import AzureLogHandler
    # Get Azure Monitor instrumentation key from environment variable
    instrumentation_key = os.environ.get('InstrumentationKey')
    # initialize logger object
    logger = logging.getLogger(__name__)
# Snapshot of unemployment by state for January 1st 2020:
# format the datetime Date column as 'YYYY-MM-DD' strings, keep the rows
# matching '2020-01-01', retain only the State and Pct_Unemployed columns,
# and index the result by State.
# NOTE(review): `df` is defined outside this view — assumes it has a
# datetime64 'Date' column plus 'State' and 'Pct_Unemployed' columns;
# confirm upstream.
jan_emp = df[df['Date'].dt.strftime(date_format='%Y-%m-%d').str.contains('2020-01-01')][['State','Pct_Unemployed']].set_index('State')
# Convert the Date column back to string,
# find June 1st 2020,
# only keep State and percent unemployed columns
# Set index to State
def clean_bls_data(df):
    '''
    Function for cleaning BLS data into a 'tidy' vertical dataset like:
    State | Date | Metric
    Returns pandas dataframe.

    NOTE(review): truncated in this view — the pd.melt() call below is not
    closed and the return statement is missing. The melt treats the first
    column as 'Date' (id_vars) and every remaining column (presumably one
    per state — TODO confirm) as a value column.
    '''
    # Melt the dataframe into vertical format: one row per (Date, column) pair.
    melted_df = pd.melt(df, id_vars=['Date'],
                        value_vars=df.columns[1:],
import pandas as pd
import blspandas
import api_key
import requests
import json
# Pull a list of state fips Ids.
# NOTE(review): `blspandas` is a project-local module; get_state_fips() is
# not visible here, so the exact type/shape of bls_fips is unconfirmed.
bls_fips = blspandas.get_state_fips()
# Create a dictionary of BLS query Ids and States
# NOTE(review): the dictionary-building code is truncated from this view.
# Import packages
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# Independent variable: number of chimpanzees in the hunting party (1 through 8).
x = np.arange(1, 9)
# Dependent variable: percent of successful hunts observed for each party size.
y = np.array((30, 45, 51, 57, 60, 65, 70, 71))