Skip to content

Instantly share code, notes, and snippets.

Sustainable development

You join a new team put together to deliver an exciting new feature. Your team builds a new thing; you choose frameworks, languages, and processes to support it. Then direction changes: your organisation wants something new. All your precious work to “standardise” now makes it hard to react to change.

A new team is formed to respond to the new direction. They don’t want to use the existing frameworks, languages, and processes — because they aren’t familiar with them, because the last team wrote bad code, or because it used a confusing framework. So they build a new thing, choosing frameworks, languages, and processes to support it.

Repeat.

This process is wasteful, frustrating and dangerous as the stability of existing services is compromised.

import cv2
import signal
import matplotlib.pyplot as plt
from bokeh.plotting import figure
from bokeh.io import output_notebook, show, push_notebook
import cv2
import time
# https://github.com/opencv/opencv/blob/master/data/haarcascades/haarcascade_frontalface_default.xml
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import java.io.PrintWriter
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.model.{StatusCodes, HttpResponse, HttpRequest, HttpEntity}
import akka.http.scaladsl.model.headers.Location
import scala.concurrent.{Future, Await}
import scala.concurrent.duration._
def batch_get_bnumbers(dynamo_client, bnumbers):
    """Batch-fetch records keyed by b-number from the
    ``storage-migration-status`` DynamoDB table.

    Args:
        dynamo_client: a boto3 low-level DynamoDB client (anything
            exposing ``batch_get_item``).
        bnumbers: iterable of b-number strings to look up.

    Returns:
        The raw ``batch_get_item`` response dict.

    NOTE(review): the original paste was truncated mid-call; the closing
    brackets and the ``return`` were reconstructed minimally — confirm
    against the source gist.
    """
    # DynamoDB low-level API wants typed attribute values: {'S': ...} = string.
    keys = [
        {'bnumber': {'S': bnum}}
        for bnum in bnumbers
    ]
    response = dynamo_client.batch_get_item(
        RequestItems={
            'storage-migration-status': {
                'Keys': keys
            }
        }
    )
    return response
def f(*args, **kwargs):
    """Echo whatever arguments arrive: the positional tuple first,
    then the keyword dict — a demo of argument capture."""
    for bundle in (args, kwargs):
        print(bundle)
# Equivalent invocations of f: literal arguments vs. sequence/mapping unpacking.
f(1, 2, 3, name='robert')
positional = (1, 2, 3)
named = {'name': 'robert'}
f(*positional, **named)
from contextlib import closing
class ProgressStore:
    """Helpers for batched reads against a DB-API connection.

    Expects the instance to carry a ``connection`` attribute that hands
    out DB-API cursors (set elsewhere; not shown in this fragment).
    """

    def _chunks(self, data, rows=10000):
        """Yield successive slices of *data*, each at most *rows* long."""
        start = 0
        total = len(data)
        while start < total:
            yield data[start:start + rows]
            start += rows

    def _execute(self, sql):
        """Run *sql* on a fresh cursor and return all rows as a list.

        ``closing()`` guarantees the cursor is released even on error.
        """
        with closing(self.connection.cursor()) as cursor:
            return list(cursor.execute(sql))
class SetEncoder(json.JSONEncoder):
    """JSON encoder that serialises ``set`` values as JSON arrays.

    Anything that is not a set is deferred to the base encoder, which
    raises TypeError for unsupported types as usual.
    """

    def default(self, obj):
        # Guard clause: only sets get special treatment.
        if not isinstance(obj, set):
            return super().default(obj)
        return list(obj)
def store(obj, identifier = None, path = None):
    # NOTE(review): this fragment looks truncated — `obj` and `path` are never
    # used and nothing is returned; confirm the full body against the source gist.
    # Generates a fresh time-based UUID as the identifier when the caller gave none.
    identifier = identifier or str(uuid.uuid1())
# chunk([1,2,3,4,5,6,7,8,9], 3)
# [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
def chunk(l, n):
    """Split sequence *l* into consecutive pieces of at most *n* items each."""
    pieces = []
    for start in range(0, len(l), n):
        pieces.append(l[start:start + n])
    return pieces
def get_queue(client, queue_name):
    """Look up an SQS queue URL by name.

    Args:
        client: a boto3 SQS client (anything exposing ``get_queue_url``).
        queue_name: the name of the queue to resolve.

    Returns:
        The raw ``get_queue_url`` response dict (contains ``'QueueUrl'``).
    """
    response = client.get_queue_url(
        QueueName=queue_name
    )
    # Bug fix: the original computed `response` and discarded it, so every
    # caller received None.
    return response
def load_list(file_location, delimiter=','):
    """Return the first delimiter-separated field of every line in a file.

    Args:
        file_location: path of the text file to read.
        delimiter: field separator within each line (default ``','``).

    Returns:
        List of first fields, one per line, whitespace-stripped.
    """
    items = []
    # `with` guarantees the handle is closed even if a read raises — the
    # original's manual open()/close() leaked the file object on error.
    with open(file_location, 'r') as f:
        for line in f:
            items.append(line.strip().split(delimiter)[0])
    return items
import json
from pprint import pprint
import uuid
import argparse
import boto3
def assumed_role_session(role_arn):
sts_client = boto3.client('sts')