Skip to content

Instantly share code, notes, and snippets.

View JasonMillward's full-sized avatar
🐝
I may be slow to respond.

Jason JasonMillward

🐝
I may be slow to respond.
View GitHub Profile
import json
from urllib2 import Request, urlopen
import psutil
import os
def octoprint(path):
    # Query the OctoPrint REST API on the local printer at 10.0.0.60.
    # NOTE(review): uses Python 2's urllib2 — this snippet predates Python 3.
    q = Request("http://10.0.0.60/api/{}".format(path))
    # Authenticate with the OctoPrint API key header.
    q.add_header("X-Api-Key", "[YOUR API KEY GOES HERE]")
    # Response body is read but never returned here — the gist appears
    # truncated; presumably a `return json.loads(a)` (or similar) followed.
    a = urlopen(q).read()
#include <Adafruit_NeoPixel.h>
#include "ESP8266WiFi.h"
#define PIN D4
Adafruit_NeoPixel strip = Adafruit_NeoPixel(10, PIN, NEO_GRB + NEO_KHZ800);
bool discomode = true;
void setup() {
@JasonMillward
JasonMillward / spritesheetExtractor.py
Created May 12, 2017 03:22
Extract .png files from a compiled sprite sheet Raw
import binascii
import re
import os
# Recursively scan the current directory for compiled sprite-sheet
# dumps (.bin files). The extraction logic that follows the filter is
# not visible here — the gist appears truncated at this point.
for directory, subdirectories, files in os.walk('.'):
    for file in files:
        # Only .bin sprite-sheet containers are of interest; skip the rest.
        if not file.endswith('.bin'):
            continue
#!/bin/bash
# Stop all forever-managed Node processes for a Ghost blog.
# Usage: $0 <ghost path>
ghost_path=$1
# Quote the expansion: unquoted, a path containing spaces makes the
# test malformed, and an empty value degenerates to the one-arg `[ -z ]`.
if [ -z "$ghost_path" ]; then
    echo "Usage: $0 <ghost path>";
    exit 1;
fi
forever stopall
var debug = true;
function log(str) {
if (debug) {
var currentTime, h, m, s;
currentTime = new Date;
h = currentTime.getHours();
m = currentTime.getMinutes();
s = currentTime.getSeconds();
try {
#!/bin/bash
find . -type f -iname "*.png" -print0 | while IFS= read -r -d $'\0' file; do
isWhite=`convert $file -colorspace HSL -channel g -separate +channel -format "%[fx:mean]" info:`
if [ "$isWhite" == "0" ]
then
rm $file
else
def hodor(hodor):
    """Translate a small integer into Hodor-speak.

    Accepts 0 or 1; any other key raises KeyError, exactly as the
    original dict-indexing implementation did.
    """
    translations = {
        0: "Hodor",
        1: "Hodor Hodor",
    }
    return translations[hodor]
#!/bin/bash
txtred='\e[0;31m'
txtprp='\e[1;35m'
txtgrn='\e[0;32m'
txtrst='\e[0m'
find /mnt/kronos -type f | while read -r line; do
###
jQuery Auto Capitalise + Capslock checker
Adds CSS to input classes, and optionally checks for capslock and prompts to turn it off
Should prevent ALL CAPS data from appearing in database
Released under the MIT license
Copyright (c) 2013, Jason Millward
@version $Id: 1, 2013-09-04 $;
@JasonMillward
JasonMillward / download.sh
Last active November 5, 2017 20:36
Download imgur.com galleries easily. This snippet downloads the latest 100 pages of images in the wallpaper gallery.
#!/bin/bash
for ((i = 0; i <= 100; i++)); do
echo "Grabbing page $i"
wget -q "http://imgur.com/r/wallpapers/page/$i" -O - | grep 'class="post"' | cut -d\" -f2 | while read id
do
echo "Downloading $id.jpg"
wget -q -c "http://i.imgur.com/$id.jpg"
done