Most Unix/Linux systems come with Python pre-installed; check which version you have with:
$ python -V
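On systems where the default interpreter is still a 2.x release, a Python 3 interpreter is often installed alongside it and answers the same flag (exact versions vary by distribution):
$ python3 -V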
#!/bin/bash
# With no arguments, fall through to plain md5sum (which reads stdin) and stop.
if [ "$#" -eq 0 ]; then
    md5sum "$@"
    exit 0
fi
# Option flags, cleared before the arguments are parsed.
recursive=0
exclude=0
sum=0
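The fragment stops before the options are actually handled. A plausible continuation, assuming getopts-style parsing; the letters -r, -e and -s are hypothetical, inferred only from the variable names above:
while getopts "res" opt; do
    case "$opt" in
        r) recursive=1 ;;   # descend into directories (assumption)
        e) exclude=1 ;;     # skip some pattern, details unknown (assumption)
        s) sum=1 ;;         # print a combined checksum (assumption)
    esac
done
shift $((OPTIND - 1))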
$ sudo apt-get install apparmor
Reading package lists... Done
Building dependency tree
Reading state information... Done
Suggested packages:
  apparmor-docs apparmor-utils
The following NEW packages will be installed:
  apparmor
0 upgraded, 1 newly installed, 0 to remove and 3 not upgraded.
Need to get 0 B/319 kB of archives.
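Once the package is installed, you can confirm that the AppArmor module is loaded with the status tool that ships with it:
$ sudo apparmor_status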
#!/usr/bin/ruby
# http://premium-leechers.blogspot.com
require "cgi"
require "clipboard"
# Expect exactly three arguments: email, password, and a link source.
if ARGV.size != 3
  STDERR.puts "premium-leechers.blogspot.com"
  STDERR.puts "1fichier premium downloader script."
  STDERR.puts "Usage: ruby 1f <email> <password> <txt file with links|1fichier link|'clipboard'>"
  exit
end
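A hypothetical invocation matching the usage string above (the credentials and filename are placeholders):
$ ruby 1f user@example.com hunter2 links.txt
Passing the literal word clipboard as the third argument makes the script read the links from the system clipboard instead, via the clipboard gem required above.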
Put the function in your .zshrc or .bashrc, and then:
~ ia-save http://twitter.com/atomotic
https://web.archive.org/web/20140702123925/http://twitter.com/atomotic
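The function itself is not reproduced above; a minimal sketch that matches this usage, assuming the Wayback Machine's /save endpoint returns the snapshot path in a Content-Location response header (as it did when this was written):
ia-save() {
    # Ask the Wayback Machine to archive $1, then print the snapshot URL
    # taken from the Content-Location header of the response.
    curl -s -I "https://web.archive.org/save/$1" \
        | awk 'tolower($1) == "content-location:" {sub(/\r$/, "", $2); print "https://web.archive.org" $2}'
}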
#!/bin/bash
url=http://redefininggod.com
webarchive=https://web.archive.org
wget="wget -e robots=off -nv"
tab="$(printf '\t')"
additional_url=url.list
# Construct listing.txt from url.list
# (the list of archived pages, including some wildcard URLs)
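The script breaks off before the construction itself. One plausible way to build listing.txt, assuming the Wayback CDX API and a url.list with one URL pattern per line (the tab-separated timestamp/URL layout is only suggested by the $tab variable above):
# For each pattern, list archived captures as "timestamp<TAB>original-url".
while read -r u; do
    $wget -O - "$webarchive/cdx/search/cdx?url=$u&fl=timestamp,original"
done < "$additional_url" | sed "s/ /$tab/" | sort -u > listing.txt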
#!/usr/bin/env zsh
# Initialize the VPN inside its own network namespace
sudo vpnns up
sudo vpnns start_vpn
# Popcorn time! Run the app as the regular user inside the namespace.
sudo ip netns exec frootvpn sudo -u $USER popcorntime
# Cleanup
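The snippet is cut off at the cleanup step. Assuming vpnns tears down symmetrically to its up subcommand, the final line would be something like:
sudo vpnns down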
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author: johan
# @date: 2014-12-12
# @modified_by: johan
# @modified_at: 2014-12-12
import re        # Regular expressions
import requests  # To make HTTP requests
import json      # To parse 4chan's JSON
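The script is truncated after its imports. A minimal sketch of how the three modules typically fit together, assuming the public read-only 4chan JSON API; the board and thread number are placeholders:
board = "wg"                               # hypothetical board
thread_no = "123456"                       # hypothetical thread number
url = "https://a.4cdn.org/{0}/thread/{1}.json".format(board, thread_no)
data = json.loads(requests.get(url).text)  # fetch and parse the thread
# Posts with an attachment carry a server-side filename ("tim") plus an
# extension ("ext"); together they form the image URL.
for post in data["posts"]:
    ext = post.get("ext", "")
    if "tim" in post and re.match(r"\.(jpg|png|gif|webm)\Z", ext):
        print("https://i.4cdn.org/{0}/{1}{2}".format(board, post["tim"], ext))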