A command-line script to retrieve JSON for all of your gists.
Usage: github.sh [username] [password] [total number of gists] [oauth or user:password]
<!DOCTYPE html>
<html> | |
<head> | |
<title>Stop and Frisk</title> | |
<meta charset="utf-8" /> | |
<meta name="viewport" content="width=device-width, initial-scale=1.0"> | |
<!--[if lte IE 8]><link rel="stylesheet" href="js/ext/wax/ext/leaflet.ie.css" /><![endif]--> | |
<script src="js/ext/wax/ext/leaflet.js"></script> | |
<script src='js/ext/wax/dist/wax.leaf.min.js' type='text/javascript'></script> | |
<link rel="stylesheet" href="js/ext/wax/ext/leaflet.css" /> |
#!/bin/bash | |
## Usage: ./hirise.sh
## Self-contained scraper for the University of Arizona's HiRISE imaging
## experiment FTP archive. Generates a single file (FILES.txt) containing URLs
## for each Mars image in the archive, making it easier to programmatically
## access the metadata .LBL files and, ultimately, the source images (.jp2).
# NOTE(review): this fragment is corrupted — the array below is truncated mid-
# element ('ORB_014 has no closing quote), so the line cannot parse as-is, and
# the trailing "| |" markers look like table-extraction artifacts. Restore the
# original script before running. Each entry presumably names an FTP directory
# covering a 100-orbit range — confirm against the HiRISE archive layout.
declare -a DIRECTORIES=('ORB_011200_011299' 'ORB_011300_011399' 'ORB_011400_011499' 'ORB_011500_011599' 'ORB_011600_011699' 'ORB_011700_011799' 'ORB_011800_011899' 'ORB_011900_011999' 'ORB_012000_012099' 'ORB_012100_012199' 'ORB_012200_012299' 'ORB_012300_012399' 'ORB_012400_012499' 'ORB_012500_012599' 'ORB_012600_012699' 'ORB_012700_012799' 'ORB_012800_012899' 'ORB_012900_012999' 'ORB_013000_013099' 'ORB_013100_013199' 'ORB_013200_013299' 'ORB_013300_013399' 'ORB_013400_013499' 'ORB_013500_013599' 'ORB_013600_013699' 'ORB_013700_013799' 'ORB_013800_013899' 'ORB_013900_013999' 'ORB_014000_014099' 'ORB_014100_014199' 'ORB_014200_014299' 'ORB_014300_014399' 'ORB_014 |
## Run in a directory of raster images whose nodata values make their gdalinfo
## extents unreliable. Output is a CSV (index.csv) pairing each filename with
## its data-containing extent, ready to import into PostGIS, SQLite, etc. for
## spatial queries.
## usage:
# Start the CSV with its header row (semicolon-separated: file;GEOMETRY).
echo "file;GEOMETRY" > index.csv | |
# NOTE(review): parsing `ls` plus the unquoted expansions below break on paths
# containing whitespace (ShellCheck SC2012/SC2086/SC2068) — prefer
# FILES=( *.tif ). Also, `declare -a FILES=$(...)` assigns one scalar string,
# not an array; it only "works" because the unquoted ${FILES[@]} re-splits it.
declare -a FILES=$(ls -1 *tif); | |
for FILE in ${FILES[@]}; do | |
# Strip the .tif suffix so $FILE names the per-file work products.
FILE=${FILE%.tif} | |
# One temp file per raster: first field is the filename (note `>` truncates
# any previous run's file).
echo "$FILE.tif;" > $FILE.txt | |
# Trace the data footprint into WKT; '0 0' is presumably the nodata value
# spec — confirm against the gdal_trace_outline (dans-gdal-scripts) docs.
gdal_trace_outline $FILE.tif -ndv '0 0' -out-cs en -wkt-out $FILE.wkt | |
cat $FILE.wkt >> $FILE.txt |
# NOTE(review): fragment ends here — the loop's `done` (and presumably the
# step that appends each $FILE.txt into index.csv) was lost in extraction.
#!/bin/bash | |
# Guard: all three positional arguments (text, source language, target
# language) are required; print usage when any is missing or empty.
if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]; then | |
echo 'Usage: translate.sh ["Original Text"] ["Source Language"] ["Target Language"]' | |
echo 'Example: translate.sh "Hello World" "en" "fr"' | |
# NOTE(review): this fragment never closes the `if` — the original presumably
# continues with an `exit` and a matching `fi` that were lost in extraction.
# The trailing "| |" markers on each line are table-extraction artifacts.
# Parallel lookup tables: LANG_CODES[i] is the translator language code for
# the human-readable name LANG_NAMES[i] (e.g. 'ht' <-> 'Haitian Creole').
declare -a LANG_NAMES=('Arabic' 'Czech' 'Danish' 'German' 'English' 'Estonian' 'Finnish' 'French' 'Dutch' 'Greek' 'Hebrew' 'Haitian Creole' 'Hungarian' 'Indonesian' 'Italian' 'Japanese' 'Korean' 'Lithuanian' 'Latvian' 'Norwegian' 'Polish' 'Portuguese' 'Romanian' 'Spanish' 'Russian' 'Slovak' 'Slovene' 'Swedish' 'Thai' 'Turkish' 'Ukrainian' 'Vietnamese' 'Simplified Chinese' 'Traditional Chinese');
declare -a LANG_CODES=('ar' 'cs' 'da' 'de' 'en' 'et' 'fi' 'fr' 'nl' 'el' 'he' 'ht' 'hu' 'id' 'it' 'ja' 'ko' 'lt' 'lv' 'no' 'pl' 'pt' 'ro' 'es' 'ru' 'sk' 'sl' 'sv' 'th' 'tr' 'uk' 'vi' 'zh-CHS' 'zh-CHT');
# List every supported "code: Name" pair. Iterating the array's own indices
# ("${!LANG_CODES[@]}") avoids spawning `seq`, and quoting the expansions
# keeps multi-word names such as "Haitian Creole" intact. (Also strips the
# stray "| |" table artifacts that made the original lines syntax errors.)
for IX in "${!LANG_CODES[@]}"; do
  echo "${LANG_CODES[$IX]}: ${LANG_NAMES[$IX]}"
done
#!/usr/bin/python | |
# -*- coding: utf-8 -*- | |
import sqlite3 | |
import sys | |
import re | |
import uuid | |
# NOTE(review): wildcard imports pollute the namespace and hide provenance —
# prefer `from bs4 import BeautifulSoup`. Also, the trailing "| |" markers on
# these lines are table-extraction artifacts that break the Python syntax.
from bs4 import * | |
import lxml | |
import unicodedata |
from osgeo import osr

# Build a SpatialReference from an ESRI-style WKT definition of an
# Everest-1830 Transverse Mercator projection (central meridian 90E,
# scale factor 0.9996) and convert it to its PROJ.4 representation.
srs = osr.SpatialReference()
wkt_text = 'PROJCS["Transverse Mercator",GEOGCS["GCS_Everest_1830",DATUM["D_Everest_1830",SPHEROID["Everest_1830",6377299.36,300.8017]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500295.0],PARAMETER["False_Northing",-2000090.0],PARAMETER["Central_Meridian",90.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]'
# Fix: GDAL's Python binding spells this ImportFromWkt (capital I), matching
# ExportToProj4 below — the original lowercase importFromWkt raises
# AttributeError. (Stray "| |" table artifacts also removed.)
srs.ImportFromWkt(wkt_text)
# NOTE(review): the PROJ.4 string is computed but discarded here — presumably
# this ran interactively; wrap in print(...) if used as a script.
srs.ExportToProj4()
mapbox.load('herwig.map-siz5m7we', function(o) { | |
var map = mapbox.map('map'); | |
map.addLayer(o.layer); | |
map.zoom(4).center({ | |
lat: -28.613, | |
lon: 144.14 | |
}).setPanLimits([{ | |
lat: -85.0511, | |
lon: -180 | |
}, { |