From a host `homeplate`, targeting a host `secondbase`.
Assumes you've already got a public key on `homeplate`. If not:

```
you@homeplate $ ssh-keygen -t rsa
```

Make sure there's a `~/.ssh` directory on `secondbase`:
##### State area data ####
# This script extracts state area data from US Census Bureau TIGER shapefiles
# and prepares them for inclusion in the `choroplethrMaps` package.
require("rgdal")    # requires sp; will use proj.4 if installed
require("maptools")
require('devtools')

# Work inside a scratch directory that will hold the downloaded shapefile.
setwd('scratch')
# Basename of the Census Bureau 1:20m-resolution state shapefile archive.
zipfileSt <- 'cb_2014_us_state_20m'
# Append two test features (id + WKT POLYGON) to poly.txt for later ingestion.
# The stray table-cell artifacts (" | |") have been removed: with them present
# they would have been written verbatim into the data file.
cat <<EOF >>poly.txt
1 POLYGON((100 0, 101 0, 101 1, 100 1, 100 0))
2 POLYGON((-78.52 38.00, -78.44 38.00, -78.44 38.07, -78.52 38.07, -78.52 38.00))
EOF
cat <<EOF >>poly.conf | |
{ | |
sft = { | |
type-name = "bbox" | |
attributes = [ |
#! /usr/bin/env bash
#
# Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0 which
# accompanies this distribution and is available at
# http://www.opensource.org/licenses/apache2.0.php.
#
setGeoHome () |
import pandas as pd
import folium
from matplotlib.colors import Normalize, rgb2hex
import matplotlib.cm as cm

# Pull the USGS feed of all earthquakes recorded in the past day.
data = pd.read_csv('http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_day.csv')

# Normalizer mapping the day's magnitude range onto [0, 1] for colormap lookup.
norm = Normalize(data['mag'].min(), data['mag'].max())

# Base map centred on North America.
# NOTE(review): the name `map` shadows the builtin; kept as-is because the
# loop that follows (outside this block) refers to it by this name.
map = folium.Map(location=[48, -102], zoom_start=3)
for eq in data.iterrows(): |
package mil.nga.giat.geowave.analytics.spark.tools | |
import com.vividsolutions.jts.geom.LineString | |
import mil.nga.giat.geowave.adapter.vector.FeatureWritable | |
import mil.nga.giat.geowave.analytic.distance.DistanceFn | |
import mil.nga.giat.geowave.analytic.partitioner.Partitioner.PartitionData | |
import mil.nga.giat.geowave.analytics.spark._ | |
import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions | |
import mil.nga.giat.geowave.mapreduce.input.{GeoWaveInputFormat, GeoWaveInputKey} | |
import org.apache.spark.rdd.RDD |
Describing attributes of feature 'ObjectDetection' from catalog table 'geomesa122.gbdx'...
geom: Point (ST-Geo-index)
item_date: Date (ST-Time-index) (Indexed)
polygon: Polygon
vector_type: String
ingest_date: Date (Indexed)
text: String
source: String
item_type: List
first_item_type: String
#!/usr/bin/env bash
# Script to take these gbdx vector zips and create tar.gz from them

# Maximum number of lines staged into a single output file before rolling over.
maxLinesStage=700000
# Counter used to number successive output archives.
outFileNum=0
# Scratch directory on the EBS volume where intermediate files are written.
workDir=/ebs/workdir

# find the zips and loop over them
From a host `homeplate`, targeting a host `secondbase`.
Assumes you've already got a public key on `homeplate`. If not:

```
you@homeplate $ ssh-keygen -t rsa
```

Make sure there's a `~/.ssh` directory on `secondbase`:
#!/usr/bin/env bash
# Build a Spark v1.5.0 distribution tarball against Scala 2.11,
# with Hadoop provided by the deployment environment.
# FIX: the original shebang was `#!/usr/bin/env` with no interpreter named,
# which fails to execute the script.
git clone [email protected]:apache/spark
pushd spark
git checkout v1.5.0
# Switch the build to Scala 2.11 before producing the distribution.
./dev/change-scala-version 2.11
./make-distribution.sh --name scala_2.11-without-hadoop --tgz -Phadoop-provided -Dscala-2.11 -Pyarn -DskipTests
// Convert a java.time.ZonedDateTime to an equivalent Joda-Time DateTime.
import java.time._
import org.joda.time._

// Current instant in UTC ("Z") as a java.time value.
val j8 = ZonedDateTime.now(ZoneId.of("Z"))
//j8: java.time.ZonedDateTime = 2017-06-01T19:40:54.933Z

// Joda zone with the same fixed offset (offset seconds -> millis).
// NOTE(review): a fixed-offset zone carries no DST rules; that is fine for
// UTC as used here, but would be lossy for a regional zone.
val z = DateTimeZone.forOffsetMillis(j8.getOffset.getTotalSeconds * 1000)
//z: org.joda.time.DateTimeZone = UTC

// Same epoch-millisecond instant, re-expressed as a Joda DateTime in that zone.
val jodated = new DateTime(j8.toInstant().toEpochMilli(), z)