Skip to content

Instantly share code, notes, and snippets.

@aespinosa
Created September 16, 2015 12:49
Show Gist options
  • Save aespinosa/35f727e47c18c8057292 to your computer and use it in GitHub Desktop.
Messing with Apache Spark
public final class SparkTest {
private JavaSparkContext _spark;
private int slices;
public static void main(String[] args) throws Exception {
int slices = (args.length == 1) ? Integer.parseInt(args[0]) : 2;
SparkConf conf = new SparkConf().setAppName("SparkPi");
JavaSparkContext spark = new JavaSparkContext(conf)
SparkTest(spark, slices);
}
public SparkTest(JavaSparkContext spark, int slices) {
_spark = spark;
_slices = slices;
}
public float getPi(){
int slices = _slices;
int n = 100000 * slices;
List<Integer> l = new ArrayList<Integer>(n);
for (int i = 0; i < n; i++) {
l.add(i);
}
JavaRDD<Integer> dataSet = jsc.parallelize(l, slices);
int count = dataSet.map(new Function<Integer, Integer>() {
@Override
public Integer call(Integer integer) {
double x = Math.random() * 2 - 1;
double y = Math.random() * 2 - 1;
return (x * x + y * y < 1) ? 1 : 0;
}
}).reduce(new Function2<Integer, Integer, Integer>() {
@Override
public Integer call(Integer integer, Integer integer2) {
return integer + integer2;
}
});
return 4.0 * count / n;
}
}
/** Integration test driving {@link SparkTest} against a local Spark master. */
public class IntegrationTests {
    /**
     * Computes pi with 300 slices on a local context and checks the estimate.
     *
     * <p>NOTE(review): getPi() uses Math.random(), so a 0.001 tolerance may be
     * flaky for some seeds; widen the delta if this test proves unstable.
     */
    @Test
    public void shouldWork() {
        SparkConf conf = new SparkConf().setAppName("TestApp").setMaster("local");
        JavaSparkContext spark = new JavaSparkContext(conf);
        try {
            int slices = 300;
            // BUG FIX: 'interview' was used without ever being declared.
            SparkTest interview = new SparkTest(spark, slices);
            Assert.assertEquals(3.14, interview.getPi(), 0.001);
        } finally {
            spark.stop(); // BUG FIX: context was leaked; stop it even on assertion failure
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment