Connects the request to the app to the appropriate scenario database
- 200: [
route.json
]
returns
- 200: [[
tripId
]]
/**
 * Minimal Reader monad: wraps a computation `C => A` that reads a value of
 * type `A` out of an environment of type `C`. `map`/`flatMap` make it usable
 * in for-comprehensions, threading the same environment through every stage.
 */
case class Reader[C, A](g: C => A) {
  /** Run the reader against the environment `c`. */
  def apply(c: C): A = g(c)

  /** Transform the produced value; the environment is untouched. */
  def map[B](f: A => B): Reader[C, B] =
    Reader { c => f(g(c)) }

  /** Sequence a dependent reader; both stages see the same environment `c`. */
  def flatMap[B](f: A => Reader[C, B]): Reader[C, B] =
    Reader { c => f(g(c))(c) }
}
#!/usr/bin/env python | |
import os, argparse, sys, re | |
license = """Copyright (c) 2014 %s. | |
Licensed under the Apache License, Version 2.0 (the "License"); | |
you may not use this file except in compliance with the License. | |
You may obtain a copy of the License at |
/** Mixin for entities addressable by a stable string identifier. */
trait Unique {
  /** Unique identifier of this entity. */
  val id: String
}
/**
 * Abstraction over a standard "Indexed" table.
 */
trait CrudComponent{ this: Profile => | |
import profile.simple._ |
{% for host in groups['master'] %}
{{ hostvars[host]['private_ip'] }}
{% endfor %}
# Prompt: "[ user@host cwd ]$ " — user@host in bold green, cwd in bold blue;
# prefixes "(chroot)" when debian_chroot is set (the :+ expansion emits nothing otherwise).
export PS1="[ ${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\] \[\033[01;34m\]\w\[\033[00m\] ]$ "
# Build and install the SWIG Java bindings from swig/java.
cd 'swig/java' do
  # Record the active JDK location so the makefile can find JAVA_HOME.
  system "echo JAVA_HOME=`/usr/libexec/java_home` >> java.opt"
  # The shipped java.opt targets linux; retarget for darwin (sed keeps a .bak backup).
  system "sed -i.bak s/linux/darwin/g java.opt"
  system "make"
  system "make install"
end
<html class="no-js" lang=""><head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <title>Climate Change</title>
    <meta name="description" content="">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <link rel="apple-touch-icon" href="apple-touch-icon.png">
    <style>
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 7 in stage 2.0 failed 4 times, most recent failure: Lost task 7.3 in stage 2.0 (TID 253, ec2-??-???-??-???.compute-1.amazonaws.com): java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
    at sun.nio.ch.FileChannelImpl.map(FileChannelImpl.java:829)
    at org.apache.spark.storage.DiskStore.getBytes(DiskStore.scala:123)
    at org.apache.spark.storage.DiskStore.getBytes(DiskStore.scala:132)
    at org.apache.spark.storage.BlockManager.doGetLocal(BlockManager.scala:517)
    at org.apache.spark.storage.BlockManager.getLocal(BlockManager.scala:432)
    at org.apache.spark.storage.BlockManager.get(BlockManager.scala:618)
    at org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:44)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:228)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
// Source: http://virtuslab.com/blog/debugging-implicits/
// Reify the expression to inspect which implicits the compiler inserted for `sorted`.
import scala.reflect.runtime.universe._
// NOTE(review): `list` is assumed to be bound in the surrounding REPL session.
def tree = reify { list.sorted }.tree
// tree: reflect.runtime.universe.Tree
// Pretty-print the fully expanded tree, making the inserted implicits visible:
show(tree)
// Predef.refArrayOps($read.list).sorted(Ordering.ordered(Predef.$conforms))