I hereby claim:
- I am hamnis on github.
- I am hamnis (https://keybase.io/hamnis) on keybase.
- I have a public key whose fingerprint is 7804 AC1C 98A5 12E3 875E 42F9 6DBA 10D6 E1D1 FAD3
To claim this, I am signing this object:
public static <A, B> B foldLeft(Stream<A> iterable, B identity, BiFunction<B, A, B> bf) { | |
return foldLeft(iterable.iterator(), identity, bf); | |
} | |
public static <A, B> B foldLeft(Iterable<A> iterable, B identity, BiFunction<B, A, B> bf) { | |
return foldLeft(iterable.iterator(), identity, bf); | |
} | |
public static <A, B> B foldLeft(Iterator<A> iterator, B identity, BiFunction<B, A, B> bf) { | |
B result = identity; |
package iso; | |
import java.util.function.Function; | |
/**
 * An isomorphism between types {@code A} and {@code B}: a pair of functions
 * that convert losslessly in both directions. Implementations are expected to
 * satisfy the round-trip laws
 * {@code reverseGet().apply(get().apply(a)) == a} and
 * {@code get().apply(reverseGet().apply(b)) == b}.
 *
 * @param <A> the source type
 * @param <B> the target type
 */
public interface Iso<A, B> {

    /** @return the forward conversion from {@code A} to {@code B} */
    Function<A, B> get();

    /** @return the backward conversion from {@code B} to {@code A} */
    Function<B, A> reverseGet();

    /**
     * Flips this isomorphism, swapping the roles of the two directions.
     * {@code reverse().reverse()} is behaviorally equivalent to {@code this}.
     *
     * @return the inverse isomorphism from {@code B} to {@code A}
     */
    default Iso<B, A> reverse() {
        Iso<A, B> self = this;
        return new Iso<B, A>() {
            @Override
            public Function<B, A> get() {
                return self.reverseGet();
            }

            @Override
            public Function<A, B> reverseGet() {
                return self.get();
            }
        };
    }

    /**
     * Builds an isomorphism from the two conversion functions.
     *
     * @param get        the forward conversion; must not be null
     * @param reverseGet the backward conversion; must not be null
     * @param <A>        the source type
     * @param <B>        the target type
     * @return an {@code Iso} wrapping the two functions
     */
    static <A, B> Iso<A, B> of(Function<A, B> get, Function<B, A> reverseGet) {
        return new Iso<A, B>() {
            @Override
            public Function<A, B> get() {
                return get;
            }

            @Override
            public Function<B, A> reverseGet() {
                return reverseGet;
            }
        };
    }
}
<!DOCTYPE html> | |
<html> | |
<head> | |
<title>Vagrant and Ansible</title> | |
<meta charset="utf-8"> | |
<style> | |
@import url(https://fonts.googleapis.com/css?family=Yanone+Kaffeesatz); | |
@import url(https://fonts.googleapis.com/css?family=Droid+Serif:400,700,400italic); | |
@import url(https://fonts.googleapis.com/css?family=Ubuntu+Mono:400,700,400italic); |
package net.hamnaberg.future; | |
import java.util.concurrent.*; | |
public class FutureConverter { | |
public static <A> CompletableFuture<A> convert(Future<A> future, ExecutorService executor, boolean shutdown) { | |
if (future instanceof CompletableFuture) | |
return (CompletableFuture<A>) future; | |
final BlockingQueue<Future<A>> queue = new LinkedBlockingQueue<>(1); |
package kafkaclient | |
import scalaz.stream._ | |
import scalaz.concurrent.Task | |
import java.util.Properties | |
import kafka.consumer._ | |
import kafka.serializer._ | |
import scodec.bits.ByteVector | |
import java.util.concurrent.{Executors, ThreadFactory} | |
import java.util.concurrent.atomic.AtomicInteger |
import org.apache.avro.Schema | |
import org.apache.avro.generic.GenericRecord | |
import org.apache.avro.mapred.{AvroOutputFormat, AvroWrapper, AvroKey, AvroJob} | |
import org.apache.hadoop.io.NullWritable | |
import org.apache.hadoop.mapred.JobConf | |
import org.apache.spark.SparkContext | |
import org.apache.spark.rdd.RDD | |
package object avro { | |
implicit class AvroRDDFunctions[A <: GenericRecord](val rdd: RDD[A]) extends AnyVal { |
import java.nio.file._ | |
import java.nio.file.attribute._ | |
object FileIO { | |
def deleteDirectories(dir: Path): Unit = { | |
Files.walkFileTree(dir, new SimpleFileVisitor[Path] { | |
override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { | |
Files.delete(file) | |
FileVisitResult.CONTINUE | |
} |
import com.netflix.hystrix.{HystrixCommandGroupKey, HystrixCommand} | |
import rx.Subscriber | |
import scala.concurrent.{Future, Promise} | |
package object hystrix { | |
implicit class HystrixCommandToFuture[A](val cmd: HystrixCommand[A]) extends AnyVal { | |
def futureNoRepeat: Future[A] = { | |
val p = Promise[A]() | |
cmd.observe().subscribe(new Subscriber[A]() { |
/**
 * Produces one ISO-8601 date string (yyyy-MM-dd) per day covered by the
 * period ending at `now`, oldest first.
 *
 * @param duration length of the look-back window, counted backwards from `now`
 * @param now      end of the window; defaults to the current UTC instant
 * @return the dates of every day the window touches, inclusive of both ends
 */
def extractPartitionKey(duration: ReadablePeriod, now: DateTime = DateTime.now(DateTimeZone.UTC)): Seq[String] = {
  // Normalise to UTC so day boundaries are stable regardless of the caller's zone.
  val endInstant = now.withZone(DateTimeZone.UTC)
  // Interval spanning from (end minus duration) up to end.
  val window = new Interval(duration, endInstant)
  val windowStart = window.getStart
  val formatter = ISODateTimeFormat.date()
  // Whole days between the start-of-day of the window start and the end instant.
  val dayCount = Days.daysBetween(windowStart.withTimeAtStartOfDay(), endInstant).getDays
  (0 to dayCount).map(offset => windowStart.plusDays(offset).toString(formatter))
}
I hereby claim:
To claim this, I am signing this object: