Skip to content

Instantly share code, notes, and snippets.

@pomadchin
pomadchin / Coercible.scala
Last active December 7, 2019 00:34
Auto derivation of Circe codecs for shapeless Newtypes and TaggedTypes
import shapeless.newtype.Newtype
/** Coercion to derive [[shapeless.newtype.Newtype]] and [[shapeless.tag.Tagged]] type classes */
trait Coercible[A, B] {
  /** Coerces `a` to `B` via an unchecked runtime cast. Sound only because
    * newtypes/tagged types share the erased representation of their underlying type.
    */
  def apply(a: A): B = {
    val asTarget: B = a.asInstanceOf[B]
    asTarget
  }
}
// Companion providing implicit Coercible instances for shapeless Newtypes.
// NOTE(review): the `* => O` / `Coercible[*, ...]` context-bound syntax requires the
// kind-projector compiler plugin — confirm it is enabled in the build.
// NOTE(review): this object appears truncated by the page scrape (no closing brace,
// and `newTypeToCoercibleK` references `O` before it is bound in scope) — verify
// against the original gist before reuse.
object Coercible {
implicit def newTypeToCoercible[R: * => O, O]: Coercible[R, Newtype[R, O]] = new Coercible[R, Newtype[R, O]] { }
implicit def newTypeToCoercibleK[F[_], R: Coercible[*, Newtype[R, O]]: * => O, O]: Coercible[F[R], F[Newtype[R, O]]] = new Coercible[F[R], F[Newtype[R, O]]] { }
package com.azavea.scube
import com.azavea.scube.Test.WidgetId
import io.circe.syntax._
import io.circe.{Decoder, Encoder}
import io.circe.generic.extras.Configuration
import shapeless.newtype
import shapeless.newtype.Newtype
/** Demonstrates class-level privacy: a plain `private` member is accessible
  * from *other instances* of the same class, not only from `this`.
  */
class Foo {
  // Class-private (not instance-private), so `other.isFoo` below compiles;
  // `private[this]` would forbid that access.
  private def isFoo = true

  /** Reads another instance's private member — legal because `private` is
    * class-scoped in Scala.
    *
    * Fix: replaced deprecated procedure syntax `def boo(other: Foo) { ... }`
    * with an explicit `: Unit =` result type (procedure syntax is removed in Scala 3).
    */
  def boo(other: Foo): Unit = {
    if (other.isFoo) {
      // ...
    }
  }
}
// Contrast with the variant above: `private[this]` is *instance*-private,
// so `other.isFoo` from a different Foo instance would not compile here.
// NOTE(review): this class is truncated by the page scrape (no closing brace).
class Foo {
private[this] def isFoo = true
@pomadchin
pomadchin / spark-shell.scala
Last active October 14, 2019 17:44
Apache Spark cache and checkpoint examples
/**
* //> spark-shell
* 19/10/14 13:25:10 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
* Setting default log level to "WARN".
* To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
* Spark context Web UI available at http://localhost:4040
* Spark context available as 'sc' (master = local[*], app id = local-111).
* Spark session available as 'spark'.
* Welcome to
* ____ __

Можно задавать любые вопросы по теме, от совсем базовых, до требующих многолетних исследований. Но если постите примеры кода, то делайте его компилируемым. Шансы на помощь возрастают многократно. Подойдут сервисы https://scastie.scala-lang.org, https://scalafiddle.io. В крайнем случае gist, там хотя бы история и комменты есть.

Telegram

@pomadchin
pomadchin / s3sample.scala
Created May 10, 2019 00:10
Spark S3 credentials tip
import geotrellis.spark.io.hadoop._
import geotrellis.spark.util._
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.FileUtil
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration
import org.apache.spark._
import org.apache.spark.sql._
@pomadchin
pomadchin / ClosureSerializerSpec.scala
Last active May 7, 2019 18:52
Spark Serializer usage example (for test purposes)
import org.apache.spark.SparkEnv
// Grab a fresh instance of the serializer Spark uses for shipping closures to executors.
val serializer = SparkEnv.get.closureSerializer.newInstance()
// Round-trip `f` through the closure serializer to prove it is serializable.
// NOTE(review): `f` is defined outside this snippet — presumably a `() => S3Client`
// factory, given the deserialize type argument below; confirm against the full gist.
val f1 = serializer.serialize(f)
val client2 = serializer.deserialize[() => S3Client](f1)
<Capabilities xmlns:gml="http://www.opengis.net/gml" xmlns:ows="http://www.opengis.net/ows/1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ows:ServiceIdentification>
<ows:Title>WMTS</ows:Title>
<ows:Abstract>Geotrellis WMTS Service</ows:Abstract>
<ows:Keywords/>
<ows:ServiceType>OGC WMTS</ows:ServiceType>
<ows:ServiceTypeVersion>1.0.0</ows:ServiceTypeVersion>
</ows:ServiceIdentification>
<ows:ServiceProvider>
<ows:ProviderName>Azavea</ows:ProviderName>
// Timing/benchmark helpers built on cats-effect IO.
// NOTE(review): this object is truncated by the page scrape — `timedCreate`'s
// body is cut off mid-definition; see the original gist for the remainder.
object test {
import cats.effect.{IO, Timer}
import cats.syntax.apply._
import scala.concurrent.ExecutionContext
import java.util.concurrent.Executors
// Evaluates the by-name `f`, measuring wall-clock time from `s` (label: `msg`).
def timedCreate[T](msg: String)(f: => T): T = {
val s = System.currentTimeMillis
@pomadchin
pomadchin / WKTParserTest.scala
Created August 22, 2018 07:36
GeoTrellis WKT Parser test for @wsf1990
package geotrellis.proj4.io.wkt
import org.scalatest.FunSpec
// NOTE(review): truncated by the page scrape — the triple-quoted WKT string
// (and the rest of the spec) is cut off mid-literal; no comments may be added
// inside the string without changing the test input.
class WKTParserTest extends FunSpec {
// Parses a GCS_WGS_1984 geographic CRS definition in WKT form.
it("GCS_WGS_1984") {
val strip = WKTParser("""|GEOGCS["GCS_WGS_1984",
|DATUM["D_WGS_1984", SPHEROID["WGS_1984", 6378137.0, 298.257223563]],
|PRIMEM["Greenwich", 0.0],
|UNIT["degree", 0.017453292519943295],