Eclipse Java compiler bugs:
Scalac:
val tpe = typeOf[test.GenericFoo[Int]]          //> tpe : reflect.runtime.universe.Type = test.GenericFoo[Int]
tpe.getClass                                    //> res0: Class[?0] = class scala.reflect.internal.Types$TypeRef$$anon$5
tpe.takesTypeArgs                               //> res1: Boolean = false
val tpe1 = typeOf[test.GenericFoo[_]].typeSymbol.typeSignature
                                                //> tpe1 : reflect.runtime.universe.Type = [T]AnyRef{def <init>(): test.GenericFoo[T]}
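
Note that `takesTypeArgs` is false above because `GenericFoo[Int]` is already fully applied; it is the type constructor that still takes arguments. A minimal sketch of the distinction (the `GenericFoo` definition is assumed, matching the signature printed for `tpe1`):

import scala.reflect.runtime.universe._

class GenericFoo[T]

object TakesTypeArgsDemo extends App {
  val applied = typeOf[GenericFoo[Int]]
  println(applied.takesTypeArgs)                  // false: already applied to Int
  println(applied.typeConstructor.takesTypeArgs)  // true: GenericFoo still expects a T
}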
// build.sbt
resolvers ++= Seq(
  Resolver.sonatypeRepo("releases"),
  Resolver.sonatypeRepo("snapshots")
)

// For Scala 2.11.0
scalaVersion := "2.11.0"

libraryDependencies ++= Seq(
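  // (the gist preview cuts off here; hypothetical entry added only to close
  //  the expression -- the actual dependency list is not in the snippet)
  "org.scala-lang" % "scala-reflect" % scalaVersion.value
)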
$ ./uber-build.sh config/sbt-publish-0.13.6-2.11.x.conf
>>>>> Check arguments
>>>>> Load config
>>>>> Setup logging
>>>>> Set flags
>>>>> Check prerequisites
>>>>> Check configuration
0
>>>>> Scala
----- org.scala-lang:scala-compiler:jar:2.11.6 found !
I hereby claim:
To claim this, I am signing this object:
package test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import SQLContext._
import org.apache.spark.sql.types._

case class WikidataElement(id: String, sites: Map[String, String])
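
A minimal usage sketch for the case class above (the driver object, app name, and sample data are assumptions, not part of the gist): Spark 1.x derives a DataFrame schema from the case class fields automatically.

object WikidataDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("wikidata-demo").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val elems: RDD[WikidataElement] = sc.parallelize(Seq(
      WikidataElement("Q1", Map("enwiki" -> "Universe"))))

    elems.toDF().show()  // schema derived from id and sites
    sc.stop()
  }
}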
----- org.scala-lang:scala-compiler:jar:2.11.6 found !
>>>>> Zinc
>> Building Zinc using dbuild
>> Writing properties: /Volumes/Thunderbolt_SSD/dragos/workspace/plugin/uber-build/target/zinc/current-zinc-build.properties
>> publish-repo=https://proxy-ch.typesafe.com:8082/artifactory/ide-2.11
sbt-tag=v0.13.8
sbt-version=0.13.8-on-2.11.6-for-IDE-SNAPSHOT
>> Detected sbt version: 0.13.8-on-2.11.6-for-IDE-SNAPSHOT
[scala2] --== Extracting dependencies for scala2 ==--
[scala2] ----------
trait Encoder[T]
trait JFunction[T, U]

trait Dataset[T] {
  def map[U](f: T => U)(implicit encoder: Encoder[U])
  def map[U](f: JFunction[T, U], encoder: Encoder[U])
}

trait Testing {
  implicit val e: Encoder[Int] = ???
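  // (The gist preview cuts off here. A hedged guess at the point of the
  //  minimization -- `ds` and the calls below are assumptions, not from the
  //  original: because `map` is overloaded, scalac types the argument without
  //  an expected type, so a lambda with an unannotated parameter fails.)
  val ds: Dataset[Int] = ???

  // ds.map(x => x + 1)            // error: missing parameter type x
  ds.map((x: Int) => x + 1)        // annotating the parameter resolves the overload
}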
package test

// prints:
//
//   test.Main.C prefix value: test.Main$@54a097cc
//   test.Main.o.D prefix value: Outer(o)

object Main {
  val universe = scala.reflect.runtime.universe
  val mirror = universe.runtimeMirror(getClass.getClassLoader)
  import universe._
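  // (The gist preview cuts off here. A hedged reconstruction based on the
  //  output in the header comment: C, Outer and o are assumed definitions,
  //  and the original evidently also resolved val prefixes such as o.type
  //  to their runtime values, which this sketch does not.)
  class C
  case class Outer(label: String) { class D }
  val o = Outer("o")

  def prefixValue(tpe: Type): Any = tpe match {
    case TypeRef(pre, _, _) if pre.termSymbol.isModule =>
      // the prefix is an object: reflect its singleton instance
      mirror.reflectModule(pre.termSymbol.asModule).instance
    case TypeRef(pre, _, _) => pre  // fall back to the prefix type itself
    case _                  => NoType
  }

  def main(args: Array[String]): Unit = {
    println(s"test.Main.C prefix value: ${prefixValue(typeOf[C])}")
    println(s"test.Main.o.D prefix value: ${prefixValue(typeOf[o.D])}")
  }
}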
// fails with:
// $ /opt/scala-2.10.4/bin/scalac -d /tmp src/main/scala/infer.scala -cp ../spark/assembly/target/scala-2.10/spark-assembly-1.6.0-SNAPSHOT-hadoop2.2.0.jar
// src/main/scala/infer.scala:27: error: missing parameter type for expanded function ((x$2) => x$2._2)
//   ds.groupBy(_._1).agg(sum(_._2), sum(_._3)).collect()
//                            ^
// one error found
//
import org.apache.spark.sql.functions._
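
The preview stops at the imports, but the error quoted in the header does not actually need Spark. A self-contained reduction (all names below are hypothetical) triggers the same failure: a function literal with an unannotated parameter cannot be typed while its expected input type is still an unresolved type parameter.

object InferRepro {
  // Like Spark's typed aggregate helpers, `sum` is generic in its input type:
  def sum[I, O](f: I => O): I => O = f

  // sum(_._2)                              // error: missing parameter type
  val ok = sum((x: (String, Int)) => x._2)  // annotating the parameter fixes it
}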