Created
October 8, 2020 00:17
-
-
Save ozars/2b01c9647bc34f16ab3c18eef3579147 to your computer and use it in GitHub Desktop.
Building spark scala API docs with internal packages
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 6328daec02..3cf549eda9 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -832,12 +832,12 @@ object Unidoc {
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/collection")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/kvstore")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")))
-      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
+      // .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/internal")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive/test")))
-      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")))
+      // .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")))
       .map(_.filterNot(_.getCanonicalPath.contains("org/apache/hive")))
-      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")))
+      // .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")))
   }
 
   private def ignoreClasspaths(classpaths: Seq[Classpath]): Seq[Classpath] = {
@@ -846,6 +846,12 @@ object Unidoc {
       .map(_.filterNot(_.data.getCanonicalPath.matches(""".*kafka_2\..*-0\.10.*""")))
   }
 
+  // Current HEAD commit for snapshot source links. `.!!` captures stdout with a
+  // trailing newline, so `.trim` is required or the generated URLs are corrupted.
+  lazy val gitCommit: String = {
+    import scala.sys.process._
+    "git rev-parse HEAD".!!.trim
+  }
+
   val unidocSourceBase = settingKey[String]("Base URL of source links in Scaladoc.")
 
   lazy val settings = scalaJavaUnidocSettings ++ Seq (
@@ -892,19 +898,19 @@ object Unidoc {
     ),
 
     // Use GitHub repository for Scaladoc source links
-    unidocSourceBase := s"https://github.com/apache/spark/tree/v${version.value}",
+    unidocSourceBase := s"https://github.com/apache/spark",
     scalacOptions in (ScalaUnidoc, unidoc) ++= Seq(
       "-groups", // Group similar methods together based on the @group annotation.
       "-skip-packages", "org.apache.hadoop",
      "-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath
     ) ++ (
-      // Add links to sources when generating Scaladoc for a non-snapshot release
-      if (!isSnapshot.value) {
-        Opts.doc.sourceUrl(unidocSourceBase.value + "€{FILE_PATH}.scala")
-      } else {
-        Seq()
-      }
+      // Add links to sources when generating Scaladoc: release tag for releases,
+      // the exact commit SHA for snapshot builds.
+      Opts.doc.sourceUrl(
+        unidocSourceBase.value
+        + { if (!isSnapshot.value) s"/tree/v${version.value}" else s"/blob/$gitCommit" }
+        + "€{FILE_PATH}.scala"
+      )
     )
   )
 }
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment