Experiment to add reformatOnCompile settings to the scalafmt sbt plugin (> v0.5)
// Works for scalafmt 0.5.5
// From https://gist.github.com/hseeberger/03677ef75bfadb7663c3b41bb58c702b
// Thank you @hseeberger!
import org.scalafmt.bootstrap.ScalafmtBootstrap
import org.scalafmt.sbt.ScalafmtPlugin
import sbt._
import sbt.Keys._
import sbt.inc.Analysis

object AutomateScalafmtPlugin extends AutoPlugin {

  object autoImport {
    def automateScalafmtFor(configurations: Configuration*): Seq[Setting[_]] =
      configurations.flatMap { c =>
        inConfig(c)(
          Seq(
            compileInputs.in(compile) := {
              scalafmtInc.value
              compileInputs.in(compile).value
            },
            sourceDirectories.in(scalafmtInc) := Seq(scalaSource.value),
            scalafmtInc := {
              val cache = streams.value.cacheDirectory / "scalafmt"
              val include = includeFilter.in(scalafmtInc).value
              val exclude = excludeFilter.in(scalafmtInc).value
              val sources =
                sourceDirectories
                  .in(scalafmtInc)
                  .value
                  .descendantsExcept(include, exclude)
                  .get
                  .toSet
              def format(handler: Set[File] => Unit, msg: String) = {
                def update(handler: Set[File] => Unit, msg: String)(in: ChangeReport[File],
                                                                    out: ChangeReport[File]) = {
                  val label = Reference.display(thisProjectRef.value)
                  val files = in.modified -- in.removed
                  Analysis
                    .counted("Scala source", "", "s", files.size)
                    .foreach(count => streams.value.log.info(s"$msg $count in $label ..."))
                  handler(files)
                  files
                }
                FileFunction.cached(cache)(FilesInfo.hash, FilesInfo.exists)(update(handler, msg))(
                  sources
                )
              }
              def formattingHandler(files: Set[File]) =
                if (files.nonEmpty) {
                  val filesArg = files.map(_.getAbsolutePath).mkString(",")
                  ScalafmtBootstrap.main(List("--quiet", "-i", "-f", filesArg))
                }
              format(formattingHandler, "Formatting")
              format(_ => (), "Reformatted") // Recalculate the cache
            }
          )
        )
      }
  }

  private val scalafmtInc = taskKey[Unit]("Incrementally format modified sources")

  override def requires = ScalafmtPlugin

  override def trigger = allRequirements

  override def projectSettings =
    (includeFilter.in(scalafmtInc) := "*.scala") +: autoImport.automateScalafmtFor(Compile, Test)
}
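For context, a minimal sketch of how this could be wired into a build. The plugin coordinates and version below are assumptions for the scalafmt 0.5.x era, not taken from the gist.

// project/plugins.sbt -- assumed coordinates/version for the sbt-scalafmt plugin of that era
addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "0.5.5")

// Then save the AutomateScalafmtPlugin object above as project/AutomateScalafmtPlugin.scala.
// Because it is an AutoPlugin with trigger = allRequirements and requires = ScalafmtPlugin,
// it activates automatically, and compile / test:compile will first reformat changed sources.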
This always recompiles some files. I did this ugly hack to get only the changed files:
scalafmtIncremental := Def.taskDyn {
  val cache = streams.value.cacheDirectory / "scalafmt"
  val include = (includeFilter in scalafmtIncremental).value
  val exclude = (excludeFilter in scalafmtIncremental).value
  val files: Set[File] =
    (sourceDirectories in scalafmtIncremental).value
      .descendantsExcept(include, exclude)
      .get
      .toSet
  val label = Reference.display(thisProjectRef.value)
  var changed = Set.empty[File]
  def handleUpdate(in: ChangeReport[File],
                   out: ChangeReport[File]): Set[File] = {
    val files = in.modified -- in.removed
    import sbt._
    inc.Analysis
      .counted("Scala source", "", "s", files.size)
      .foreach(count =>
        streams.value.log.info(s"Formatting $count $label..."))
    changed = files
    files
  }
  val toFormat = FileFunction.cached(cache)(
    FilesInfo.hash,
    FilesInfo.exists
  )(handleUpdate)(files)
  if (changed.nonEmpty) {
    val filesFlag = changed.map(_.getAbsolutePath).mkString(",")
    val args = Seq("", "org.scalafmt.cli.Cli", "-i", "-f", filesFlag)
    (runMain in ScalafmtPlugin.scalafmtStub).toTask(args.mkString(" "))
  } else {
    Def.task[Unit](())
  }
}.value
Please update to the following, which works (properly) for version 0.5.4 and hopefully higher:
import org.scalafmt.bootstrap.ScalafmtBootstrap
import org.scalafmt.sbt.ScalafmtPlugin
import sbt._
import sbt.Keys._
import sbt.inc.Analysis

object AutomateScalafmtPlugin extends AutoPlugin {

  object autoImport {
    def automateScalafmtFor(configurations: Configuration*): Seq[Setting[_]] =
      configurations.flatMap { c =>
        inConfig(c)(
          Seq(
            compileInputs.in(compile) := {
              scalafmtInc.value
              compileInputs.in(compile).value
            },
            sourceDirectories.in(scalafmtInc) := Seq(scalaSource.value),
            scalafmtInc := {
              val cache = streams.value.cacheDirectory / "scalafmt"
              val include = includeFilter.in(scalafmtInc).value
              val exclude = excludeFilter.in(scalafmtInc).value
              val sources =
                sourceDirectories
                  .in(scalafmtInc)
                  .value
                  .descendantsExcept(include, exclude)
                  .get
                  .toSet
              def format(handler: Set[File] => Unit, msg: String) = {
                def update(handler: Set[File] => Unit, msg: String)(
                    in: ChangeReport[File], out: ChangeReport[File]) = {
                  val label = Reference.display(thisProjectRef.value)
                  val files = in.modified -- in.removed
                  Analysis
                    .counted("Scala source", "", "s", files.size)
                    .foreach(count => streams.value.log.info(s"$msg $count in $label ..."))
                  handler(files)
                  files
                }
                FileFunction.cached(cache)(FilesInfo.hash,
                                           FilesInfo.exists)(update(handler, msg))(sources)
              }
              def formattingHandler(files: Set[File]) =
                if (files.nonEmpty) {
                  val filesArg = files.map(_.getAbsolutePath).mkString(",")
                  ScalafmtBootstrap.main(List("--non-interactive", "-i", "-f", filesArg))
                }
              format(formattingHandler, "Formatting")
              format(_ => (), "Reformatted") // Recalculate the cache
            }
          )
        )
      }
  }

  private val scalafmtInc = taskKey[Unit]("Incrementally format modified sources")

  override def requires = ScalafmtPlugin

  override def trigger = allRequirements

  override def projectSettings =
    (includeFilter.in(scalafmtInc) := "*.scala") +: autoImport.automateScalafmtFor(Compile, Test)
}
There seems to be a race condition for multi-project builds in this version. Please see my changes in akka/alpakka@15228be
PS, thanks for providing this :)
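One generic way to stop subprojects from formatting concurrently in sbt 0.13 is to tag the formatting task and cap its concurrency. The sketch below is hypothetical and not necessarily the approach taken in the alpakka commit above.

// Hypothetical sketch: allow at most one scalafmt run at a time across the whole build.
// For this to take effect, the scalafmtInc body would have to be defined via
// Def.task { ... } tag ScalafmtTag rather than a plain := { ... } block.
val ScalafmtTag = Tags.Tag("scalafmt")

concurrentRestrictions in Global += Tags.limit(ScalafmtTag, 1)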
@stephennancekivell Thanks for the fix!
See https://gist.github.com/pettyjamesm/ed6351ac2054a76ee6c223e216804298 for an extension of this plugin that handles an empty list of files.
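For illustration only (not necessarily what that gist does), one simple guard is to short-circuit inside the scalafmtInc task body when no sources match the include/exclude filters, so the formatter is never invoked at all:

// Hypothetical guard inside the scalafmtInc task body shown above
if (sources.nonEmpty) {
  format(formattingHandler, "Formatting")
  format(_ => (), "Reformatted") // Recalculate the cache
}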