sbt
edf7e566bac8
Merge pull request #3125 from eed3si9n/wip/format
eugene yokota
7 days ago
[sbt 1.0] Reformat using Scalafmt

maxColumn = 100
project.git = true
project.excludeFilters = [
  /sbt-test/,
  /input_sources/,
  /contraband-scala/
]
# http://docs.scala-lang.org/style/scaladoc.html recommends the JavaDoc style.
# scala/scala is written that way too https://github.com/scala/scala/blob/v2.12.2/src/library/scala/Predef.scala
docstrings = JavaDoc
# This also seems more idiomatic to include whitespace in import x.{ yyy }
spaces.inImportCurlyBraces = true
# This works around sequence wildcard (`_*`) turning into `_ *`
spaces.beforeSeqWildcard = true
# Vertical alignment only => for pattern matching
align.tokens.add = [
  { code = "=>", owner = "Case" }
]
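A rough, illustrative sketch (not taken from the PR itself) of what the two less obvious settings change: spaces.inImportCurlyBraces pads the inside of import braces, and the align.tokens entry vertically aligns the => arrows of case clauses. The import line below is one the diff actually touches; the match block is a made-up example.

// before
import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
status match {
  case Some(s) => s
  case None => "unknown"
}

// after (spaces.inImportCurlyBraces = true, `=>` aligned for Case owners)
import com.typesafe.tools.mima.plugin.MimaKeys.{ binaryIssueFilters, previousArtifact }
status match {
  case Some(s) => s
  case None    => "unknown"
}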

matrix:
  fast_finish: true
env:
  matrix:
- SBT_CMD=";test:compile;scalariformCheck"
- SBT_CMD=";test:compile;scalafmtCheck"
    - SBT_CMD="mimaReportBinaryIssues"
    - SBT_CMD="safeUnitTests"
    - SBT_CMD="otherUnitTests"
    - SBT_CMD="scripted actions/*"
    - SBT_CMD="scripted apiinfo/*"
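The CI matrix swaps the scalariform check for a scalafmt one; scalafmtCheck is not a plugin task here but a custom command defined further down in build.sbt (it simply checks that the working tree is still clean after compiling). The same check can presumably be reproduced locally by running exactly what Travis runs:

sbt ";test:compile;scalafmtCheck"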

import Util._ import Dependencies._ import Sxr.sxr import com.typesafe.tools.mima.core._, ProblemFilters._
import com.typesafe.tools.mima.plugin.MimaKeys.{ binaryIssueFilters, previousArtifact}
import com.typesafe.tools.mima.plugin.MimaKeys.{ binaryIssueFilters, previousArtifact }
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings // ThisBuild settings take lower precedence, // but can be shared across the multi projects.
def buildLevelSettings: Seq[Setting[_]] = inThisBuild(Seq( organization := "org.scala-sbt", version := "1.0.0-SNAPSHOT", description := "sbt is an interactive build tool", bintrayOrganization := Some("sbt"), bintrayRepository := { if (publishStatus.value == "releases") "maven-releases" else "maven-snapshots" }, bintrayPackage := "sbt", bintrayReleaseOnPublish := false, licenses := List("BSD New" -> url("https://github.com/sbt/sbt/blob/0.13/LICENSE")), developers := List( Developer("harrah", "Mark Harrah", "@harrah", url("https://github.com/harrah")), Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")), Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")), Developer("dwijnand", "Dale Wijnand", "@dwijnand", url("https://github.com/dwijnand")), Developer("gkossakowski", "Grzegorz Kossakowski", "@gkossakowski", url("https://github.com/gkossakowski")), Developer("Duhemm", "Martin Duhem", "@Duhemm", url("https://github.com/Duhemm")) ), homepage := Some(url("https://github.com/sbt/sbt")), scmInfo := Some(ScmInfo(url("https://github.com/sbt/sbt"), "git@github.com:sbt/sbt.git")), resolvers += Resolver.mavenLocal ))
def buildLevelSettings: Seq[Setting[_]] = inThisBuild( Seq( organization := "org.scala-sbt", version := "1.0.0-SNAPSHOT", description := "sbt is an interactive build tool", bintrayOrganization := Some("sbt"), bintrayRepository := { if (publishStatus.value == "releases") "maven-releases" else "maven-snapshots" }, bintrayPackage := "sbt", bintrayReleaseOnPublish := false, licenses := List("BSD New" -> url("https://github.com/sbt/sbt/blob/0.13/LICENSE")), developers := List( Developer("harrah", "Mark Harrah", "@harrah", url("https://github.com/harrah")), Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")), Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")), Developer("dwijnand", "Dale Wijnand", "@dwijnand", url("https://github.com/dwijnand")), Developer("gkossakowski", "Grzegorz Kossakowski", "@gkossakowski", url("https://github.com/gkossakowski")), Developer("Duhemm", "Martin Duhem", "@Duhemm", url("https://github.com/Duhemm")) ), homepage := Some(url("https://github.com/sbt/sbt")), scmInfo := Some(ScmInfo(url("https://github.com/sbt/sbt"), "git@github.com:sbt/sbt.git")), resolvers += Resolver.mavenLocal ))
def commonSettings: Seq[Setting[_]] = Seq[SettingsDefinition]( scalaVersion := baseScalaVersion, componentID := None, resolvers += Resolver.typesafeIvyRepo("releases"), resolvers += Resolver.sonatypeRepo("snapshots"), resolvers += "bintray-sbt-maven-releases" at "https://dl.bintray.com/sbt/maven-releases/", concurrentRestrictions in Global += Util.testExclusiveRestriction, testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"), javacOptions in compile ++= Seq("-target", "6", "-source", "6", "-Xlint", "-Xlint:-serial"), incOptions := incOptions.value.withNameHashing(true), crossScalaVersions := Seq(baseScalaVersion), bintrayPackage := (bintrayPackage in ThisBuild).value, bintrayRepository := (bintrayRepository in ThisBuild).value, mimaDefaultSettings, publishArtifact in Test := false, mimaPreviousArtifacts := Set.empty, // Set(organization.value % moduleName.value % "1.0.0"), mimaBinaryIssueFilters ++= Seq( ) ) flatMap (_.settings)
def minimalSettings: Seq[Setting[_]] = commonSettings ++ customCommands ++
publishPomSettings ++ Release.javaVersionCheckSettings
def baseSettings: Seq[Setting[_]] =
minimalSettings ++ Seq(projectComponent) ++ baseScalacOptions ++ Licensed.settings ++ Formatting.settings
minimalSettings ++ Seq(projectComponent) ++ baseScalacOptions ++ Licensed.settings
def testedBaseSettings: Seq[Setting[_]] = baseSettings ++ testDependencies
lazy val sbtRoot: Project = (project in file(".")). enablePlugins(ScriptedPlugin). configs(Sxr.sxrConf). aggregate(nonRoots: _*). settings(
lazy val sbtRoot: Project = (project in file(".")) .enablePlugins(ScriptedPlugin) .configs(Sxr.sxrConf) .aggregate(nonRoots: _*) .settings(
buildLevelSettings, minimalSettings, rootSettings, publish := {}, publishLocal := {} ) // This is used to configure an sbt-launcher for this version of sbt. lazy val bundledLauncherProj =
(project in file("launch")). settings( minimalSettings, inConfig(Compile)(Transform.configSettings), Release.launcherSettings(sbtLaunchJar) ). enablePlugins(SbtLauncherPlugin). settings( name := "sbt-launch", moduleName := "sbt-launch", description := "sbt application launcher", autoScalaLibrary := false, crossPaths := false, publish := Release.deployLauncher.value, publishLauncher := Release.deployLauncher.value, packageBin in Compile := sbtLaunchJar.value )
(project in file("launch")) .settings( minimalSettings, inConfig(Compile)(Transform.configSettings), Release.launcherSettings(sbtLaunchJar) ) .enablePlugins(SbtLauncherPlugin) .settings( name := "sbt-launch", moduleName := "sbt-launch", description := "sbt application launcher", autoScalaLibrary := false, crossPaths := false, publish := Release.deployLauncher.value, publishLauncher := Release.deployLauncher.value, packageBin in Compile := sbtLaunchJar.value )
/* ** subproject declarations ** */ /* **** Intermediate-level Modules **** */ // Runner for uniform test interface
lazy val testingProj = (project in file("testing")). enablePlugins(ContrabandPlugin, JsonCodecPlugin). dependsOn(testAgentProj). settings(
lazy val testingProj = (project in file("testing")) .enablePlugins(ContrabandPlugin, JsonCodecPlugin) .dependsOn(testAgentProj) .settings(
baseSettings, name := "Testing",
libraryDependencies ++= Seq(testInterface,launcherInterface, sjsonNewScalaJson),
libraryDependencies ++= Seq(testInterface, launcherInterface, sjsonNewScalaJson),
managedSourceDirectories in Compile += baseDirectory.value / "src" / "main" / "contraband-scala", sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala", contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats
). configure(addSbtIO, addSbtCompilerClasspath, addSbtUtilLogging)
) .configure(addSbtIO, addSbtCompilerClasspath, addSbtUtilLogging)
// Testing agent for running tests in a separate process.
lazy val testAgentProj = (project in file("testing") / "agent"). settings( minimalSettings, crossScalaVersions := Seq(baseScalaVersion), crossPaths := false, autoScalaLibrary := false, name := "Test Agent", libraryDependencies += testInterface )
lazy val testAgentProj = (project in file("testing") / "agent").settings( minimalSettings, crossScalaVersions := Seq(baseScalaVersion), crossPaths := false, autoScalaLibrary := false, name := "Test Agent", libraryDependencies += testInterface )
// Basic task engine
lazy val taskProj = (project in file("tasks")). settings(
lazy val taskProj = (project in file("tasks")) .settings(
testedBaseSettings, name := "Tasks"
). configure(addSbtUtilControl, addSbtUtilCollection)
) .configure(addSbtUtilControl, addSbtUtilCollection)
// Standard task system. This provides map, flatMap, join, and more on top of the basic task model.
lazy val stdTaskProj = (project in file("tasks-standard")). dependsOn (taskProj % "compile;test->test"). settings(
lazy val stdTaskProj = (project in file("tasks-standard")) .dependsOn(taskProj % "compile;test->test") .settings(
testedBaseSettings, name := "Task System", testExclusive
). configure(addSbtUtilCollection, addSbtUtilLogging, addSbtUtilCache, addSbtIO)
) .configure(addSbtUtilCollection, addSbtUtilLogging, addSbtUtilCache, addSbtIO)
// Embedded Scala code runner
lazy val runProj = (project in file("run")). settings(
lazy val runProj = (project in file("run")) .settings(
testedBaseSettings, name := "Run"
). configure(addSbtIO, addSbtUtilLogging, addSbtCompilerClasspath)
) .configure(addSbtIO, addSbtUtilLogging, addSbtCompilerClasspath)
lazy val scriptedSbtProj = (project in scriptedPath / "sbt"). dependsOn(commandProj). settings(
lazy val scriptedSbtProj = (project in scriptedPath / "sbt") .dependsOn(commandProj) .settings(
baseSettings, name := "Scripted sbt", libraryDependencies ++= Seq(launcherInterface % "provided")
). configure(addSbtIO, addSbtUtilLogging, addSbtCompilerInterface, addSbtUtilScripted)
) .configure(addSbtIO, addSbtUtilLogging, addSbtCompilerInterface, addSbtUtilScripted)
lazy val scriptedPluginProj = (project in scriptedPath / "plugin"). dependsOn(sbtProj). settings(
lazy val scriptedPluginProj = (project in scriptedPath / "plugin") .dependsOn(sbtProj) .settings(
baseSettings, name := "Scripted Plugin"
). configure(addSbtCompilerClasspath)
) .configure(addSbtCompilerClasspath)
// Implementation and support code for defining actions.
lazy val actionsProj = (project in file("main-actions")). dependsOn(runProj, stdTaskProj, taskProj, testingProj). settings(
lazy val actionsProj = (project in file("main-actions")) .dependsOn(runProj, stdTaskProj, taskProj, testingProj) .settings(
testedBaseSettings, name := "Actions", libraryDependencies += sjsonNewScalaJson
). configure(addSbtCompilerClasspath, addSbtUtilCompletion, addSbtCompilerApiInfo, addSbtZinc, addSbtCompilerIvyIntegration, addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilRelation, addSbtLm, addSbtUtilTracking)
) .configure( addSbtCompilerClasspath, addSbtUtilCompletion, addSbtCompilerApiInfo, addSbtZinc, addSbtCompilerIvyIntegration, addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilRelation, addSbtLm, addSbtUtilTracking )
lazy val protocolProj = (project in file("protocol")). enablePlugins(ContrabandPlugin, JsonCodecPlugin). settings(
lazy val protocolProj = (project in file("protocol")) .enablePlugins(ContrabandPlugin, JsonCodecPlugin) .settings(
testedBaseSettings, name := "Protocol", libraryDependencies ++= Seq(sjsonNewScalaJson), managedSourceDirectories in Compile += baseDirectory.value / "src" / "main" / "contraband-scala", sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala", contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats
). configure(addSbtUtilLogging)
) .configure(addSbtUtilLogging)
// General command support and core commands not specific to a build system
lazy val commandProj = (project in file("main-command")). enablePlugins(ContrabandPlugin, JsonCodecPlugin). dependsOn(protocolProj). settings(
lazy val commandProj = (project in file("main-command")) .enablePlugins(ContrabandPlugin, JsonCodecPlugin) .dependsOn(protocolProj) .settings(
testedBaseSettings, name := "Command", libraryDependencies ++= Seq(launcherInterface, sjsonNewScalaJson, templateResolverApi), managedSourceDirectories in Compile += baseDirectory.value / "src" / "main" / "contraband-scala", sourceManaged in (Compile, generateContrabands) := baseDirectory.value / "src" / "main" / "contraband-scala", contrabandFormatsForType in generateContrabands in Compile := ContrabandConfig.getFormats
). configure(addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilCompletion, addSbtCompilerClasspath, addSbtLm)
) .configure(addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilCompletion, addSbtCompilerClasspath, addSbtLm)
// Fixes scope=Scope for Setting (core defined in collectionProj) to define the settings system used in build definitions
lazy val mainSettingsProj = (project in file("main-settings")). dependsOn(commandProj, stdTaskProj). settings(
lazy val mainSettingsProj = (project in file("main-settings")) .dependsOn(commandProj, stdTaskProj) .settings(
testedBaseSettings, name := "Main Settings"
). configure(addSbtUtilCache, addSbtUtilApplyMacro, addSbtCompilerInterface, addSbtUtilRelation, addSbtUtilLogging, addSbtIO, addSbtUtilCompletion, addSbtCompilerClasspath, addSbtLm)
) .configure( addSbtUtilCache, addSbtUtilApplyMacro, addSbtCompilerInterface, addSbtUtilRelation, addSbtUtilLogging, addSbtIO, addSbtUtilCompletion, addSbtCompilerClasspath, addSbtLm )
// The main integration project for sbt. It brings all of the projects together, configures them, and provides for overriding conventions.
lazy val mainProj = (project in file("main")). dependsOn(actionsProj, mainSettingsProj, runProj, commandProj). disablePlugins(SbtScalariform). settings(
lazy val mainProj = (project in file("main")) .dependsOn(actionsProj, mainSettingsProj, runProj, commandProj) .settings(
testedBaseSettings, name := "Main", libraryDependencies ++= scalaXml.value ++ Seq(launcherInterface)
). configure(addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilLogic, addSbtLm, addSbtZincCompile)
) .configure(addSbtCompilerInterface, addSbtIO, addSbtUtilLogging, addSbtUtilLogic, addSbtLm, addSbtZincCompile)
// Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object // technically, we need a dependency on all of mainProj's dependencies, but we don't do that since this is strictly an integration project // with the sole purpose of providing certain identifiers without qualification (with a package object)
lazy val sbtProj = (project in file("sbt")). dependsOn(mainProj, scriptedSbtProj % "test->test"). settings(
lazy val sbtProj = (project in file("sbt")) .dependsOn(mainProj, scriptedSbtProj % "test->test") .settings(
baseSettings, name := "sbt", normalizedName := "sbt", crossScalaVersions := Seq(baseScalaVersion), crossPaths := false
). configure(addSbtCompilerBridge)
) .configure(addSbtCompilerBridge)
def scriptedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask { val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed publishLocalBinAll.value // These two projects need to be visible in a repo even if the default // local repository is hidden, so we publish them to an alternate location and add // that alternate repo to the running scripted test (in Scripted.scriptedpreScripted). // (altLocalPublish in interfaceProj).value // (altLocalPublish in compileInterfaceProj).value
Scripted.doScripted((sbtLaunchJar in bundledLauncherProj).value, (fullClasspath in scriptedSbtProj in Test).value,
Scripted.doScripted( (sbtLaunchJar in bundledLauncherProj).value, (fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value,
scriptedSource.value, scriptedBufferLog.value, result, scriptedPrescripted.value, scriptedLaunchOpts.value)
scriptedSource.value, scriptedBufferLog.value, result, scriptedPrescripted.value, scriptedLaunchOpts.value )
} def scriptedUnpublishedTask: Def.Initialize[InputTask[Unit]] = Def.inputTask { val result = scriptedSource(dir => (s: State) => Scripted.scriptedParser(dir)).parsed
Scripted.doScripted((sbtLaunchJar in bundledLauncherProj).value, (fullClasspath in scriptedSbtProj in Test).value,
Scripted.doScripted( (sbtLaunchJar in bundledLauncherProj).value, (fullClasspath in scriptedSbtProj in Test).value,
(scalaInstance in scriptedSbtProj).value,
scriptedSource.value, scriptedBufferLog.value, result, scriptedPrescripted.value, scriptedLaunchOpts.value)
scriptedSource.value, scriptedBufferLog.value, result, scriptedPrescripted.value, scriptedLaunchOpts.value )
} lazy val publishLauncher = TaskKey[Unit]("publish-launcher") lazy val myProvided = config("provided") intransitive
def allProjects = Seq( testingProj, testAgentProj, taskProj, stdTaskProj, runProj, scriptedSbtProj, scriptedPluginProj, protocolProj, actionsProj, commandProj, mainSettingsProj, mainProj, sbtProj, bundledLauncherProj)
def allProjects = Seq( testingProj, testAgentProj, taskProj, stdTaskProj, runProj, scriptedSbtProj, scriptedPluginProj, protocolProj, actionsProj, commandProj, mainSettingsProj, mainProj, sbtProj, bundledLauncherProj )
def projectsWithMyProvided = allProjects.map(p => p.copy(configurations = (p.configurations.filter(_ != Provided)) :+ myProvided))
lazy val nonRoots = projectsWithMyProvided.map(p => LocalProject(p.id))
def rootSettings = fullDocSettings ++ Util.publishPomSettings ++ otherRootSettings ++ Formatting.sbtFilesSettings ++ Transform.conscriptSettings(bundledLauncherProj) def otherRootSettings = Seq( scripted := scriptedTask.evaluated, scriptedUnpublished := scriptedUnpublishedTask.evaluated, scriptedSource := (sourceDirectory in sbtProj).value / "sbt-test", // scriptedPrescripted := { addSbtAlternateResolver _ }, scriptedLaunchOpts := List("-XX:MaxPermSize=256M", "-Xmx1G"), publishAll := { val _ = (publishLocal).all(ScopeFilter(inAnyProject)).value }, publishLocalBinAll := { val _ = (publishLocalBin).all(ScopeFilter(inAnyProject)).value }, aggregate in bintrayRelease := false ) ++ inConfig(Scripted.RepoOverrideTest)(Seq( scriptedPrescripted := { _ => () }, scriptedLaunchOpts := { List("-XX:MaxPermSize=256M", "-Xmx1G", "-Dsbt.override.build.repos=true", s"""-Dsbt.repository.config=${ scriptedSource.value / "repo.config" }""") }, scripted := scriptedTask.evaluated, scriptedUnpublished := scriptedUnpublishedTask.evaluated, scriptedSource := (sourceDirectory in sbtProj).value / "repo-override-test" ))
def rootSettings = fullDocSettings ++ Util.publishPomSettings ++ otherRootSettings ++ Transform.conscriptSettings(bundledLauncherProj) def otherRootSettings = Seq( scripted := scriptedTask.evaluated, scriptedUnpublished := scriptedUnpublishedTask.evaluated, scriptedSource := (sourceDirectory in sbtProj).value / "sbt-test", // scriptedPrescripted := { addSbtAlternateResolver _ }, scriptedLaunchOpts := List("-XX:MaxPermSize=256M", "-Xmx1G"), publishAll := { val _ = (publishLocal).all(ScopeFilter(inAnyProject)).value }, publishLocalBinAll := { val _ = (publishLocalBin).all(ScopeFilter(inAnyProject)).value }, aggregate in bintrayRelease := false ) ++ inConfig(Scripted.RepoOverrideTest)( Seq( scriptedPrescripted := { _ => () }, scriptedLaunchOpts := { List("-XX:MaxPermSize=256M", "-Xmx1G", "-Dsbt.override.build.repos=true", s"""-Dsbt.repository.config=${scriptedSource.value / "repo.config"}""") }, scripted := scriptedTask.evaluated, scriptedUnpublished := scriptedUnpublishedTask.evaluated, scriptedSource := (sourceDirectory in sbtProj).value / "repo-override-test" ))
// def addSbtAlternateResolver(scriptedRoot: File) = { // val resolver = scriptedRoot / "project" / "AddResolverPlugin.scala" // if (!resolver.exists) { // IO.write(resolver, s"""import sbt._
...
dependencyClasspath in (Compile, doc) := (fullClasspath in sxr).value ) lazy val safeUnitTests = taskKey[Unit]("Known working tests (for both 2.10 and 2.11)") lazy val safeProjects: ScopeFilter = ScopeFilter(
inProjects(mainSettingsProj, mainProj, actionsProj, runProj, stdTaskProj),
inConfigurations(Test) ) lazy val otherUnitTests = taskKey[Unit]("Unit test other projects") lazy val otherProjects: ScopeFilter = ScopeFilter(
inProjects( testingProj, testAgentProj, taskProj, scriptedSbtProj, scriptedPluginProj, commandProj, mainSettingsProj, mainProj, sbtProj),
inProjects(testingProj, testAgentProj, taskProj, scriptedSbtProj, scriptedPluginProj, commandProj, mainSettingsProj, mainProj, sbtProj),
inConfigurations(Test) ) def customCommands: Seq[Setting[_]] = Seq( commands += Command.command("setupBuildScala212") { state =>
...
test.all(safeProjects).value }, otherUnitTests := { test.all(otherProjects).value },
commands += Command.command("scalafmtCheck") { state => sys.process.Process("git diff --name-only --exit-code").! match { case 0 => // ok case x => sys.error("git diff detected! Did you compile before committing?") } state },
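The command relies on the formatter having already rewritten any unformatted sources during the ";test:compile" half of the CI entry, so a non-zero exit from git diff --name-only --exit-code means unformatted code was committed. A sketch of the same guard, generalized to any step that may rewrite checked-in files (the command name is hypothetical):

// Sketch only: fail the build when a previous step left the working tree dirty.
commands += Command.command("workingTreeCheck") { state =>
  sys.process.Process("git diff --name-only --exit-code").! match {
    case 0 => state // clean tree: pass the state through unchanged
    case _ => sys.error("uncommitted changes detected after the previous step")
  }
}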
commands += Command.command("release-sbt-local") { state => "clean" ::
"so compile" :: "so publishLocal" :: "reload" :: state
}, /** There are several complications with sbt's build. * First is the fact that interface project is a Java-only project * that uses source generator from datatype subproject in Scala 2.10.6. *

* Copyright 2010 Mark Harrah */ package sbt import sbt.internal.inc.javac.JavaTools
import sbt.internal.inc.{ AnalyzingCompiler, ComponentCompiler, ScalaInstance, ZincComponentManager, IncrementalCompilerImpl }
import xsbti.{ Logger => _, _ } import xsbti.compile.{ ClasspathOptions, Compilers, CompileResult, Inputs } import java.io.File import sbt.internal.librarymanagement.IvyConfiguration
...
case VersionNumber(Seq(2, y, _), _, _) if y == 11 => scalaCompilerBridgeSource2_11 case _ => scalaCompilerBridgeSource2_12 } private[this] def scalaCompilerBridgeSource(suffix: String): ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, s"compiler-bridge_$suffix", ComponentCompiler.incrementalVersion)
.withConfigurations(Some("component")) .sources() private[sbt] def scalaCompilerBridgeSource2_10: ModuleID = scalaCompilerBridgeSource("2.10") private[sbt] def scalaCompilerBridgeSource2_11: ModuleID = scalaCompilerBridgeSource("2.11") private[sbt] def scalaCompilerBridgeSource2_12: ModuleID = scalaCompilerBridgeSource("2.12") def compilers(
cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration
)(implicit app: AppConfiguration, log: Logger): Compilers = { val scalaProvider = app.provider.scalaProvider val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher) val sourceModule = scalaCompilerBridgeSource2_12 compilers(instance, cpOptions, None, ivyConfiguration, sourceModule) } // TODO: Get java compiler def compilers(
instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, sourcesModule: ModuleID
)(implicit app: AppConfiguration, log: Logger): Compilers = { val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, sourcesModule) val javac = JavaTools.directOrFork(instance, cpOptions, javaHome) new Compilers(scalac, javac) } def scalaCompiler(
instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, sourcesModule: ModuleID
)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler = { val launcher = app.provider.scalaProvider.launcher
val componentManager = new ZincComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log) val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, sourcesModule)
new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None) } private val compiler = new IncrementalCompilerImpl def compile(in: Inputs, log: Logger): CompileResult = compiler.compile(in, log) private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =
mappers.foldRight({ p: A => p }) { (mapper, mappers) => { p: A => mapper(p).getOrElse(mappers(p)) } }
}
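foldMappers above folds a sequence of Option-returning mappers into a single total function: the first mapper that returns Some wins, and the identity function is the final fallback. A hypothetical usage sketch (the method is private[sbt], so this is only callable from within the sbt package; the mappers themselves are invented for illustration):

val mappers: Seq[String => Option[String]] = Seq(
  s => if (s.startsWith("/tmp/")) Some("<tmp>") else None,
  s => if (s.endsWith(".jar")) Some("<jar>") else None
)
val normalize = foldMappers(mappers)
normalize("/tmp/x")      // "<tmp>": the first mapper applies
normalize("lib/foo.jar") // "<jar>": falls through to the second mapper
normalize("src/A.scala") // "src/A.scala": identity fallback, input returned unchanged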

import xsbti.compile.{ Inputs, Compilers } import scala.util.Try final class Console(compiler: AnalyzingCompiler) {
/** Starts an interactive scala interpreter session with the given classpath.*/ def apply(classpath: Seq[File], log: Logger): Try[Unit] = apply(classpath, Nil, "", "", log)
def apply(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String, log: Logger): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log)
def apply(classpath: Seq[File], options: Seq[String], loader: ClassLoader, initialCommands: String, cleanupCommands: String)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings)
def apply(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = { def console0() = compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings) // TODO: Fix JLine //JLine.withJLine(Run.executeTrapExit(console0, log)) Run.executeTrapExit(console0, log) }
def apply(classpath: Seq[File], options: Seq[String], initialCommands: String, cleanupCommands: String)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])( implicit log: Logger): Try[Unit] = { def console0() = compiler.console(classpath, options, initialCommands, cleanupCommands, log)(loader, bindings) // TODO: Fix JLine //JLine.withJLine(Run.executeTrapExit(console0, log)) Run.executeTrapExit(console0, log) }
} object Console { def apply(conf: Inputs): Console = conf.compilers match { case cs: Compilers => new Console(cs.scalac match { case x: AnalyzingCompiler => x })
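The Console wrapper above exposes a minimal REPL entry point over an AnalyzingCompiler. A hypothetical usage sketch using only the simplest overload shown; analyzingCompiler and log are assumed to be available from the surrounding code:

val repl = new Console(analyzingCompiler)
repl(Seq(new File("target/classes")), log) match {
  case scala.util.Success(_) => log.info("console session finished")
  case scala.util.Failure(e) => log.error("console session failed: " + e.getMessage)
}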

import sbt.util.Logger import sbt.internal.util.ManagedLogger object Doc { import RawCompileLike._
def scaladoc(label: String, cacheStoreFactory: CacheStoreFactory, compiler: AnalyzingCompiler): Gen =
scaladoc(label, cacheStoreFactory, compiler, Seq())
def scaladoc(label: String, cacheStoreFactory: CacheStoreFactory, compiler: AnalyzingCompiler, fileInputOptions: Seq[String]): Gen = cached(cacheStoreFactory, fileInputOptions, prepare(label + " Scala API documentation", compiler.doc)) def javadoc(label: String, cacheStoreFactory: CacheStoreFactory, doc: JavaTools, log: Logger, reporter: Reporter): Gen =
javadoc(label, cacheStoreFactory, doc, log, reporter, Seq())
def javadoc(label: String, cacheStoreFactory: CacheStoreFactory, doc: JavaTools, log: Logger, reporter: Reporter, fileInputOptions: Seq[String]): Gen = cached(cacheStoreFactory, fileInputOptions, prepare(label + " Java API documentation", filterSources( javaSourcesOnly, (sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger) => { // doc.doc ??? } )))
def javadoc(label: String, cacheStoreFactory: CacheStoreFactory, doc: JavaTools, log: Logger, reporter: Reporter, fileInputOptions: Seq[String]): Gen = cached( cacheStoreFactory, fileInputOptions, prepare( label + " Java API documentation", filterSources( javaSourcesOnly, (sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger) => { // doc.doc ??? } ) ) )
val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java") private[sbt] final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc {
def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: ManagedLogger): Unit = { generate("Scala", label, compiler.doc, sources, classpath, outputDirectory, options, maximumErrors, log)
} } } sealed trait Doc { type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private[sbt] final def generate(variant: String, label: String, docf: Gen, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: ManagedLogger): Unit = {
val logSnip = variant + " API documentation" if (sources.isEmpty) log.info("No sources available, skipping " + logSnip + "...") else {
log.info("Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...")
log.info( "Generating " + logSnip + " for " + label + " sources to " + outputDirectory.absolutePath + "...")
IO.delete(outputDirectory) IO.createDirectory(outputDirectory) docf(sources, classpath, outputDirectory, options, maxErrors, log) log.info(logSnip + " generation successful.") }
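Doc.scaladoc above returns a Gen function of shape (sources, classpath, outputDirectory, options, maxErrors, log) => Unit. A hypothetical wiring sketch; cacheStoreFactory, compiler, sources, classpath, apiDir and log are all assumed to be supplied by the calling task:

val gen = Doc.scaladoc("main", cacheStoreFactory, compiler)
gen(sources, classpath, apiDir, Seq("-doc-title", "sbt"), 100, log)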

def sources(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = { val toString = fToString(sourceRoots) apply(relations, outputDirectory, toString, toString) } def packages(relations: Relations, outputDirectory: File, sourceRoots: Iterable[File]): Unit = {
val packageOnly = (path: String) => { val last = path.lastIndexOf(File.separatorChar.toInt) val packagePath = (if (last > 0) path.substring(0, last) else path).trim if (packagePath.isEmpty) "" else packagePath.replace(File.separatorChar, '.') }
val toString = packageOnly compose fToString(sourceRoots) apply(relations, outputDirectory, toString, toString) }
def apply(relations: Relations, outputDir: File, sourceToString: File => String, externalToString: File => String): Unit = {
def file(name: String) = new File(outputDir, name) IO.createDirectory(outputDir)
generateGraph(file("int-class-deps"), "dependencies", relations.internalClassDep, identity[String], identity[String]) generateGraph(file("binary-dependencies"), "externalDependencies", relations.libraryDep, externalToString, sourceToString)
}
def generateGraph[K, V](file: File, graphName: String, relation: Relation[K, V], keyToString: K => String, valueToString: V => String): Unit = {
import scala.collection.mutable.{ HashMap, HashSet } val mappedGraph = new HashMap[String, HashSet[String]] for ((key, values) <- relation.forwardMap; keyString = keyToString(key); value <- values) mappedGraph.getOrElseUpdate(keyString, new HashSet[String]) += valueToString(value)
...
IO.writeLines(file, lines) } def sourceToString(roots: Iterable[File], source: File) = relativized(roots, source).trim.stripSuffix(".scala").stripSuffix(".java")
private def relativized(roots: Iterable[File], path: File): String = { val relativized = roots.flatMap(root => IO.relativize(root, path)) val shortest = (Int.MaxValue /: relativized)(_ min _.length) relativized.find(_.length == shortest).getOrElse(path.getName) }
}

import sbt.util.Logger import sbt.ConcurrentRestrictions.Tag import sbt.protocol.testing._ private[sbt] object ForkTests {
def apply(runners: Map[TestFramework, Runner], tests: List[TestDefinition], config: Execution, classpath: Seq[File], fork: ForkOptions, log: Logger, tag: Tag): Task[TestOutput] = {
val opts = processOptions(config, tests, log) import std.TaskExtra._ val dummyLoader = this.getClass.getClassLoader // can't provide the loader for test classes, which is in another jvm def all(work: Seq[ClassLoader => Unit]) = work.fork(f => f(dummyLoader))
...
main.tag(tag).dependsOn(all(opts.setup): _*) flatMap { results => all(opts.cleanup).join.map(_ => results) } }
private[this] def mainTestTask(runners: Map[TestFramework, Runner], opts: ProcessedOptions, classpath: Seq[File], fork: ForkOptions, log: Logger, parallel: Boolean): Task[TestOutput] =
std.TaskExtra.task { val server = new ServerSocket(0) val testListeners = opts.testListeners flatMap { case tl: TestsListener => Some(tl) case _ => None } object Acceptor extends Runnable { val resultsAcc = mutable.Map.empty[String, SuiteResult]
lazy val result = TestOutput(overall(resultsAcc.values.map(_.result)), resultsAcc.toMap, Iterable.empty)
def run(): Unit = { val socket = try { server.accept() } catch { case e: java.net.SocketException =>
log.error("Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage)
log.error( "Could not accept connection from test agent: " + e.getClass + ": " + e.getMessage)
log.trace(e) server.close() return } val os = new ObjectOutputStream(socket.getOutputStream)
...
try { val config = new ForkConfiguration(log.ansiCodesSupported, parallel) os.writeObject(config)
val taskdefs = opts.tests.map(t => new TaskDef(t.name, forkFingerprint(t.fingerprint), t.explicitlySpecified, t.selectors))
val taskdefs = opts.tests.map( t => new TaskDef(t.name, forkFingerprint(t.fingerprint), t.explicitlySpecified, t.selectors))
os.writeObject(taskdefs.toArray) os.writeInt(runners.size) for ((testFramework, mainRunner) <- runners) { os.writeObject(testFramework.implClassNames.toArray)
...
new React(is, os, log, opts.testListeners, resultsAcc).react() } catch { case NonFatal(e) => def throwableToString(t: Throwable) = {
import java.io._; val sw = new StringWriter; t.printStackTrace(new PrintWriter(sw)); sw.toString
} resultsAcc("Forked test harness failed: " + throwableToString(e)) = SuiteResult.Error } finally { is.close(); os.close(); socket.close() }
...
try { testListeners.foreach(_.doInit()) val acceptorThread = new Thread(Acceptor) acceptorThread.start()
val fullCp = classpath ++: Seq(IO.classLocationFile[ForkMain], IO.classLocationFile[Framework]) val options = Seq("-classpath", fullCp mkString File.pathSeparator, classOf[ForkMain].getCanonicalName, server.getLocalPort.toString)
val ec = Fork.java(fork, options) val result = if (ec != 0)
TestOutput(TestResult.Error, Map("Running java with options " + options.mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error), Iterable.empty)
TestOutput(TestResult.Error, Map( "Running java with options " + options .mkString(" ") + " failed with exit code " + ec -> SuiteResult.Error), Iterable.empty)
else { // Need to wait acceptor thread to finish its business acceptorThread.join() Acceptor.result }
...
case s: SubclassFingerprint => new ForkMain.SubclassFingerscan(s) case a: AnnotatedFingerprint => new ForkMain.AnnotatedFingerscan(a) case _ => sys.error("Unknown fingerprint type: " + f.getClass) } }
private final class React(is: ObjectInputStream, os: ObjectOutputStream, log: Logger, listeners: Seq[TestReportListener], results: mutable.Map[String, SuiteResult]) {
import ForkTags._
@annotation.tailrec def react(): Unit = is.readObject match {
case `Done` => os.writeObject(Done); os.flush() case Array(`Error`, s: String) => log.error(s); react() case Array(`Warn`, s: String) =>

final case class JarManifest(m: Manifest) extends PackageOption { assert(m != null) } final case class MainClass(mainClassName: String) extends PackageOption final case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption
def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = { val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value) new ManifestAttributes(converted: _*) }
def mergeAttributes(a1: Attributes, a2: Attributes) = a1.asScala ++= a2.asScala // merges `mergeManifest` into `manifest` (mutating `manifest` in the process) def mergeManifests(manifest: Manifest, mergeManifest: Manifest): Unit = { mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes)
...
case None => entryMap put (key, value) } } }
final class Configuration(val sources: Seq[(File, String)], val jar: File, val options: Seq[PackageOption])
def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = { val manifest = new Manifest val main = manifest.getMainAttributes for (option <- conf.options) { option match {
...
case _ => log.warn("Ignored unknown package option " + option) } } setVersion(main)
val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") { (inChanged, inputs: Map[File, String] :+: FilesInfo[ModifiedFileInfo] :+: Manifest :+: HNil) => import exists.format val sources :+: _ :+: manifest :+: HNil = inputs inputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) => if (inChanged || outChanged) makeJar(sources.toSeq, jar.file, manifest, log) else log.debug("Jar uptodate: " + jar.file) }
} val map = conf.sources.toMap val inputs = map :+: lastModified(map.keySet) :+: manifest :+: HNil cachedMakeJar(inputs)(exists(conf.jar))
...
def setVersion(main: Attributes): Unit = { val version = Attributes.Name.MANIFEST_VERSION if (main.getValue(version) eq null) main.put(version, "1.0") }
def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = { import Attributes.Name._ val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR) val attribVals = Seq(name, version, orgName) ManifestAttributes(attribKeys zip attribVals: _*) } def addImplManifestAttributes(name: String, version: String, homepage: Option[java.net.URL], org: String, orgName: String): PackageOption = { import Attributes.Name._ val attribKeys = Seq(IMPLEMENTATION_TITLE, IMPLEMENTATION_VERSION, IMPLEMENTATION_VENDOR, IMPLEMENTATION_VENDOR_ID) val attribVals = Seq(name, version, orgName, org) ManifestAttributes((attribKeys zip attribVals) ++ { homepage map (h => (IMPLEMENTATION_URL, h.toString)) }: _*) }
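These helpers build PackageOption values for the jar manifest. A hypothetical build.sbt usage, assuming the helpers live on sbt's Package object as they do in sbt proper (the attribute values are invented):

packageOptions in (Compile, packageBin) +=
  Package.ManifestAttributes("Built-By" -> "ci", "Implementation-Branch" -> "1.0.x")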
def makeJar(sources: Seq[(File, String)], jar: File, manifest: Manifest, log: Logger): Unit = { val path = jar.getAbsolutePath log.info("Packaging " + path + " ...") if (jar.exists) if (jar.isFile)

import sbt.internal.util.ManagedLogger object RawCompileLike { type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] = { @annotation.tailrec def loop(opt: List[String], result: List[File]): List[File] = { opt.dropWhile(!fileInputOpts.contains(_)) match { case List(_, fileOpt, tail @ _*) => { val file = new File(fileOpt) if (file.isFile) loop(tail.toList, file :: result) else loop(tail.toList, result) } case Nil | List(_) => result } } loop(options.toList, Nil) }
def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen = cached(cacheStoreFactory, Seq(), doCompile) def cached(cacheStoreFactory: CacheStoreFactory, fileInputOpts: Seq[String], doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => { type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[String] :+: Int :+: HNil val inputs: Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified(classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
def cached(cacheStoreFactory: CacheStoreFactory, doCompile: Gen): Gen = cached(cacheStoreFactory, Seq(), doCompile) def cached(cacheStoreFactory: CacheStoreFactory, fileInputOpts: Seq[String], doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => { type Inputs = FilesInfo[HashFileInfo] :+: FilesInfo[ModifiedFileInfo] :+: Seq[File] :+: File :+: Seq[ String] :+: Int :+: HNil val inputs : Inputs = hash(sources.toSet ++ optionFiles(options, fileInputOpts)) :+: lastModified( classpath.toSet) :+: classpath :+: outputDirectory :+: options :+: maxErrors :+: HNil
implicit val stringEquiv: Equiv[String] = defaultEquiv implicit val fileEquiv: Equiv[File] = defaultEquiv implicit val intEquiv: Equiv[Int] = defaultEquiv val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory make "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) => if (inChanged || outChanged) doCompile(sources, classpath, outputDirectory, options, maxErrors, log) else log.debug("Uptodate: " + outputDirectory.getAbsolutePath)
} } cachedComp(inputs)(exists(outputDirectory.allPaths.get.toSet)) }
def prepare(description: String, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => {
if (sources.isEmpty) log.info("No sources available, skipping " + description + "...") else { log.info(description.capitalize + " to " + outputDirectory.absolutePath + "...") IO.delete(outputDirectory) IO.createDirectory(outputDirectory) doCompile(sources, classpath, outputDirectory, options, maxErrors, log) log.info(description.capitalize + " successful.") } }
def filterSources(f: File => Boolean, doCompile: Gen): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => doCompile(sources filter f, classpath, outputDirectory, options, maxErrors, log)
def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => {
val compiler = new RawCompiler(instance, cpOptions, log) compiler(sources, classpath, outputDirectory, options) }
def compile(label: String, cacheStoreFactory: CacheStoreFactory, instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
cached(cacheStoreFactory, prepare(label + " sources", rawCompile(instance, cpOptions))) val nop: Gen = (sources, classpath, outputDirectory, options, maxErrors, log) => () }
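RawCompileLike.compile above composes the cached wrapper with prepare and rawCompile into another Gen. A hypothetical wiring sketch; the label is illustrative, and cacheStoreFactory, scalaInstance, cpOptions, sources, classpath, outputDirectory and log are assumed to come from the enclosing task:

val gen = RawCompileLike.compile("bridge sources", cacheStoreFactory, scalaInstance, cpOptions)
gen(sources, classpath, outputDirectory, Seq("-deprecation"), 10, log)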

* Warning: It is therefore inappropriate to use this with anything other than an automatically managed destination or a dedicated target directory. * Warning: Specifically, don't mix this with a directory containing manually created files, like sources. * It is safe to use for its intended purpose: copying resources to a class output directory. */ object Sync {
def apply(store: CacheStore, inStyle: FileInfo.Style = FileInfo.lastModified, outStyle: FileInfo.Style = FileInfo.exists): Traversable[(File, File)] => Relation[File, File] = mappings => { val relation = Relation.empty ++ mappings noDuplicateTargets(relation) val currentInfo = relation._1s.map(s => (s, inStyle(s))).toMap
def apply(store: CacheStore, inStyle: FileInfo.Style = FileInfo.lastModified, outStyle: FileInfo.Style = FileInfo.exists) : Traversable[(File, File)] => Relation[File, File] = mappings => { val relation = Relation.empty ++ mappings noDuplicateTargets(relation) val currentInfo = relation._1s.map(s => (s, inStyle(s))).toMap
val (previousRelation, previousInfo) = readInfo(store)(inStyle.format) val removeTargets = previousRelation._2s -- relation._2s
def outofdate(source: File, target: File): Boolean = !previousRelation.contains(source, target) || (previousInfo get source) != (currentInfo get source) || !target.exists || target.isDirectory != source.isDirectory
val updates = relation filter outofdate
val updates = relation filter outofdate
val (cleanDirs, cleanFiles) = (updates._2s ++ removeTargets).partition(_.isDirectory)
val (cleanDirs, cleanFiles) = (updates._2s ++ removeTargets).partition(_.isDirectory)
IO.delete(cleanFiles) IO.deleteIfEmpty(cleanDirs) updates.all.foreach((copy _).tupled)
IO.delete(cleanFiles) IO.deleteIfEmpty(cleanDirs) updates.all.foreach((copy _).tupled)
writeInfo(store, relation, currentInfo)(inStyle.format) relation }
writeInfo(store, relation, currentInfo)(inStyle.format) relation }
def copy(source: File, target: File): Unit = if (source.isFile) IO.copyFile(source, target, true) else if (!target.exists) // we don't want to update the last modified time of an existing directory
{ IO.createDirectory(target) IO.copyLastModified(source, target) }
{ IO.createDirectory(target) IO.copyLastModified(source, target) }
def noDuplicateTargets(relation: Relation[File, File]): Unit = { val dups = relation.reverseMap.filter { case (target, srcs) => srcs.size >= 2 && srcs.exists(!_.isDirectory)
...
  }
  if (dups.nonEmpty) sys.error("Duplicate mappings:" + dups.mkString)
}
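For illustration, a self-contained sketch of the duplicate-target check on plain (File, File) pairs. The DuplicateTargetCheck object is hypothetical and slightly simplified (it does not replicate the directory exemption of the Relation-based version above):

import java.io.File

object DuplicateTargetCheck {
  // Fail when two or more distinct sources map to the same target file.
  def noDuplicateTargets(mappings: Seq[(File, File)]): Unit = {
    val dups = mappings
      .groupBy(_._2) // target -> all mappings that write to it
      .collect { case (target, ms) if ms.map(_._1).distinct.size >= 2 => target -> ms.map(_._1) }
    if (dups.nonEmpty) sys.error("Duplicate mappings: " + dups.mkString(", "))
  }

  def main(args: Array[String]): Unit = {
    noDuplicateTargets(Seq(new File("a.txt") -> new File("out/a.txt"))) // fine
    // noDuplicateTargets(Seq(new File("a.txt") -> new File("out/x"),
    //                        new File("b.txt") -> new File("out/x"))) // would fail
  }
}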
implicit def relationFormat[A, B](implicit af: JsonFormat[Map[A, Set[B]]],
                                  bf: JsonFormat[Map[B, Set[A]]]): JsonFormat[Relation[A, B]] =
  new JsonFormat[Relation[A, B]] {
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Relation[A, B] =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginArray(js)
...
          builder.endArray()
      }
  }

def writeInfo[F <: FileInfo](store: CacheStore,
                             relation: Relation[File, File],
                             info: Map[File, F])(implicit infoFormat: JsonFormat[F]): Unit =
  store.write((relation, info))

type RelationInfo[F] = (Relation[File, File], Map[File, F])

def readInfo[F <: FileInfo](store: CacheStore)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
  store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
}
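A hedged sketch of the read/write round trip above, with a hypothetical single-slot in-memory store standing in for sbt's CacheStore (which serializes via JsonFormat instances):

object CacheRoundTrip {
  // Hypothetical store: the real CacheStore persists to disk and deserializes typed values.
  final class MemoryStore {
    private var slot: Option[Any] = None
    def write[T](value: T): Unit = slot = Some(value)
    def read[T](default: T): T = slot.map(_.asInstanceOf[T]).getOrElse(default)
  }

  def main(args: Array[String]): Unit = {
    val store = new MemoryStore
    println(store.read(default = Map.empty[String, String])) // Map() on the first run
    store.write(Map("A.scala" -> "out/A.class"))
    println(store.read(default = Map.empty[String, String])) // the previously written relation
  }
}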

 * @param taskName The task about which we are logging. Eg. "my-module-b/test:test"
 */
def run(log: Logger, results: Output, taskName: String): Unit

/** Only allow invocation if certain criteria are met, else use another `TestResultLogger` (defaulting to nothing). */
final def onlyIf(f: (Output, String) => Boolean, otherwise: TestResultLogger = TestResultLogger.Null) =
  TestResultLogger.choose(f, this, otherwise)

/** Allow invocation unless a certain predicate passes, in which case use another `TestResultLogger` (defaulting to nothing). */
final def unless(f: (Output, String) => Boolean, otherwise: TestResultLogger = TestResultLogger.Null) =
  TestResultLogger.choose(f, otherwise, this)
}

object TestResultLogger {
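A standalone sketch of the onlyIf/unless combinator idea, using a simplified hypothetical ResultLogger trait instead of the real TestResultLogger:

object LoggerCombinators {
  type Results = List[String]

  trait ResultLogger {
    def run(results: Results): Unit
    final def onlyIf(p: Results => Boolean, otherwise: ResultLogger = Null): ResultLogger =
      choose(p, this, otherwise)
    final def unless(p: Results => Boolean, otherwise: ResultLogger = Null): ResultLogger =
      choose(p, otherwise, this)
  }

  /** Logger that does nothing; the default fallback. */
  val Null: ResultLogger = new ResultLogger { def run(results: Results): Unit = () }

  /** Pick the first logger when the predicate holds, the second otherwise. */
  def choose(p: Results => Boolean, ifTrue: ResultLogger, ifFalse: ResultLogger): ResultLogger =
    new ResultLogger {
      def run(results: Results): Unit = (if (p(results)) ifTrue else ifFalse).run(results)
    }

  def main(args: Array[String]): Unit = {
    val verbose = new ResultLogger {
      def run(results: Results): Unit = println("Results: " + results.mkString(", "))
    }
    verbose.onlyIf(_.nonEmpty).run(List("passed", "failed")) // prints the summary
    verbose.onlyIf(_.nonEmpty).run(Nil)                      // falls back to Null: silent
  }
}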
...
  results =>
    // Print the standard one-liner statistic if no framework summary is defined, or when > 1 framework is in use.
    results.summaries.size > 1 || results.summaries.headOption.forall(_.summaryText.isEmpty)

val printStandard = TestResultLogger((log, results, _) => {
  val (skippedCount, errorsCount, passedCount, failuresCount, ignoredCount, canceledCount, pendingCount) =
    results.events.foldLeft((0, 0, 0, 0, 0, 0, 0)) {
      case ((skippedAcc, errorAcc, passedAcc, failureAcc, ignoredAcc, canceledAcc, pendingAcc),
            (name, testEvent)) =>
        (skippedAcc + testEvent.skippedCount,
         errorAcc + testEvent.errorCount,
         passedAcc + testEvent.passedCount,
         failureAcc + testEvent.failureCount,
         ignoredAcc + testEvent.ignoredCount,
         canceledAcc + testEvent.canceledCount,
         pendingAcc + testEvent.pendingCount)
    }
  val totalCount = failuresCount + errorsCount + skippedCount + passedCount
  val base = s"Total $totalCount, Failed $failuresCount, Errors $errorsCount, Passed $passedCount"
  val otherCounts = Seq("Skipped" -> skippedCount, "Ignored" -> ignoredCount,
                        "Canceled" -> canceledCount, "Pending" -> pendingCount)
  val extra = otherCounts.filter(_._2 > 0).map { case (label, count) => s", $label $count" }
  val postfix = base + extra.mkString
  results.overall match {
    case TestResult.Error => log.error("Error: " + postfix)
...
show("Passed tests:", Level.Debug, select(TestResult.Passed)) show("Failed tests:", Level.Error, select(TestResult.Failed)) show("Error during tests:", Level.Error, select(TestResult.Error)) })
val printNoTests = TestResultLogger((log, results, taskName) => log.info("No tests to run for " + taskName))
val printNoTests = TestResultLogger( (log, results, taskName) => log.info("No tests to run for " + taskName))
} }
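The printStandard logger above folds seven counters over the per-suite events and renders a one-line summary. A simplified, self-contained sketch of the same aggregation, with a hypothetical Counts case class standing in for the real test events:

object TestSummary {
  final case class Counts(skipped: Int, errors: Int, passed: Int, failed: Int,
                          ignored: Int, canceled: Int, pending: Int)

  def summarize(events: Seq[Counts]): String = {
    // Sum each counter across all suites.
    val t = events.foldLeft(Counts(0, 0, 0, 0, 0, 0, 0)) { (a, e) =>
      Counts(a.skipped + e.skipped, a.errors + e.errors, a.passed + e.passed,
             a.failed + e.failed, a.ignored + e.ignored, a.canceled + e.canceled,
             a.pending + e.pending)
    }
    val total = t.failed + t.errors + t.skipped + t.passed
    val base  = s"Total $total, Failed ${t.failed}, Errors ${t.errors}, Passed ${t.passed}"
    // Only mention the optional categories when they are non-zero.
    val extra = Seq("Skipped" -> t.skipped, "Ignored" -> t.ignored,
                    "Canceled" -> t.canceled, "Pending" -> t.pending)
      .collect { case (label, n) if n > 0 => s", $label $n" }
    base + extra.mkString
  }

  def main(args: Array[String]): Unit =
    println(summarize(Seq(Counts(0, 0, 3, 1, 0, 0, 0), Counts(1, 0, 2, 0, 0, 0, 0))))
  // Total 7, Failed 1, Errors 0, Passed 5, Skipped 1
}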

import sbt.internal.util.FeedbackProvidedException
import xsbti.api.Definition
import xsbti.compile.CompileAnalysis
import ConcurrentRestrictions.Tag
import testing.{ AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, Runner, TaskDef, SuiteSelector, Task => TestTask }
import scala.annotation.tailrec
import sbt.internal.util.ManagedLogger
import sbt.util.Logger
import sbt.protocol.testing.TestResult

sealed trait TestOption

object Tests {

/**
 * The result of a test run.
 *
 * @param overall The overall result of execution across all tests for all test frameworks in this test run.
 * @param events The result of each test group (suite) executed during this test run.
 * @param summaries Explicit summaries directly provided by test frameworks. This may be empty, in which case a default summary will be generated.
 */
final case class Output(overall: TestResult, events: Map[String, SuiteResult], summaries: Iterable[Summary])

/**
 * Summarizes a test run.
 *
 * @param name The name of the test framework providing this summary.
...
 *
 * @param framework The test framework the arguments apply to if one is specified in Some.
 *                  If None, the arguments will apply to all test frameworks.
 * @param args The list of arguments to pass to the selected framework(s).
 */
final case class Argument(framework: Option[TestFramework], args: List[String]) extends TestOption

/**
 * Configures test execution.
 *
 * @param options The options to apply to this execution, including test framework arguments, filters,
...
/** A named group of tests configured to run in the same JVM or be forked. */
final case class Group(name: String, tests: Seq[TestDefinition], runPolicy: TestRunPolicy)

private[sbt] final class ProcessedOptions(
    val tests: Seq[TestDefinition],
    val setup: Seq[ClassLoader => Unit],
    val cleanup: Seq[ClassLoader => Unit],
    val testListeners: Seq[TestReportListener]
)

private[sbt] def processOptions(config: Execution,
                                discovered: Seq[TestDefinition],
                                log: Logger): ProcessedOptions = {
  import collection.mutable.{ HashSet, ListBuffer }
  val testFilters = new ListBuffer[String => Boolean]
  var orderedFilters = Seq[String => Boolean]()
  val excludeTestsSet = new HashSet[String]
  val setup, cleanup = new ListBuffer[ClassLoader => Unit]
  val testListeners = new ListBuffer[TestReportListener]
  val undefinedFrameworks = new ListBuffer[String]

  for (option <- config.options) {
    option match {
      case Filter(include) => testFilters += include
      case Filters(includes) =>
        if (orderedFilters.nonEmpty) sys.error("Cannot define multiple ordered test filters.")
        else orderedFilters = includes
      case Exclude(exclude)         => excludeTestsSet ++= exclude
      case Listeners(listeners)     => testListeners ++= listeners
      case Setup(setupFunction)     => setup += setupFunction
      case Cleanup(cleanupFunction) => cleanup += cleanupFunction
      case a: Argument              => // now handled by whatever constructs `runners`
    }
  }

  if (excludeTestsSet.nonEmpty)
    log.debug(excludeTestsSet.mkString("Excluding tests: \n\t", "\n\t", ""))
  if (undefinedFrameworks.nonEmpty)
    log.warn("Arguments defined for test frameworks that are not present:\n\t" +
      undefinedFrameworks.mkString("\n\t"))

  def includeTest(test: TestDefinition) =
    !excludeTestsSet.contains(test.name) && testFilters.forall(filter => filter(test.name))
  val filtered0 = discovered.filter(includeTest).toList.distinct
  val tests =
    if (orderedFilters.isEmpty) filtered0
    else orderedFilters.flatMap(f => filtered0.filter(d => f(d.name))).toList.distinct
  val uniqueTests = distinctBy(tests)(_.name)
  new ProcessedOptions(uniqueTests, setup.toList, cleanup.toList, testListeners.toList)
}

private[this] def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] = {
  val seen = new collection.mutable.HashSet[K]
  in.filter(t => seen.add(f(t)))
}

def apply(frameworks: Map[TestFramework, Framework],
          testLoader: ClassLoader,
          runners: Map[TestFramework, Runner],
          discovered: Seq[TestDefinition],
          config: Execution,
          log: ManagedLogger): Task[Output] = {
  val o = processOptions(config, discovered, log)
  testTask(testLoader, frameworks, runners, o.tests, o.setup, o.cleanup, log, o.testListeners, config)
}

def testTask(loader: ClassLoader,
             frameworks: Map[TestFramework, Framework],
             runners: Map[TestFramework, Runner],
             tests: Seq[TestDefinition],
             userSetup: Iterable[ClassLoader => Unit],
             userCleanup: Iterable[ClassLoader => Unit],
             log: ManagedLogger,
             testListeners: Seq[TestReportListener],
             config: Execution): Task[Output] = {
  def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*)
  def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () => a(loader) }

  val (frameworkSetup, runnables, frameworkCleanup) =
    TestFramework.testTasks(frameworks, runners, loader, tests, log, testListeners)

  val setupTasks = fj(partApp(userSetup) :+ frameworkSetup)
  val mainTasks =
    if (config.parallel)
      makeParallel(loader, runnables, setupTasks, config.tags) //.toSeq.join
    else
      makeSerial(loader, runnables, setupTasks, config.tags)
  val taggedMainTasks = mainTasks.tagw(config.tags: _*)
  taggedMainTasks map processResults flatMap { results =>
    val cleanupTasks = fj(partApp(userCleanup) :+ frameworkCleanup(results.overall))
    cleanupTasks map { _ => results }
  }
}
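distinctBy above keeps only the first test definition seen for each name. A small standalone copy of that helper, runnable on plain strings:

object DistinctByDemo {
  // Keep the first element for each key; HashSet.add returns false for repeats.
  def distinctBy[T, K](in: Seq[T])(f: T => K): Seq[T] = {
    val seen = new scala.collection.mutable.HashSet[K]
    in.filter(t => seen.add(f(t)))
  }

  def main(args: Array[String]): Unit =
    println(distinctBy(Seq("apple", "avocado", "banana", "blueberry"))(_.head))
  // List(apple, banana)
}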
type TestRunnable = (String, TestFunction)
private def createNestedRunnables(loader: ClassLoader,
                                  testFun: TestFunction,
                                  nestedTasks: Seq[TestTask]): Seq[(String, TestFunction)] =
  nestedTasks.view.zipWithIndex map {
    case (nt, idx) =>
      val testFunDef = testFun.taskDef
      (testFunDef.fullyQualifiedName,
       TestFramework.createTestFunction(
         loader,
         new TaskDef(testFunDef.fullyQualifiedName + "-" + idx,
                     testFunDef.fingerprint,
                     testFunDef.explicitlySpecified,
                     testFunDef.selectors),
         testFun.runner,
         nt))
  }

def makeParallel(loader: ClassLoader,
                 runnables: Iterable[TestRunnable],
                 setupTasks: Task[Unit],
                 tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] =
  toTasks(loader, runnables.toSeq, tags).dependsOn(setupTasks)

def toTasks(loader: ClassLoader,
            runnables: Seq[TestRunnable],
            tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
  val tasks = runnables.map { case (name, test) => toTask(loader, name, test, tags) }
  tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) {
    case (sum, e) =>
      val merged = sum.toSeq ++ e.toSeq
      val grouped = merged.groupBy(_._1)
...
      case (resultSum, result) => resultSum + result
    })
  })
}

def toTask(loader: ClassLoader, name: String, fun: TestFunction, tags: Seq[(Tag, Int)]): Task[Map[String, SuiteResult]] = {
  val base = task { (name, fun.apply()) }
  val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
  taggedBase flatMap {
    case (name, (result, nested)) =>
      val nestedRunnables = createNestedRunnables(loader, fun, nested)
...
        currentResultMap.updated(name, newResult)
      }
  }
}

def makeSerial(loader: ClassLoader,
               runnables: Seq[TestRunnable],
               setupTasks: Task[Unit],
               tags: Seq[(Tag, Int)]): Task[List[(String, SuiteResult)]] = {
  @tailrec
  def processRunnable(runnableList: List[TestRunnable],
                      acc: List[(String, SuiteResult)]): List[(String, SuiteResult)] =
    runnableList match {
      case hd :: rst =>
        val testFun = hd._2
        val (result, nestedTasks) = testFun.apply()
        val nestedRunnables = createNestedRunnables(loader, testFun, nestedTasks)
        processRunnable(nestedRunnables.toList ::: rst, (hd._1, result) :: acc)
      case Nil => acc
    }

  task { processRunnable(runnables.toList, List.empty) } dependsOn (setupTasks)
}
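makeSerial drains a worklist: each test may produce nested tasks, which are pushed onto the front of the remaining list. A minimal standalone sketch of that pattern, using a hypothetical Item type rather than sbt's TestRunnable:

import scala.annotation.tailrec

object Worklist {
  final case class Item(name: String, run: () => (String, List[Item]))

  // Run each item, prepend any nested items it produces, and recurse until the list is empty.
  @tailrec
  def process(todo: List[Item], acc: List[(String, String)]): List[(String, String)] =
    todo match {
      case hd :: rest =>
        val (result, nested) = hd.run()
        process(nested ::: rest, (hd.name, result) :: acc)
      case Nil => acc
    }

  def main(args: Array[String]): Unit = {
    val child  = Item("child", () => ("ok", Nil))
    val parent = Item("parent", () => ("ok", List(child)))
    println(process(List(parent), Nil)) // List((child,ok), (parent,ok))
  }
}

Because the recursion is tail-recursive, deeply nested suites cannot overflow the stack.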
def processResults(results: Iterable[(String, SuiteResult)]): Output =
  Output(overall(results.map(_._2.result)), results.toMap, Iterable.empty)

private def severity(r: TestResult): Int =
...
  case TestResult.Error  => 2
}

def foldTasks(results: Seq[Task[Output]], parallel: Boolean): Task[Output] =
  if (results.isEmpty)
    task { Output(TestResult.Passed, Map.empty, Nil) }
  else if (parallel)
    reduced(results.toIndexedSeq, {
      case (Output(v1, m1, _), Output(v2, m2, _)) =>
        Output(if (severity(v1) < severity(v2)) v2 else v1, m1 ++ m2, Iterable.empty)
    })
  else {
    def sequence(tasks: List[Task[Output]], acc: List[Output]): Task[List[Output]] = tasks match {
      case Nil      => task(acc.reverse)
      case hd :: tl => hd flatMap { out => sequence(tl, out :: acc) }
    }
    sequence(results.toList, List()) map { ress =>
      val (rs, ms) = ress.unzip { e => (e.overall, e.events) }
      Output(overall(rs), ms reduce (_ ++ _), Iterable.empty)
    }
  }

def overall(results: Iterable[TestResult]): TestResult =
  ((TestResult.Passed: TestResult) /: results) { (acc, result) =>
    if (severity(acc) < severity(result)) result else acc
  }
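The overall fold picks the most severe result across all suites. A self-contained sketch with a local TestResult stand-in (the real type is sbt.protocol.testing.TestResult, imported above):

object OverallResult {
  sealed trait TestResult
  case object Passed extends TestResult
  case object Failed extends TestResult
  case object Error  extends TestResult

  // Higher number = worse outcome, mirroring the severity ordering above.
  private def severity(r: TestResult): Int = r match {
    case Passed => 0
    case Failed => 1
    case Error  => 2
  }

  def overall(results: Iterable[TestResult]): TestResult =
    results.foldLeft(Passed: TestResult) { (acc, r) =>
      if (severity(acc) < severity(r)) r else acc
    }

  def main(args: Array[String]): Unit =
    println(overall(Seq(Passed, Failed, Passed))) // Failed
}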
def discover(frameworks: Seq[Framework],
             analysis: CompileAnalysis,
             log: Logger): (Seq[TestDefinition], Set[String]) =
  discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log)

def allDefs(analysis: CompileAnalysis) = analysis match {
  case analysis: Analysis =>
    val acs: Seq[xsbti.api.AnalyzedClass] = analysis.apis.internal.values.toVector
...
        companions.objectApi.structure.declared ++ companions.objectApi.structure.inherited
      all
    }.toSeq
}

def discover(fingerprints: Seq[Fingerprint],
             definitions: Seq[Definition],
             log: Logger): (Seq[TestDefinition], Set[String]) = {
  val subclasses = fingerprints collect {
    case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub)
  }
  val annotations = fingerprints collect {
    case ann: AnnotatedFingerprint => (ann.annotationName, ann.isModule, ann)
  }
  log.debug("Subclass fingerprints: " + subclasses)
  log.debug("Annotation fingerprints: " + annotations)

  def firsts[A, B, C](s: Seq[(A, B, C)]): Set[A] = s.map(_._1).toSet
  def defined(in: Seq[(String, Boolean, Fingerprint)],
              names: Set[String],
              IsModule: Boolean): Seq[Fingerprint] =
    in collect { case (name, IsModule, print) if names(name) => print }

  def toFingerprints(d: Discovered): Seq[Fingerprint] =
    defined(subclasses, d.baseClasses, d.isModule) ++ defined(annotations, d.annotations, d.isModule)

  val discovered = Discovery(firsts(subclasses), firsts(annotations))(definitions)
  // TODO: To pass in correct explicitlySpecified and selectors
  val tests =
    for ((df, di) <- discovered; fingerprint <- toFingerprints(di))
      yield new TestDefinition(df.name, fingerprint, false, Array(new SuiteSelector))
  val mains = discovered collect { case (df, di) if di.hasMain => df.name }
  (tests, mains.toSet)
}
}

final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException

 * The value may be obtained from `getValue` by providing a parent class loader that provides the classes from the classpath
 * this expression was compiled against. Each call to `getValue` constructs a new class loader and loads
 * the module from that class loader. `generated` contains the compiled classes and cache files related
 * to the expression. The name of the auto-generated module wrapping the expression is `enclosingModule`.
 */
final class EvalResult(val tpe: String, val getValue: ClassLoader => Any, val generated: Seq[File], val enclosingModule: String)

/**
 * The result of evaluating a group of Scala definitions. The definitions are wrapped in an auto-generated,
 * top-level module named `enclosingModule`. `generated` contains the compiled classes and cache files related to the definitions.
 * A new class loader containing the module may be obtained from `loader` by passing the parent class loader providing the classes
 * from the classpath that the definitions were compiled against. The list of vals with the requested types is `valNames`.
 * The values for these may be obtained by providing the parent class loader to `values` as is done with `loader`.
 */
final class EvalDefinitions(val loader: ClassLoader => ClassLoader,
                            val generated: Seq[File],
                            val enclosingModule: String,
                            val valNames: Seq[String]) {
  def values(parent: ClassLoader): Seq[Any] = {
    val module = getModule(enclosingModule, loader(parent))
    for (n <- valNames) yield module.getClass.getMethod(n).invoke(module)
  }
}

final class EvalException(msg: String) extends RuntimeException(msg)

// not thread safe, since it reuses a Global instance
final class Eval(optionsNoncp: Seq[String],
                 classpath: Seq[File],
                 mkReporter: Settings => Reporter,
                 backing: Option[File]) {

def this(mkReporter: Settings => Reporter, backing: Option[File]) =
  this(Nil, IO.classLocationFile[Product] :: Nil, mkReporter, backing)
def this() = this(s => new ConsoleReporter(s), None)

backing.foreach(IO.createDirectory)
val classpathString = Path.makeString(classpath ++ backing.toList)
val options = "-cp" +: classpathString +: optionsNoncp

lazy val settings = {
  val s = new Settings(println)
  new CompilerCommand(options.toList, s) // this side-effects on Settings..
  s
}
lazy val reporter = mkReporter(settings)

/**
 * Subclass of Global which allows us to mutate currentRun from outside.
 * See for rationale https://issues.scala-lang.org/browse/SI-8794
 */
final class EvalGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) {
  override def currentRun: Run = curRun
  var curRun: Run = null
}
lazy val global: EvalGlobal = new EvalGlobal(settings, reporter)
import global._
...
}

private[this] var toUnlinkLater = List[Symbol]()
private[this] def unlink(sym: Symbol) = sym.owner.info.decls.unlink(sym)
def eval(expression: String,
         imports: EvalImports = noImports,
         tpeName: Option[String] = None,
         srcName: String = "<setting>",
         line: Int = DefaultStartLine): EvalResult = {
  val ev = new EvalType[String] {
    def makeUnit = mkUnit(srcName, line, expression)
    def unlink = true
    def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = {
      val (parser, tree) = parse(unit, settingErrorStrings, _.expr())
      val tpt: Tree = expectedType(tpeName)
      augment(parser, importTrees, tree, tpt, moduleName)
    }
    def extra(run: Run, unit: CompilationUnit) = enteringPhase(run.typerPhase.next) {
      (new TypeExtractor).getType(unit.body)
    }
    def read(file: File) = IO.read(file)
    def write(value: String, f: File) = IO.write(f, value)
    def extraHash = ""
  }
  val i = evalCommon(expression :: Nil, imports, tpeName, ev)
  val value = (cl: ClassLoader) => getValue[Any](i.enclosingModule, i.loader(cl))
  new EvalResult(i.extra, value, i.generated, i.enclosingModule)
}

def evalDefinitions(definitions: Seq[(String, scala.Range)],
                    imports: EvalImports,
                    srcName: String,
                    file: Option[File],
                    valTypes: Seq[String]): EvalDefinitions = {
  require(definitions.nonEmpty, "Definitions to evaluate cannot be empty.")
  val ev = new EvalType[Seq[String]] {
    lazy val (fullUnit, defUnits) = mkDefsUnit(srcName, definitions)
    def makeUnit = fullUnit
    def unlink = false
    def unitBody(unit: CompilationUnit, importTrees: Seq[Tree], moduleName: String): Tree = {
      val fullParser = new syntaxAnalyzer.UnitParser(unit)
      val trees = defUnits flatMap parseDefinitions
      syntheticModule(fullParser, importTrees, trees.toList, moduleName)
    }
    def extra(run: Run, unit: CompilationUnit) = enteringPhase(run.typerPhase.next) {
      (new ValExtractor(valTypes.toSet)).getVals(unit.body)
    }
    def read(file: File) = IO.readLines(file)
    def write(value: Seq[String], file: File) = IO.writeLines(file, value)
    def extraHash = file match {
      case Some(f) => f.getAbsolutePath
      case None    => ""
    }
  }
  val i = evalCommon(definitions.map(_._1), imports, Some(""), ev)
  new EvalDefinitions(i.loader, i.generated, i.enclosingModule, i.extra)
}

private[this] def evalCommon[T](content: Seq[String],
                                imports: EvalImports,
                                tpeName: Option[String],
                                ev: EvalType[T]): EvalIntermediate[T] = {
  import Eval._
  // TODO - We also encode the source of the setting into the hash to avoid conflicts where the exact SAME setting
  // is defined in multiple evaluated instances with a backing. This leads to issues with finding a previous
  // value on the classpath when compiling.
  val hash = Hash.toHex(
    Hash(
      bytes(
        stringSeqBytes(content) :: optBytes(backing)(fileExistsBytes) :: stringSeqBytes(options) ::
          seqBytes(classpath)(fileModifiedBytes) :: stringSeqBytes(imports.strings.map(_._1)) ::
          optBytes(tpeName)(bytes) :: bytes(ev.extraHash) :: Nil)))
  val moduleName = makeModuleName(hash)

  lazy val unit = {
    reporter.reset
    ev.makeUnit
  }
  lazy val run = new Run {
    override def units = (unit :: Nil).iterator
  }
  def unlinkAll(): Unit =
    for ((sym, _) <- run.symSource)
      if (ev.unlink) unlink(sym)
      else toUnlinkLater ::= sym

  val (extra, loader) = backing match {
    case Some(back) if classExists(back, moduleName) =>
      val loader = (parent: ClassLoader) => new URLClassLoader(Array(back.toURI.toURL), parent)
      val extra = ev.read(cacheFile(back, moduleName))
      (extra, loader)
    case _ =>
      try { compileAndLoad(run, unit, imports, backing, moduleName, ev) } finally { unlinkAll() }
  }

  val generatedFiles = getGeneratedFiles(backing, moduleName)
  new EvalIntermediate(extra, loader, generatedFiles, moduleName)
}
// location of the cached type or definition information
private[this] def cacheFile(base: File, moduleName: String): File =
  new File(base, moduleName + ".cache")

private[this] def compileAndLoad[T](run: Run,
                                    unit: CompilationUnit,
                                    imports: EvalImports,
                                    backing: Option[File],
                                    moduleName: String,
                                    ev: EvalType[T]): (T, ClassLoader => ClassLoader) = {
  global.curRun = run
  run.currentUnit = unit
  val dir = outputDirectory(backing)
  settings.outputDirs setSingleOutput dir

  val importTrees = parseImports(imports)
  unit.body = ev.unitBody(unit, importTrees, moduleName)

  def compile(phase: Phase): Unit = {
    globalPhase = phase
    if (phase == null || phase == phase.next || reporter.hasErrors) ()
    else {
      enteringPhase(phase) { phase.run }
      compile(phase.next)
    }
  }
  compile(run.namerPhase)
  checkError("Type error in expression")

  val extra = ev.extra(run, unit)
  for (f <- backing) ev.write(extra, cacheFile(f, moduleName))
  val loader = (parent: ClassLoader) => new AbstractFileClassLoader(dir, parent)
  (extra, loader)
}

private[this] def expectedType(tpeName: Option[String]): Tree = tpeName match {
  case Some(tpe) => parseType(tpe)
  case None      => TypeTree(NoType)
}

private[this] def outputDirectory(backing: Option[File]): AbstractFile =
  backing match {
    case None      => new VirtualDirectory("<virtual>", None)
    case Some(dir) => new PlainFile(dir)
  }
def load(dir: AbstractFile, moduleName: String): ClassLoader => Any =
  parent => getValue[Any](moduleName, new AbstractFileClassLoader(dir, parent))
def loadPlain(dir: File, moduleName: String): ClassLoader => Any =
  parent => getValue[Any](moduleName, new URLClassLoader(Array(dir.toURI.toURL), parent))
//wrap tree in object objectName { def WrapValName = <tree> }
def augment(parser: global.syntaxAnalyzer.UnitParser,
            imports: Seq[Tree],
            tree: Tree,
            tpt: Tree,
            objectName: String): Tree = {
  val method = DefDef(NoMods, newTermName(WrapValName), Nil, Nil, tpt, tree)
  syntheticModule(parser, imports, method :: Nil, objectName)
}

private[this] def syntheticModule(parser: global.syntaxAnalyzer.UnitParser,
                                  imports: Seq[Tree],
                                  definitions: List[Tree],
                                  objectName: String): Tree = {
  val emptyTypeName = nme.EMPTY.toTypeName
  def emptyPkg = parser.atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
  def emptyInit = DefDef(
    NoMods,
    nme.CONSTRUCTOR,
    Nil,
    List(Nil),
    TypeTree(),
    Block(List(Apply(Select(Super(This(emptyTypeName), emptyTypeName), nme.CONSTRUCTOR), Nil)),
          Literal(Constant(())))
  )

  def moduleBody = Template(List(gen.scalaAnyRefConstr), noSelfType, emptyInit :: definitions)
  def moduleDef = ModuleDef(NoMods, newTermName(objectName), moduleBody)
  parser.makePackaging(0, emptyPkg, (imports :+ moduleDef).toList)
}

private[this] final class TypeExtractor extends Traverser {
  private[this] var result = ""
  def getType(t: Tree) = { result = ""; traverse(t); result }
  override def traverse(tree: Tree): Unit = tree match {
    case d: DefDef if d.symbol.nameString == WrapValName =>
      result = d.symbol.tpe.finalResultType.toString
    case _ => super.traverse(tree)
  }
}
/** Tree traverser that obtains the names of vals in a top-level module whose type is a subtype of one of `types`. */
private[this] final class ValExtractor(tpes: Set[String]) extends Traverser {
  private[this] var vals = List[String]()
  def getVals(t: Tree): List[String] = { vals = Nil; traverse(t); vals }
  def isAcceptableType(tpe: Type): Boolean = {
    tpe.baseClasses.exists { sym => tpes.contains(sym.fullName) }
  }
  override def traverse(tree: Tree): Unit = tree match {
    case ValDef(_, n, actualTpe, _)
        if isTopLevelModule(tree.symbol.owner) && isAcceptableType(actualTpe.tpe) =>
      vals ::= n.dropLocal.encoded
    case _ => super.traverse(tree)
  }
}

// inlined implementation of Symbol.isTopLevelModule that was removed in e5b050814deb2e7e1d6d05511d3a6cb6b013b549
private[this] def isTopLevelModule(s: Symbol): Boolean =
  s.hasFlag(reflect.internal.Flags.MODULE) && s.owner.isPackageClass

private[this] final class EvalIntermediate[T](val extra: T,
                                              val loader: ClassLoader => ClassLoader,
                                              val generated: Seq[File],
                                              val enclosingModule: String)

private[this] def classExists(dir: File, name: String) = (new File(dir, name + ".class")).exists

// TODO: use the code from Analyzer
private[this] def getGeneratedFiles(backing: Option[File], moduleName: String): Seq[File] =
  backing match {
...
private[this] def moduleFileFilter(moduleName: String) = new java.io.FilenameFilter {
  def accept(dir: File, s: String) = (s contains moduleName)
}

private[this] class ParseErrorStrings(val base: String,
                                      val extraBlank: String,
                                      val missingBlank: String,
                                      val extraSemi: String)

private[this] def definitionErrorStrings = new ParseErrorStrings(
  base = "Error parsing definition.",
  extraBlank = " Ensure that there are no blank lines within a definition.",
  missingBlank = " Ensure that definitions are separated by blank lines.",
  extraSemi = " A trailing semicolon is not permitted for standalone definitions."
)
private[this] def settingErrorStrings = new ParseErrorStrings(
  base = "Error parsing expression.",
  extraBlank = " Ensure that there are no blank lines within a setting.",
  missingBlank = " Ensure that settings are separated by blank lines.",
  extraSemi = " Note that settings are expressions and do not end with semicolons. (Semicolons are fine within {} blocks, however.)"
)

/**
 * Parses the provided compilation `unit` according to `f` and then performs checks on the final parser state
 * to catch errors that are common when the content is embedded in a blank-line-delimited format.
 */
private[this] def parse[T](unit: CompilationUnit,
                           errors: ParseErrorStrings,
                           f: syntaxAnalyzer.UnitParser => T): (syntaxAnalyzer.UnitParser, T) = {
  val parser = new syntaxAnalyzer.UnitParser(unit)
  val tree = f(parser)
  val extra = parser.in.token match {
    case EOF => errors.extraBlank
    case _   => ""
  }
  checkError(errors.base + extra)
  parser.accept(EOF)
  val extra2 = parser.in.token match {
    case SEMI               => errors.extraSemi
    case NEWLINE | NEWLINES => errors.missingBlank
    case _                  => ""
  }
  checkError(errors.base + extra2)
  (parser, tree)
}

private[this] def parseType(tpe: String): Tree = {
  val tpeParser = new syntaxAnalyzer.UnitParser(mkUnit("<expected-type>", DefaultStartLine, tpe))
  val tpt0: Tree = tpeParser.typ()
  tpeParser.accept(EOF)
  checkError("Error parsing expression type.")
  tpt0
}

private[this] def parseImports(imports: EvalImports): Seq[Tree] =
  imports.strings flatMap { case (s, line) => parseImport(mkUnit(imports.srcName, line, s)) }

private[this] def parseImport(importUnit: CompilationUnit): Seq[Tree] = {
  val parser = new syntaxAnalyzer.UnitParser(importUnit)
  val trees: Seq[Tree] = parser.importClause()
  parser.accept(EOF)
  checkError("Error parsing imports for expression.")
  trees
}

private[this] def parseDefinitions(du: CompilationUnit): Seq[Tree] =
  parse(du, definitionErrorStrings, parseDefinitions)._2

/** Parses one or more definitions (defs, vals, lazy vals, classes, traits, modules). */
private[this] def parseDefinitions(parser: syntaxAnalyzer.UnitParser): Seq[Tree] = {
  val defs = ListBuffer[Tree]()
  do {
    defs ++= parser.nonLocalDefOrDcl
    parser.acceptStatSepOpt()
  } while (!parser.isStatSeqEnd)
  defs.toList
}
private[this] trait EvalType[T] {
  /** Extracts additional information after the compilation unit is evaluated. */
  def extra(run: Run, unit: CompilationUnit): T

  /** Deserializes the extra information for unchanged inputs from a cache file. */
  def read(file: File): T
...
}

val DefaultStartLine = 0
private[this] def makeModuleName(hash: String): String = "$" + Hash.halve(hash)
private[this] def noImports = new EvalImports(Nil, "")

private[this] def mkUnit(srcName: String, firstLine: Int, s: String) =
  new CompilationUnit(new EvalSourceFile(srcName, firstLine, s))
private[this] def checkError(label: String) =
  if (reporter.hasErrors) throw new EvalException(label)

private[this] final class EvalSourceFile(name: String, startLine: Int, contents: String)
    extends BatchSourceFile(name, contents) {
  override def lineToOffset(line: Int): Int = super.lineToOffset((line - startLine) max 0)
  override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) + startLine
}
/**
 * Constructs a CompilationUnit for each definition, which can be used to independently parse the definition into a Tree.
 * Additionally, a CompilationUnit for the combined definitions is constructed for use by combined compilation after parsing.
 */
private[this] def mkDefsUnit(srcName: String,
                             definitions: Seq[(String, scala.Range)]): (CompilationUnit, Seq[CompilationUnit]) = {
  def fragmentUnit(content: String, lineMap: Array[Int]) =
    new CompilationUnit(fragmentSourceFile(srcName, content, lineMap))

  import collection.mutable.ListBuffer
  val lines = new ListBuffer[Int]()
  val defs = new ListBuffer[CompilationUnit]()
  val fullContent = new java.lang.StringBuilder()
  for ((defString, range) <- definitions) {
    defs += fragmentUnit(defString, range.toArray)
    fullContent.append(defString)
    lines ++= range
    fullContent.append("\n\n")
    lines ++= (range.end :: range.end :: Nil)
  }
  val fullUnit = fragmentUnit(fullContent.toString, lines.toArray)
  (fullUnit, defs.toSeq)
}
/**
 * Source file that can map the offset in the file to and from line numbers that may be discontinuous.
 * The values in `lineMap` must be ordered, but need not be consecutive.
 */
private[this] def fragmentSourceFile(srcName: String, content: String, lineMap: Array[Int]) =
  new BatchSourceFile(srcName, content) {
    override def lineToOffset(line: Int): Int =
      super.lineToOffset(lineMap.indexWhere(_ == line) max 0)
    override def offsetToLine(offset: Int): Int = index(lineMap, super.offsetToLine(offset))
    // the SourceFile attribute is populated from this method, so we are required to only return the name
    override def toString = new File(srcName).getName
    private[this] def index(a: Array[Int], i: Int): Int = if (i < 0 || i >= a.length) 0 else a(i)
  }
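A standalone sketch of the lineMap idea used by fragmentSourceFile: positions in the concatenated content map back to the original, possibly discontinuous, line numbers through an index array (helper names here are hypothetical):

object LineMapDemo {
  // lineMap(i) = original line number of the i-th line of the fragment.
  def fragmentLineToOriginal(lineMap: Array[Int], fragmentLine: Int): Int =
    if (fragmentLine < 0 || fragmentLine >= lineMap.length) 0 else lineMap(fragmentLine)

  def originalLineToFragment(lineMap: Array[Int], originalLine: Int): Int =
    math.max(lineMap.indexWhere(_ == originalLine), 0)

  def main(args: Array[String]): Unit = {
    val lineMap = Array(10, 11, 12, 20, 21) // two fragments: original lines 10-12 and 20-21
    println(fragmentLineToOriginal(lineMap, 3))  // 20
    println(originalLineToFragment(lineMap, 21)) // 4
  }
}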
} private[sbt] object Eval { def optBytes[T](o: Option[T])(f: T => Array[Byte]): Array[Byte] = seqBytes(o.toSeq)(f) def stringSeqBytes(s: Seq[String]): Array[Byte] = seqBytes(s)(bytes) def seqBytes[T](s: Seq[T])(f: T => Array[Byte]): Array[Byte] = bytes(s map f) def bytes(b: Seq[Array[Byte]]): Array[Byte] = bytes(b.length) ++ b.flatten.toArray[Byte] def bytes(b: Boolean): Array[Byte] = Array[Byte](if (b) 1 else 0)
def filesModifiedBytes(fs: Array[File]): Array[Byte] = if (fs eq null) filesModifiedBytes(Array[File]()) else seqBytes(fs)(fileModifiedBytes)
def filesModifiedBytes(fs: Array[File]): Array[Byte] = if (fs eq null) filesModifiedBytes(Array[File]()) else seqBytes(fs)(fileModifiedBytes)
def fileModifiedBytes(f: File): Array[Byte] = (if (f.isDirectory) filesModifiedBytes(f listFiles classDirFilter) else bytes(f.lastModified)) ++ bytes(f.getAbsolutePath) def fileExistsBytes(f: File): Array[Byte] = bytes(f.exists) ++ bytes(f.getAbsolutePath) def bytes(s: String): Array[Byte] = s getBytes "UTF-8"
def bytes(l: Long): Array[Byte] = { val buffer = ByteBuffer.allocate(8) buffer.putLong(l) buffer.array } def bytes(i: Int): Array[Byte] = { val buffer = ByteBuffer.allocate(4) buffer.putInt(i) buffer.array }
def bytes(l: Long): Array[Byte] = { val buffer = ByteBuffer.allocate(8) buffer.putLong(l) buffer.array } def bytes(i: Int): Array[Byte] = { val buffer = ByteBuffer.allocate(4) buffer.putInt(i) buffer.array }
/** The name of the synthetic val in the synthetic module that an expression is assigned to. */ final val WrapValName = "$sbtdef" /** * Gets the value of the expression wrapped in module `objectName`, which is accessible via `loader`. * The module name should not include the trailing `$`. */
def getValue[T](objectName: String, loader: ClassLoader): T = { val module = getModule(objectName, loader) val accessor = module.getClass.getMethod(WrapValName) val value = accessor.invoke(module) value.asInstanceOf[T] }
/** Gets the top-level module `moduleName` from the provided class `loader`. The module name should not include the trailing `$`.*/
def getModule(moduleName: String, loader: ClassLoader): Any = { val clazz = Class.forName(moduleName + "$", true, loader) clazz.getField("MODULE$").get(null) }
private val classDirFilter: FileFilter = DirectoryFilter || GlobFilter("*.class") }
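To make the reflective access concrete, a hedged sketch of what getValue amounts to for a module shaped like the ones Eval emits; the object name MyExpr and the result type Int are hypothetical, only the `$` suffix, `MODULE$` field and `$sbtdef` accessor come from the code above:

// Hypothetical compiled shape: object MyExpr { lazy val $sbtdef: Int = 42 }
def loadSbtDef(loader: ClassLoader): Int = {
  val module = Class.forName("MyExpr" + "$", true, loader).getField("MODULE$").get(null)
  module.getClass.getMethod("$sbtdef").invoke(module).asInstanceOf[Int]
}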

override def write[T: JsonWriter](value: T): Unit = content = converter.toJsonUnsafe(value) }
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)(implicit cache: SingletonCache[T]): U = {
private def testCache[T: JsonFormat, U](f: (SingletonCache[T], CacheStore) => U)( implicit cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter) f(cache, store) }
private def cachePreservesEquality[T: JsonFormat](m: T, eq: (T, T) => Prop, str: T => String): Prop = testCache[T, Prop] { (cache, store) => cache.write(store, m) val out = cache.read(store) eq(out, m) :| s"Expected: ${str(m)}" :| s"Got: ${str(out)}"
} implicit val arbExclusionRule: Arbitrary[ExclusionRule] = Arbitrary( for { o <- Gen.alphaStr
...
explicitArtifacts <- Gen.listOf(arbitrary[Artifact]) exclusions <- Gen.listOf(arbitrary[ExclusionRule]) inclusions <- Gen.listOf(arbitrary[InclusionRule]) extraAttributes <- Gen.mapOf(arbitrary[(String, String)]) crossVersion <- arbitrary[CrossVersion]
} yield ModuleID( organization = o, name = n, revision = r, configurations = cs, isChanging = isChanging, isTransitive = isTransitive, isForce = isForce, explicitArtifacts = explicitArtifacts.toVector, inclusions = inclusions.toVector, exclusions = exclusions.toVector, extraAttributes = extraAttributes, crossVersion = crossVersion, branchName = branch )
} property("moduleIDFormat") = forAll { (m: ModuleID) => def str(m: ModuleID) = { import m._

private[this] lazy val eval = new Eval(_ => reporter, None) property("inferred integer") = forAll { (i: Int) => val result = eval.eval(i.toString) (label("Value", value(result)) |: (value(result) == i)) &&
(label("Type", value(result)) |: (result.tpe == IntType)) && (label("Files", result.generated) |: (result.generated.isEmpty))
(label("Type", value(result)) |: (result.tpe == IntType)) && (label("Files", result.generated) |: (result.generated.isEmpty))
} property("explicit integer") = forAll { (i: Int) => val result = eval.eval(i.toString, tpeName = Some(IntType)) (label("Value", value(result)) |: (value(result) == i)) &&
(label("Type", result.tpe) |: (result.tpe == IntType)) && (label("Files", result.generated) |: (result.generated.isEmpty))
(label("Type", result.tpe) |: (result.tpe == IntType)) && (label("Files", result.generated) |: (result.generated.isEmpty))
} property("type mismatch") = forAll { (i: Int, l: Int) => val line = math.abs(l) val src = "mismatch"
throws(classOf[RuntimeException])(eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) && hasErrors(line + 1, src)
throws(classOf[RuntimeException])( eval.eval(i.toString, tpeName = Some(BooleanType), line = line, srcName = src)) && hasErrors(line + 1, src)
} property("backed local class") = forAll { (i: Int) => IO.withTemporaryDirectory { dir => val eval = new Eval(_ => reporter, backing = Some(dir)) val result = eval.eval(local(i)) val v = value(result).asInstanceOf[{ def i: Int }].i (label("Value", v) |: (v == i)) &&
(label("Type", result.tpe) |: (result.tpe == LocalType)) && (label("Files", result.generated) |: result.generated.nonEmpty)
(label("Type", result.tpe) |: (result.tpe == LocalType)) && (label("Files", result.generated) |: result.generated.nonEmpty)
} } val ValTestNames = Set("x", "a") val ValTestContent = """
...
} """ property("val test") = secure { val defs = (ValTestContent, 1 to 7) :: Nil
val res = eval.evalDefinitions(defs, new EvalImports(Nil, ""), "<defs>", None, "scala.Int" :: Nil)
label("Val names", res.valNames) |: (res.valNames.toSet == ValTestNames) } property("explicit import") = forAll(testImport("import math.abs" :: Nil)) property("wildcard import") = forAll(testImport("import math._" :: Nil))
property("comma-separated imports") = forAll(testImport("import annotation._, math._, meta._" :: Nil)) property("multiple imports") = forAll(testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil))
property("comma-separated imports") = forAll( testImport("import annotation._, math._, meta._" :: Nil)) property("multiple imports") = forAll( testImport("import annotation._" :: "import math._" :: "import meta._" :: Nil))
private[this] def testImport(imports: Seq[String]): Int => Prop = i => value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs(i)
private[this] def testImport(imports: Seq[String]): Int => Prop = i => value(eval.eval("abs(" + i + ")", new EvalImports(imports.zipWithIndex, "imp"))) == math.abs( i)
private[this] def local(i: Int) = "{ class ETest(val i: Int); new ETest(" + i + ") }" val LocalType = "AnyRef{val i: Int}" private[this] def value(r: EvalResult) = r.getValue(getClass.getClassLoader)
private[this] def hasErrors(line: Int, src: String) = { val is = reporter.infos ("Has errors" |: is.nonEmpty) && all(is.toSeq.map(validPosition(line, src)): _*) } private[this] def validPosition(line: Int, src: String)(i: Info) = { val nme = i.pos.source.file.name (label("Severity", i.severity) |: (i.severity == ERROR)) && (label("Line", i.pos.line) |: (i.pos.line == line)) && (label("Name", nme) |: (nme == src)) }
val IntType = "Int" val BooleanType = "Boolean" def label(s: String, value: Any) = s + " (" + value + ")" }

val TemplateCommand = "new" /** The command name to terminate the program.*/ val TerminateAction: String = Exit
def helpBrief = (HelpCommand, s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>').")
def helpDetailed = s"""$HelpCommand Prints a help summary. $HelpCommand <command>
...
$HelpCommand <regular expression> Searches the help according to the provided regular expression. """
def CompletionsDetailed = "Displays a list of completions for the given argument string (run 'completions <string>')."
def CompletionsBrief = (CompletionsCommand, CompletionsDetailed) def templateBrief = (TemplateCommand, "Creates a new sbt build.") def templateDetailed = TemplateCommand + """ [--options] <template> Create a new sbt build based on the given template."""
def HistoryHelpBrief = (HistoryCommands.Start -> "History command help. Lists and describes all history commands.") def historyHelp = Help(Nil, (HistoryHelpBrief +: HistoryCommands.descriptions).toMap, Set(HistoryCommands.Start))
def exitBrief = "Terminates the build."
def logLevelHelp = { val levels = Level.values.toSeq val levelList = levels.mkString(", ") val brief = ("<log-level>", "Sets the logging level to 'log-level'. Valid levels: " + levelList) val detailed = levels.map(l => (l.toString, logLevelDetail(l))).toMap Help(brief, detailed) }
private[this] def logLevelDetail(level: Level.Value): String = s"""$level Sets the global logging level to $level.
...
def runEarly(command: String) = s"$EarlyCommand($command)" private[sbt] def isEarlyCommand(s: String): Boolean = { val levelOptions = Level.values.toSeq map { "-" + _ } (s.startsWith(EarlyCommand + "(") && s.endsWith(")")) ||
(levelOptions contains s)
} val EarlyCommand = "early"
val EarlyCommandBrief = (s"$EarlyCommand(<command>)", "Schedules a command to run before other commands on startup.")
val EarlyCommandDetailed = s"""$EarlyCommand(<command>) Schedules an early command, which will be run before other commands on the command line. The order is preserved between all early commands, so `sbt "early(a)" "early(b)"` executes `a` and `b` in order.
...
If 'full' is specified, the boot directory (`~/.sbt/boot` by default) is deleted before restarting. This forces an update of sbt and Scala and is useful when working with development versions of sbt or Scala.""" def Multi = ";"
def MultiBrief = (Multi + " <command> (" + Multi + " <command>)*", "Runs the provided semicolon-separated commands.")
def MultiDetailed = Multi + " command1 " + Multi + """ command2 ... Runs the specified commands."""
...
$AliasCommand name= Removes the alias for `name`.""" def Shell = "shell"
def ShellDetailed = "Provides an interactive prompt and network server from which commands can be run."
def OldShell = "oldshell" def OldShellDetailed = "Provides an interactive prompt from which commands can be run." def Client = "client"
...
object Compat { def OnFailure = "-" def ClearOnFailure = "--" def FailureWall = "---" def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure)
def ClearOnFailureDeprecated = deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure)
def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall) private[this] def deprecatedAlias(oldName: String, newName: String): String = s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in 0.14.0" }

import sbt.io.IO import scala.util.control.NonFatal object BasicCommands { lazy val allBasicCommands: Seq[Command] = Seq(
nop, ignore, help, completionsCommand, multi, ifLast, append, setOnFailure, clearOnFailure, stashOnFailure, popOnFailure, reboot, call, early, exit, continuous, history, oldshell, client, read, alias
) ++ compatCommands def nop: Command = Command.custom(s => success(() => s)) def ignore: Command = Command.command(FailureWall)(idFun)
...
private[this] def earlyHelp = Help(EarlyCommand, EarlyCommandBrief, EarlyCommandDetailed) def help: Command = Command.make(HelpCommand, helpBrief, helpDetailed)(helpParser)
def helpParser(s: State): Parser[() => State] = { val h = (Help.empty /: s.definedCommands)((a, b) => a ++ (try b.help(s) catch { case NonFatal(_) => Help.empty })) val helpCommands = h.detail.keySet val spacedArg = singleArgument(helpCommands).? applyEffect(spacedArg)(runHelp(s, h)) }
def helpParser(s: State): Parser[() => State] = { val h = (Help.empty /: s.definedCommands)( (a, b) => a ++ (try b.help(s) catch { case NonFatal(_) => Help.empty })) val helpCommands = h.detail.keySet val spacedArg = singleArgument(helpCommands).? applyEffect(spacedArg)(runHelp(s, h)) }
def runHelp(s: State, h: Help)(arg: Option[String]): State = { val message = try Help.message(h, arg) catch { case NonFatal(ex) => ex.toString } System.out.println(message) s }
def completionsCommand: Command =
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)(runCompletions(_)(_))
Command(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)( runCompletions(_)(_))
def completionsParser(state: State): Parser[String] = { val notQuoted = (NotQuoted ~ any.*) map { case (nq, s) => nq ++ s } val quotedOrUnquotedSingleArgument = Space ~> (StringVerbatim | StringEscapable | notQuoted) token(quotedOrUnquotedSingleArgument ?? "" examples ("", " ")) } def runCompletions(state: State)(input: String): State = {
Parser.completions(state.combinedParser, input, 9).get map { c => if (c.isEmpty) input else input + c.append
} foreach { c => System.out.println("[completions] " + c.replaceAll("\n", " ")) } state }
def multiParser(s: State): Parser[List[String]] = { val nonSemi = token(charClass(_ != ';').+, hide = const(true)) val semi = token(';' ~> OptSpace) val part = semi flatMap (_ => matched((s.combinedParser & nonSemi) | nonSemi) <~ token(OptSpace)) (part map (_.trim)).+ map (_.toList) }
def multiApplied(s: State): Parser[() => State] = Command.applyEffect(multiParser(s))(_ ::: s) def multi: Command = Command.custom(multiApplied, Help(Multi, MultiBrief, MultiDetailed))
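A deliberately simplified stand-in for the semicolon handling above (the real multiParser also re-parses each part against the State's combined parser and tolerates leading whitespace), just to show the shape of the result:

def splitMulti(line: String): List[String] =
  line.split(';').toList.map(_.trim).filter(_.nonEmpty)
// splitMulti(";clean; compile") == List("clean", "compile")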
...
(s: State) => token(OptSpace ~> combinedLax(s, NotSpaceClass ~ any.*)) def combinedLax(s: State, any: Parser[_]): Parser[String] = matched(s.combinedParser | token(any, hide = const(true)))
def ifLast: Command = Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)((s, arg) => if (s.remainingCommands.isEmpty) arg :: s else s)
def append: Command =
Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser)((s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source)))
Command(AppendCommand, Help.more(AppendCommand, AppendLastDetailed))(otherCommandParser)( (s, arg) => s.copy(remainingCommands = s.remainingCommands :+ Exec(arg, s.source)))
def setOnFailure: Command = Command(OnFailure, Help.more(OnFailure, OnFailureDetailed))(otherCommandParser)((s, arg) => s.copy(onFailure = Some(Exec(arg, s.source)))) private[sbt] def compatCommands = Seq( Command.command(Compat.ClearOnFailure) { s => s.log.warn(Compat.ClearOnFailureDeprecated) s.copy(onFailure = None) },
Command.arb(s => token(Compat.OnFailure, hide = const(true)) .flatMap(_ => otherCommandParser(s))) { (s, arg) =>
Command.arb( s => token(Compat.OnFailure, hide = const(true)) .flatMap(_ => otherCommandParser(s))) { (s, arg) =>
s.log.warn(Compat.OnFailureDeprecated) s.copy(onFailure = Some(Exec(arg, s.source))) }, Command.command(Compat.FailureWall) { s => s.log.warn(Compat.FailureWallDeprecated)
...
} ) def clearOnFailure: Command = Command.command(ClearOnFailure)(s => s.copy(onFailure = None))
def stashOnFailure: Command = Command.command(StashOnFailure)(s => s.copy(onFailure = None).update(OnFailureStack)(s.onFailure :: _.toList.flatten))
def popOnFailure: Command = Command.command(PopOnFailure) { s => val stack = s.get(OnFailureStack).getOrElse(Nil)
val updated = if (stack.isEmpty) s.remove(OnFailureStack) else s.put(OnFailureStack, stack.tail)
updated.copy(onFailure = stack.headOption.flatten) } def reboot: Command =
Command(RebootCommand, Help.more(RebootCommand, RebootDetailed))(rebootParser)((s, full) => s reboot full)
def rebootParser(s: State): Parser[Boolean] = token(Space ~> "full" ^^^ true) ?? false
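For readers unfamiliar with the parser combinators used here: `^^^` substitutes a fixed result for whatever matched, and `??` supplies a default when nothing matched. A standalone sketch of the same shape, without the State argument (an assumption-labelled illustration, not part of the change):

import sbt.internal.util.complete.{ DefaultParsers, Parser }
import DefaultParsers._
val fullFlag = token(Space ~> "full" ^^^ true) ?? false
// Parser.parse(" full", fullFlag) is expected to yield Right(true); Parser.parse("", fullFlag) Right(false).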
def call: Command = Command(ApplyCommand, Help.more(ApplyCommand, ApplyDetailed))(_ => callParser) { case (state, (cp, args)) => val parentLoader = getClass.getClassLoader def argsStr = args mkString ", " def cpStr = cp mkString File.pathSeparator def fromCpStr = if (cp.isEmpty) "" else s" from $cpStr" state.log info s"Applying State transformations $argsStr$fromCpStr" val loader = if (cp.isEmpty) parentLoader else toLoader(cp.map(f => new File(f)), parentLoader) val loaded = args.map(arg => ModuleUtilities.getObject(arg, loader).asInstanceOf[State => State]) (state /: loaded)((s, obj) => obj(s)) }
def callParser: Parser[(Seq[String], Seq[String])] = token(Space) ~> ((classpathOptionParser ?? Nil) ~ rep1sep(className, token(Space)))
private[this] def className: Parser[String] = { val base = StringBasic & not('-' ~> any.*, "Class name cannot start with '-'.") def single(s: String) = Completions.single(Completion.displayOnly(s)) val compl = TokenCompletions.fixed((seen, _) => if (seen.startsWith("-")) Completions.nil else single("<class name>")) token(base, compl) }
private[this] def classpathOptionParser: Parser[Seq[String]] = token(("-cp" | "-classpath") ~> Space) ~> classpathStrings <~ token(Space) private[this] def classpathStrings: Parser[Seq[String]] = token(StringBasic.map(s => IO.pathSplit(s).toSeq), "<classpath>") def exit: Command = Command.command(TerminateAction, exitBrief, exitBrief)(_ exit true) def continuous: Command =
Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(otherCommandParser) { (s, arg) => withAttribute(s, Watched.Configuration, "Continuous execution not configured.") { w => val repeat = ContinuousExecutePrefix + (if (arg.startsWith(" ")) arg else " " + arg) Watched.executeContinuously(w, s, arg, repeat) }
} def history: Command = Command.custom(historyParser, BasicCommandStrings.historyHelp) def historyParser(s: State): Parser[() => State] =
...
val prompt = (s get shellPrompt) match { case Some(pf) => pf(s); case None => "> " } val reader = new FullReader(history, s.combinedParser) val line = reader.readLine(prompt) line match { case Some(line) =>
val newState = s.copy( onFailure = Some(Exec(Shell, None)), remainingCommands = Exec(line, s.source) +: Exec(OldShell, None) +: s.remainingCommands ).setInteractive(true)
val newState = s .copy( onFailure = Some(Exec(Shell, None)), remainingCommands = Exec(line, s.source) +: Exec(OldShell, None) +: s.remainingCommands ) .setInteractive(true)
if (line.trim.isEmpty) newState else newState.clearGlobalLog case None => s.setInteractive(false) } }
def client: Command = Command(Client, Help.more(Client, ClientDetailed))(_ => clientParser)(runClient)
def clientParser: Parser[Seq[String]] = (token(Space) ~> repsep(StringBasic, token(Space))) | (token(EOF) map (_ => Nil)) def runClient(s0: State, inputArg: Seq[String]): State = {
...
}) NetworkClient.run(arguments) "exit" :: s0.copy(remainingCommands = Nil) }
def read: Command = Command(ReadCommand, Help.more(ReadCommand, ReadDetailed))(readParser)(doRead(_)(_))
def readParser(s: State): Parser[Either[Int, Seq[File]]] = { val files = (token(Space) ~> fileParser(s.baseDir)).+ val portAndSuccess = token(OptSpace) ~> Port portAndSuccess || files }
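A small aside on `||` as used above: unlike `|`, it tags which alternative matched with an Either, which is how doRead can tell a port number apart from a list of files. A simplified sketch with stock parsers (an illustration only, not code from the change):

import sbt.internal.util.complete.{ DefaultParsers, Parser }
import DefaultParsers._
val portOrName = IntBasic || ID
// e.g. Parser.parse("8080", portOrName) should yield Right(Left(8080)) and
// Parser.parse("build", portOrName) should yield Right(Right("build")).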
def doRead(s: State)(arg: Either[Int, Seq[File]]): State = arg match { case Left(portAndSuccess) => val port = math.abs(portAndSuccess)
...
s.log.error("Command file(s) not readable: \n\t" + notFound.mkString("\n\t")) s } }
private def readMessage(port: Int, previousSuccess: Boolean): Option[String] = { // split into two connections because this first connection ends the previous communication xsbt.IPC.client(port) { _.send(previousSuccess.toString) } // and this second connection starts the next communication xsbt.IPC.client(port) { ipc => val message = ipc.receive if (message eq null) None else Some(message)
}
}
def alias: Command = Command(AliasCommand, Help.more(AliasCommand, AliasDetailed)) { s => val name = token(OpOrID.examples(aliasNames(s): _*)) val assign = token(OptSpace ~ '=' ~ OptSpace) val sfree = removeAliases(s) val to = matched(sfree.combinedParser, partial = true).failOnException | any.+.string OptSpace ~> (name ~ (assign ~> to.?).?).? }(runAlias)
def runAlias(s: State, args: Option[(String, Option[Option[String]])]): State = args match { case None => printAliases(s); s
...
def allAliases(s: State): Seq[(String, String)] = aliases(s, (_, _) => true) def aliases(s: State, pred: (String, String) => Boolean): Seq[(String, String)] = s.definedCommands.flatMap(c => getAlias(c).filter(tupled(pred))) def newAlias(name: String, value: String): Command =
Command.make(name, (name, s"'$value'"), s"Alias of '$value'")(aliasBody(name, value))
Command .make(name, (name, s"'$value'"), s"Alias of '$value'")(aliasBody(name, value))
.tag(CommandAliasKey, (name, value)) def aliasBody(name: String, value: String)(state: State): Parser[() => State] = { val aliasRemoved = removeAlias(state, name) // apply the alias value to the commands of `state` except for the alias to avoid recursion (#933)
val partiallyApplied = Parser(Command.combine(aliasRemoved.definedCommands)(aliasRemoved))(value)
val arg = matched(partiallyApplied & (success(()) | (SpaceClass ~ any.*))) // by scheduling the expanded alias instead of directly executing, // we get errors on the expanded string (#598) arg.map(str => () => (value + str) :: state) }
def delegateToAlias(name: String, orElse: Parser[() => State])(state: State): Parser[() => State] =
def delegateToAlias(name: String, orElse: Parser[() => State])( state: State): Parser[() => State] =
aliases(state, (nme, _) => nme == name).headOption match { case None => orElse case Some((n, v)) => aliasBody(n, v)(state) }
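As a usage note, a hedged sketch reusing the newAlias helper defined above; the alias name `t` and value `;clean;test` are hypothetical:

// Registers a command named `t` that expands to ";clean;test". aliasBody schedules the
// expanded string instead of running it directly, so failures are reported against the
// expansion (#598), and the alias removes itself from the parser to avoid recursion (#933).
def exampleAlias: Command = newAlias("t", ";clean;test")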

import sbt.internal.util.AttributeKey import sbt.internal.inc.classpath.ClassLoaderCache import sbt.librarymanagement.ModuleID object BasicKeys {
val historyPath = AttributeKey[Option[File]]("history", "The location where command line history is persisted.", 40) val shellPrompt = AttributeKey[State => String]("shell-prompt", "The function that constructs the command prompt from the current build state.", 10000)
val historyPath = AttributeKey[Option[File]]( "history", "The location where command line history is persisted.", 40) val shellPrompt = AttributeKey[State => String]( "shell-prompt", "The function that constructs the command prompt from the current build state.", 10000)
val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000)
val serverPort = AttributeKey[Int]("server-port", "The port number used by server command.", 10000) private[sbt] val interactive = AttributeKey[Boolean]("interactive", "True if commands are currently being entered from an interactive environment.", 10) private[sbt] val classLoaderCache = AttributeKey[ClassLoaderCache]("class-loader-cache", "Caches class loaders based on the classpath entries and last modified times.", 10) private[sbt] val OnFailureStack = AttributeKey[List[Option[Exec]]]("on-failure-stack", "Stack that remembers on-failure handlers.", 10) private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean]("explicit-global-log-levels", "True if the global logging levels were explicitly set by the user.", 10) private[sbt] val templateResolverInfos = AttributeKey[Seq[TemplateResolverInfo]]("templateResolverInfos", "List of template resolver infos.", 1000)
val serverPort = AttributeKey[Int]("server-port", "The port number used by server command.", 10000) private[sbt] val interactive = AttributeKey[Boolean]( "interactive", "True if commands are currently being entered from an interactive environment.", 10) private[sbt] val classLoaderCache = AttributeKey[ClassLoaderCache]( "class-loader-cache", "Caches class loaders based on the classpath entries and last modified times.", 10) private[sbt] val OnFailureStack = AttributeKey[List[Option[Exec]]]( "on-failure-stack", "Stack that remembers on-failure handlers.", 10) private[sbt] val explicitGlobalLogLevels = AttributeKey[Boolean]( "explicit-global-log-levels", "True if the global logging levels were explicitly set by the user.", 10) private[sbt] val templateResolverInfos = AttributeKey[Seq[TemplateResolverInfo]]( "templateResolverInfos", "List of template resolver infos.", 1000)
} case class TemplateResolverInfo(module: ModuleID, implementationClass: String)
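A usage sketch (assuming the sbt State API is in scope): these attribute keys are read off a State's attribute map, as the shell command does with shellPrompt earlier in this diff:

def promptFor(state: State): String = {
  // Fall back to the default "> " prompt when no shellPrompt attribute is set.
  val f = state.get(BasicKeys.shellPrompt).getOrElse((_: State) => "> ")
  f(state)
}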

private[sbt] final class SimpleCommand( val name: String, private[sbt] val help0: Help, val parser: State => Parser[() => State],
val tags: AttributeMap ) extends Command { assert(Command validID name, s"'$name' is not a valid command name.")
...
override def toString = s"SimpleCommand($name)" } private[sbt] final class ArbitraryCommand(
val parser: State => Parser[() => State], val help: State => Help, val tags: AttributeMap
) extends Command { def tag[T](key: AttributeKey[T], value: T): ArbitraryCommand = new ArbitraryCommand(parser, help, tags.put(key, value)) }
...