=pro akka#15031 use sbt-unidoc plugin
2m committed Apr 28, 2014
1 parent d84d583 commit 7b35ded
Showing 5 changed files with 46 additions and 123 deletions.
79 changes: 39 additions & 40 deletions project/AkkaBuild.scala
@@ -24,8 +24,9 @@ import java.io.{PrintWriter, InputStreamReader, FileInputStream, File}
import java.nio.charset.Charset
import java.util.Properties
import annotation.tailrec
import Unidoc.{ JavaDoc, javadocSettings, junidocSources, sunidoc, unidocExclude }
import TestExtras. { JUnitFileReporting, StatsDMetrics }
import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, scalaJavaUnidocSettings, genjavadocSettings, scalaUnidocSettings }
import sbtunidoc.Plugin.UnidocKeys.{ unidoc, unidocProjectFilter }
import TestExtras.{ JUnitFileReporting, StatsDMetrics }
import com.typesafe.sbt.S3Plugin.{ S3, s3Settings }

object AkkaBuild extends Build {
@@ -49,17 +50,15 @@ object AkkaBuild extends Build {
lazy val akka = Project(
id = "akka",
base = file("."),
settings = parentSettings ++ Release.settings ++ Unidoc.settings ++ Publish.versionSettings ++
SphinxSupport.settings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ unidocScaladocSettings ++
StatsDMetrics.settings ++
Protobuf.settings ++ inConfig(JavaDoc)(Defaults.configSettings) ++ Seq(
settings = parentSettings ++ Release.settings ++ unidocSettings ++ Publish.versionSettings ++
SphinxSupport.settings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ scaladocSettings ++
StatsDMetrics.settings ++ Protobuf.settings ++ inTask(unidoc)(Seq(
unidocProjectFilter in ScalaUnidoc := docProjectFilter,
unidocProjectFilter in JavaUnidoc := docProjectFilter,
apiMappings in ScalaUnidoc := (apiMappings in (Compile, doc)).value
)) ++ Seq(
parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", "false").toBoolean,
Publish.defaultPublishTo in ThisBuild <<= crossTarget / "repository",
unidocExclude := Seq(samples.id, remoteTests.id),
sources in JavaDoc <<= junidocSources,
javacOptions in JavaDoc := Seq(),
artifactName in packageDoc in JavaDoc := ((sv, mod, art) => "" + mod.name + "_" + sv.binary + "-" + mod.revision + "-javadoc.jar"),
packageDoc in Compile <<= packageDoc in JavaDoc,
Dist.distExclude := Seq(actorTests.id, docs.id, samples.id, osgi.id),
// generate online version of docs
sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
@@ -71,11 +70,11 @@ object AkkaBuild extends Build {
S3.progress in S3.upload := true,
mappings in S3.upload <<= (Release.releaseDirectory, version) map { (d, v) =>
val downloads = d / "downloads"
val archivesPathFinder = (downloads * ("*" + v + ".zip")) +++ (downloads * ("*" + v + ".tgz"))
val archivesPathFinder = (downloads * ("*" + v + ".zip")) +++ (downloads * ("*" + v + ".tgz"))
archivesPathFinder.get.map(file => (file -> ("akka/" + file.getName)))
},
// add reportBinaryIssues to validatePullRequest on minor version maintenance branch
validatePullRequest <<= (Unidoc.unidoc, SphinxSupport.generate in Sphinx in docs) map { (_, _) => }
validatePullRequest <<= (unidoc in Compile, SphinxSupport.generate in Sphinx in docs) map { (_, _) => }
),
aggregate = Seq(actor, testkit, actorTests, remote, remoteTests, camel, cluster, slf4j, agent,
persistence, zeroMQ, kernel, osgi, docs, contrib, samples, multiNodeTestkit)
@@ -288,7 +287,7 @@ object AkkaBuild extends Build {
id = "akka-samples",
base = file("akka-samples"),
settings = parentSettings ++ ActivatorDist.settings,
aggregate = Seq(camelSampleJava, camelSampleScala, mainSampleJava, mainSampleScala,
aggregate = Seq(camelSampleJava, camelSampleScala, mainSampleJava, mainSampleScala,
remoteSampleJava, remoteSampleScala, clusterSampleJava, clusterSampleScala,
fsmSampleScala, persistenceSampleJava, persistenceSampleScala,
multiNodeSampleScala, helloKernelSample, osgiDiningHakkersSample)
@@ -300,7 +299,7 @@ object AkkaBuild extends Build {
dependencies = Seq(actor, camel),
settings = sampleSettings ++ Seq(libraryDependencies ++= Dependencies.camelSample)
)

lazy val camelSampleScala = Project(
id = "akka-sample-camel-scala",
base = file("akka-samples/akka-sample-camel-scala"),
@@ -321,7 +320,7 @@ object AkkaBuild extends Build {
dependencies = Seq(actor),
settings = sampleSettings
)

lazy val mainSampleScala = Project(
id = "akka-sample-main-scala",
base = file("akka-samples/akka-sample-main-scala"),
@@ -342,7 +341,7 @@ object AkkaBuild extends Build {
dependencies = Seq(actor, remote),
settings = sampleSettings
)

lazy val remoteSampleScala = Project(
id = "akka-sample-remote-scala",
base = file("akka-samples/akka-sample-remote-scala"),
@@ -381,7 +380,7 @@ object AkkaBuild extends Build {
}
)
) configs (MultiJvm)

lazy val clusterSampleScala = Project(
id = "akka-sample-cluster-scala",
base = file("akka-samples/akka-sample-cluster-scala"),
@@ -399,7 +398,7 @@ object AkkaBuild extends Build {
}
)
) configs (MultiJvm)

lazy val multiNodeSampleScala = Project(
id = "akka-sample-multi-node-scala",
base = file("akka-samples/akka-sample-multi-node-scala"),
@@ -471,7 +470,7 @@ object AkkaBuild extends Build {
}},
// force publication of artifacts to local maven repo
compile in Compile <<=
(publishM2 in actor, publishM2 in testkit, publishM2 in remote, publishM2 in cluster, publishM2 in osgi,
(publishM2 in actor, publishM2 in testkit, publishM2 in remote, publishM2 in cluster, publishM2 in osgi,
publishM2 in slf4j, publishM2 in persistence, compile in Compile) map
((_, _, _, _, _, _, _, c) => c))
else Seq.empty
@@ -718,13 +717,12 @@ object AkkaBuild extends Build {

// don't save test output to a file
testListeners in (Test, test) := Seq(TestLogger(streams.value.log, {_ => streams.value.log }, logBuffered.value)),

validatePullRequestTask,
// add reportBinaryIssues to validatePullRequest on minor version maintenance branch
validatePullRequest <<= validatePullRequest.dependsOn(reportBinaryIssues)

) ++ mavenLocalResolverSettings ++ JUnitFileReporting.settings ++ StatsDMetrics.settings

) ++ mavenLocalResolverSettings ++ JUnitFileReporting.settings ++ StatsDMetrics.settings

val validatePullRequest = TaskKey[Unit]("validate-pull-request", "Additional tasks for pull request validation")
// the tasks to run for validation are defined in defaultSettings
@@ -774,7 +772,7 @@ object AkkaBuild extends Build {
ScalariformKeys.preferences in Compile := formattingPreferences,
ScalariformKeys.preferences in Test := formattingPreferences
)

lazy val docFormatSettings = SbtScalariform.scalariformSettings ++ Seq(
ScalariformKeys.preferences in Compile := docFormattingPreferences,
ScalariformKeys.preferences in Test := docFormattingPreferences,
@@ -788,7 +786,7 @@ object AkkaBuild extends Build {
.setPreference(AlignParameters, true)
.setPreference(AlignSingleLineCaseStatements, true)
}

def docFormattingPreferences = {
import scalariform.formatter.preferences._
FormattingPreferences()
@@ -833,6 +831,15 @@ object AkkaBuild extends Build {
case (false, _) => Seq.empty
})

val genjavadocEnabled = System.getProperty("akka.genjavadoc.enabled", "false").toBoolean
val (unidocSettings, javadocSettings) =
if (genjavadocEnabled) (scalaJavaUnidocSettings, genjavadocSettings)
else (scalaUnidocSettings, Nil)

val docProjectFilter = inAnyProject --
inAggregates(samples, transitive = true, includeRoot = true) --
inProjects(remoteTests)

lazy val scaladocDiagramsEnabled = System.getProperty("akka.scaladoc.diagrams", "true").toBoolean
lazy val scaladocAutoAPI = System.getProperty("akka.scaladoc.autoapi", "true").toBoolean

@@ -846,22 +853,14 @@ object AkkaBuild extends Build {
scaladocSettingsNoVerificationOfDiagrams ++
(if (scaladocDiagramsEnabled) Seq(doc in Compile ~= scaladocVerifier) else Seq.empty)
}

// for projects with few (one) classes there might not be any diagrams
lazy val scaladocSettingsNoVerificationOfDiagrams: Seq[sbt.Setting[_]] = {
inTask(doc)(Seq(
scalacOptions in Compile <++= (version, baseDirectory in akka) map scaladocOptions,
autoAPIMappings := scaladocAutoAPI
))
}

lazy val unidocScaladocSettings: Seq[sbt.Setting[_]]= {
inTask(doc)(Seq(
scalacOptions <++= (version, baseDirectory in akka) map scaladocOptions,
autoAPIMappings := scaladocAutoAPI
)) ++
(if (scaladocDiagramsEnabled) Seq(sunidoc ~= scaladocVerifier) else Seq.empty)
}

def scaladocVerifier(file: File): File= {
@tailrec
@@ -892,7 +891,7 @@ object AkkaBuild extends Build {
else
file
}

lazy val mimaIgnoredProblems = {
import com.typesafe.tools.mima.core._
Seq(
@@ -906,7 +905,7 @@ object AkkaBuild extends Build {
binaryIssueFilters ++= mimaIgnoredProblems
)

def akkaPreviousArtifact(id: String, organization: String = "com.typesafe.akka", version: String = "2.3.0",
def akkaPreviousArtifact(id: String, organization: String = "com.typesafe.akka", version: String = "2.3.0",
crossVersion: String = "2.10"): Option[sbt.ModuleID] =
if (enableMiMa) {
val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
@@ -945,8 +944,8 @@ object AkkaBuild extends Build {
OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaImport(), configImport(), "*"),
// dynamicImportPackage needed for loading classes defined in configuration
OsgiKeys.dynamicImportPackage := Seq("*")
)
)

val agent = exports(Seq("akka.agent.*"))

val camel = exports(Seq("akka.camel.*"))
@@ -977,7 +976,7 @@ object AkkaBuild extends Build {
// needed because testkit is normally not used in the application bundle,
// but it should still be included as transitive dependency and used by BundleDelegatingClassLoader
// to be able to find reference.conf
"akka.testkit",
"akka.testkit",
"com.google.protobuf")

def exports(packages: Seq[String] = Seq(), imports: Seq[String] = Nil) = osgiSettings ++ Seq(
@@ -1071,7 +1070,7 @@ object Dependencies {
}

import Compile._

val scalaXmlDepencency = (if (AkkaBuild.requestedScalaVersion.startsWith("2.10")) Nil else Seq(Test.scalaXml))

val actor = Seq(config)
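
For reference, here is a minimal stand-alone sketch of the wiring pattern this commit introduces in `AkkaBuild.scala`: sbt-unidoc's `scalaJavaUnidocSettings` on the aggregating project, `genjavadocSettings` on the sub-projects whose Javadoc should be generated, and a `unidocProjectFilter` that excludes projects from both documentation passes. The project names (`root`, `core`, `samples`) are hypothetical.

```scala
import sbt._
import Keys._
import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, scalaJavaUnidocSettings, genjavadocSettings }
import sbtunidoc.Plugin.UnidocKeys.{ unidoc, unidocProjectFilter }

object ExampleBuild extends Build {

  // projects to leave out of both the Scaladoc and Javadoc passes
  lazy val docFilter = inAnyProject -- inProjects(samples)

  lazy val root = Project(
    id = "root",
    base = file("."),
    settings = Defaults.defaultSettings ++ scalaJavaUnidocSettings ++ inTask(unidoc)(Seq(
      unidocProjectFilter in ScalaUnidoc := docFilter,
      unidocProjectFilter in JavaUnidoc := docFilter
    )),
    aggregate = Seq(core, samples)
  )

  // genjavadocSettings adds the genjavadoc compiler plugin so that Java sources
  // are emitted for the JavaUnidoc pass
  lazy val core = Project(
    id = "core",
    base = file("core"),
    settings = Defaults.defaultSettings ++ genjavadocSettings
  )

  // excluded from the API docs via docFilter above
  lazy val samples = Project(
    id = "samples",
    base = file("samples"),
    settings = Defaults.defaultSettings
  )
}
```
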
3 changes: 2 additions & 1 deletion project/Dist.scala
@@ -6,6 +6,7 @@ import sbt.classpath.ClasspathUtilities
import sbt.Project.Initialize
import java.io.File
import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
import sbtunidoc.Plugin._

object Dist {
case class DistSources(depJars: Seq[File], libJars: Seq[File], srcJars: Seq[File], docJars: Seq[File], api: File, docs: File)
@@ -29,7 +30,7 @@ object Dist {
distLibJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageBin.task in Compile),
distSrcJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageSrc.task in Compile),
distDocJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageDoc.task in Compile),
distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, Unidoc.sunidoc, generate in Sphinx in docsProject) map DistSources,
distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, doc in ScalaUnidoc, generate in Sphinx in docsProject) map DistSources,
distDirectory <<= crossTarget / "dist",
distUnzipped <<= distDirectory / "unzipped",
distFile <<= (distDirectory, version) { (dir, v) => dir / ("akka-" + v + ".zip") },
5 changes: 3 additions & 2 deletions project/Release.scala
@@ -6,6 +6,7 @@ import java.io.File
import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
import com.typesafe.sbt.pgp.PgpKeys.publishSigned
import com.typesafe.sbt.S3Plugin.S3
import sbtunidoc.Plugin.UnidocKeys._

object Release {
val releaseDirectory = SettingKey[File]("release-directory")
@@ -25,11 +26,11 @@ object Release {
val projectRef = extracted.get(thisProjectRef)
val repo = extracted.get(Publish.defaultPublishTo)
val state1 = extracted.runAggregated(publishSigned in projectRef, state)
val (state2, (api, japi)) = extracted.runTask(Unidoc.unidoc, state1)
val (state2, Seq(api, japi)) = extracted.runTask(unidoc in Compile, state1)
val (state3, docs) = extracted.runTask(generate in Sphinx, state2)
val (state4, dist) = extracted.runTask(Dist.dist, state3)
val (state5, activatorDist) = extracted.runTask(ActivatorDist.activatorDist in LocalProject(AkkaBuild.samples.id), state4)

IO.delete(release)
IO.createDirectory(release)
IO.copyDirectory(repo, release / "releases")
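
A side effect visible in `Release.scala` above: the plugin's aggregated `unidoc` task yields a `Seq[File]` of output directories, so the result is destructured with `Seq(api, japi)` instead of the old `(api, japi)` pair returned by `Unidoc.unidoc`. A sketch of consuming that output follows; the `copy-api-docs` key and the target paths are hypothetical, and the Seq is assumed to hold the Scaladoc and Javadoc directories in that order, as in the release step above.

```scala
import sbt._
import Keys._
import sbtunidoc.Plugin.UnidocKeys.unidoc

object ApiDocs {
  // hypothetical task: copy the combined API docs into a release layout
  val copyApiDocs = TaskKey[Unit]("copy-api-docs", "Copy unidoc output into a release directory")

  val settings = Seq(
    copyApiDocs <<= (unidoc in Compile, target) map { (dirs, out) =>
      val Seq(api, japi) = dirs
      IO.copyDirectory(api, out / "release" / "api")
      IO.copyDirectory(japi, out / "release" / "japi")
    }
  )
}
```
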
80 changes: 0 additions & 80 deletions project/Unidoc.scala

This file was deleted.

2 changes: 2 additions & 0 deletions project/plugins.sbt
@@ -18,4 +18,6 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.1")

addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.5")

addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.1")

libraryDependencies += "com.timgroup" % "java-statsd-client" % "2.0.0"
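
With the plugin added here, the combined API docs come from sbt-unidoc's `unidoc` task instead of the deleted `project/Unidoc.scala` helpers. Given the conditional wiring in `AkkaBuild.scala` above, running e.g. `sbt -Dakka.genjavadoc.enabled=true unidoc` should produce both the Scaladoc and the genjavadoc-backed Javadoc, while a plain `sbt unidoc` generates only the Scala unidoc.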
