Implements basic crossbuild for 2.{11,12,13}
Done:

* Compile and tests for 2.11
* Assembly for 2.11

TODO:

* Compile and tests for 2.12 and 2.13
* Assembly for 2.12 and 2.13

Work toward #84
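
For orientation, the build.sbt rewrite below boils down to the sbt-projectmatrix pattern sketched here. This is a minimal sketch distilled from the diff: the Scala and Spark versions are the ones introduced below, the shared settings are elided, and the names of the generated per-Scala-version subprojects are left to the plugin.

val scala211 = "2.11.12"
val scala212 = "2.12.15"
val scala213 = "2.13.8"

// Settings shared by every cell of the matrix (dependencies, test options,
// scalastyle, publishing); the full list is in the diff below.
lazy val commonSettings = Def.settings(
  name := "data-validator"
)

// One JVM axis per Scala version, each pinned to a Spark release that
// publishes artifacts for that Scala version.
lazy val root = (projectMatrix in file("."))
  .settings(commonSettings)
  .jvmPlatform(
    scalaVersions = Seq(scala211),
    settings = Seq(libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.3.4" % Provided)
  )
  .jvmPlatform(
    scalaVersions = Seq(scala212),
    settings = Seq(libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.4.8" % Provided)
  )
  .jvmPlatform(
    scalaVersions = Seq(scala213),
    settings = Seq(libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.2.1" % Provided)
  )

The plugin (added to project/plugins.sbt below) expands the matrix into one subproject per Scala version; running sbt's projects command should list the generated names.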
colindean committed Jun 9, 2022
1 parent 379163e commit 75375e3
Showing 4 changed files with 86 additions and 60 deletions.
138 changes: 80 additions & 58 deletions build.sbt
@@ -1,17 +1,21 @@
name := "data-validator"
organization := "com.target"

scalaVersion := "2.11.12"

val sparkVersion = "2.3.1"

val circeVersion = "0.10.0"

//addDependencyTreePlugin
val scala211 = "2.11.12"
val scala212 = "2.12.15"
val scala213 = "2.13.8"

ThisBuild / organization := "com.target"
enablePlugins(GitVersioning)
git.useGitDescribe := true
ThisBuild / git.useGitDescribe := true
ThisBuild / versionScheme := Some("early-semver")

// sbt auto-reload on changes
Global / onChangedBuildSource := ReloadOnSourceChanges

// Enforces scalastyle checks
val compileScalastyle = TaskKey[Unit]("compileScalastyle")
val generateTestData = TaskKey[Unit]("generateTestData")

/////////////
// Publishing
/////////////
@@ -23,59 +27,77 @@ githubRepository := "data-validator"
githubTokenSource := (TokenSource.Environment("GITHUB_TOKEN") ||
TokenSource.GitConfig("github.token") ||
TokenSource.Environment("SHELL")) // it's safe to assume this exists and is not unique

publishTo := githubPublishTo.value

enablePlugins(BuildInfoPlugin)
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion)
buildInfoPackage := "com.target.data_validator"
lazy val commonSettings: SettingsDefinition = Def.settings(
name := "data-validator",
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion),
buildInfoPackage := "com.target.data_validator",
libraryDependencies ++= Seq(
"com.typesafe.scala-logging" %% "scala-logging" % "3.8.0",
"com.github.scopt" %% "scopt" % "3.7.0",
"com.sun.mail" % "javax.mail" % "1.6.2",
"com.lihaoyi" %% "scalatags" % "0.6.7",
"io.circe" %% "circe-yaml" % "0.9.0",
"io.circe" %% "circe-core" % circeVersion,
"io.circe" %% "circe-generic" % circeVersion,
"io.circe" %% "circe-parser" % circeVersion,
// "org.apache.spark" %% "spark-sql" % sparkVersion % Provided,
"org.scalatest" %% "scalatest" % "3.0.5" % Test,
"junit" % "junit" % "4.12" % Test,
"com.novocode" % "junit-interface" % "0.11" % Test exclude ("junit", "junit-dep")
),
(Test / fork) := true,
javaOptions ++= Seq("-Xms512M", "-Xmx2048M", "-XX:+CMSClassUnloadingEnabled"),
(Test / parallelExecution) := false,

libraryDependencies ++= Seq(
"com.typesafe.scala-logging" %% "scala-logging" % "3.8.0",
"com.github.scopt" %% "scopt" % "3.7.0",
"com.sun.mail" % "javax.mail" % "1.6.2",
"com.lihaoyi" %% "scalatags" % "0.6.7",
"io.circe" %% "circe-yaml" % "0.9.0",
"io.circe" %% "circe-core" % circeVersion,
"io.circe" %% "circe-generic" % circeVersion,
"io.circe" %% "circe-parser" % circeVersion,
"org.apache.spark" %% "spark-sql" % sparkVersion % Provided,

"org.scalatest" %% "scalatest" % "3.0.5" % Test,
"junit" % "junit" % "4.12" % Test,
"com.novocode" % "junit-interface" % "0.11" % Test exclude ("junit", "junit-dep")
)
// required for unit tests, but not set in some environments
(Test / envVars) ++= Map(
"JAVA_HOME" ->
Option(System.getenv("JAVA_HOME"))
.getOrElse(System.getProperty("java.home"))
),
(assembly / mainClass) := Some("com.target.data_validator.Main"),
scalastyleFailOnWarning := true,
scalastyleFailOnError := true,
compileScalastyle := (Compile / scalastyle).toTask("").value,
(Compile / compile) := ((Compile / compile) dependsOn compileScalastyle).value,
(Compile / run) := Defaults
.runTask(Compile / fullClasspath, Compile / run / mainClass, Compile / run / runner)
.evaluated,

Test / fork := true
javaOptions ++= Seq("-Xms512M", "-Xmx2048M", "-XX:+CMSClassUnloadingEnabled")
Test / parallelExecution := false
// required for unit tests, but not set in some environments
Test / envVars ++= Map(
"JAVA_HOME" ->
Option(System.getenv("JAVA_HOME"))
.getOrElse(System.getProperty("java.home"))
/////////////
// Publishing
/////////////
githubOwner := "target",
githubRepository := "data-validator",
// this unfortunately must be set strangely because GitHub requires a token for pulling packages
// and sbt-github-packages does not allow the user to configure the resolver not to be used.
// https://github.com/djspiewak/sbt-github-packages/issues/28
githubTokenSource := (TokenSource.Environment("GITHUB_TOKEN") ||
TokenSource.GitConfig("github.token") ||
TokenSource.Environment("SHELL")), // it's safe to assume this exists and is not unique
publishTo := githubPublishTo.value
)

assembly / mainClass := Some("com.target.data_validator.Main")

// Enforces scalastyle checks
val compileScalastyle = TaskKey[Unit]("compileScalastyle")
scalastyleFailOnWarning := true
scalastyleFailOnError := true

compileScalastyle := (Compile / scalastyle).toTask("").value
(Compile / compile) := ((Compile / compile) dependsOn compileScalastyle).value

(Compile / run) := Defaults
.runTask(
Compile / fullClasspath,
Compile / run / mainClass,
Compile / run / runner
lazy val root = (projectMatrix in file("."))
.enablePlugins(BuildInfoPlugin)
.settings(commonSettings)
.jvmPlatform(
scalaVersions = Seq(scala211),
settings = Seq(
libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.3.4" % Provided,
(Compile / runMain) := Defaults.runMainTask(Compile / fullClasspath, Compile / run / runner).evaluated,
generateTestData := {
(Compile / runMain).toTask(" com.target.data_validator.GenTestData").value
}
)
)
.jvmPlatform(
scalaVersions = Seq(scala212),
settings = Seq(libraryDependencies += "org.apache.spark" %% "spark-sql" % "2.4.8" % Provided)
)
.jvmPlatform(
scalaVersions = Seq(scala213),
settings = Seq(libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.2.1" % Provided)
)
.evaluated

(Compile / runMain) := Defaults.runMainTask(Compile / fullClasspath, Compile / run / runner).evaluated
TaskKey[Unit]("generateTestData") := {
libraryDependencies += "org.apache.spark" %% "spark-sql" % sparkVersion
(Compile / runMain).toTask(" com.target.data_validator.GenTestData").value
}
1 change: 1 addition & 0 deletions project/plugins.sbt
@@ -3,3 +3,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0")
addSbtPlugin("com.codecommit" % "sbt-github-packages" % "0.5.3")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.9.0")
ConfigParserSpec.scala
@@ -5,6 +5,8 @@ import com.target.data_validator.validator.{MinNumRows, NullCheck}
import io.circe.Json
import org.scalatest.{BeforeAndAfterAll, FunSpec}

import scala.io.Source

class ConfigParserSpec extends FunSpec with BeforeAndAfterAll {

// Silence is golden!
@@ -106,7 +108,8 @@ class ConfigParserSpec extends FunSpec with BeforeAndAfterAll {
describe("parseFile") {

it("should support loading config files by path") {
val output = ConfigParser.parseFile("src/test/resources/test_config.yaml", Map.empty)
val path = getClass.getResource("/test_config.yaml").getPath
val output = ConfigParser.parseFile(path, Map.empty)
assert(output == Right(expectedConfiguration))
}

ValidatorSpecifiedFormatLoaderSpec.scala
@@ -19,7 +19,7 @@ class ValidatorSpecifiedFormatLoaderSpec extends WordSpec with Matchers with Tes
MinNumRows(JsonUtils.string2Json("9"))
),
options = None,
loadData = Some(List("src/test/resources/format_test.jsonl"))
loadData = Some(List(getClass.getResource("/format_test.jsonl").getPath))
)

val didFail = loader.quickChecks(spark, mkDict())(mkConfig(List(loader)))
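
Both test changes above follow the same idea: resolve fixture files through the test classpath instead of a path hard-coded relative to the repository root, which should keep the suites independent of the working directory they happen to run from. A minimal, self-contained sketch of that lookup (the resource name is taken from the diff; the surrounding test harness is omitted):

import java.io.File

object ResourcePathSketch {
  def main(args: Array[String]): Unit = {
    // Resolve the fixture from the classpath root; getResource returns null
    // when the resource is not on the classpath, so fail fast in that case.
    val url = getClass.getResource("/test_config.yaml")
    require(url != null, "test_config.yaml is not on the classpath")
    println(new File(url.getPath).isFile)
  }
}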
