@@ -52,9 +52,11 @@ lazy val cobrix = (project in file("."))
   .disablePlugins(sbtassembly.AssemblyPlugin)
   .settings(
     name := "cobrix",
+    crossScalaVersions := List(scala211, scala212, scala213),
 
     // No need to publish the aggregation [empty] artifact
     publishArtifact := false,
+    publish / skip := true,
     publish := {},
     publishLocal := {}
   )
@@ -65,13 +67,13 @@ lazy val cobolParser = (project in file("cobol-parser"))
   .enablePlugins(AutomateHeaderPlugin)
   .settings(
     name := "cobol-parser",
+    crossScalaVersions := List(scala211, scala212, scala213),
     libraryDependencies ++= CobolParserDependencies :+ getScalaDependency(scalaVersion.value),
     shadedDependencies ++= CobolParserShadedDependencies,
     shadingRules ++= Seq(
       ShadingRule.moveUnder("org.antlr.v4.runtime", "za.co.absa.cobrix.cobol.parser.shaded")
     ),
     validNamespaces ++= Set("za"),
-    releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     assemblySettings,
     jacocoReportSettings := commonJacocoReportSettings.withTitle("cobrix:cobol-parser Jacoco Report"),
     jacocoExcludes := commonJacocoExcludes
@@ -83,16 +85,19 @@ lazy val cobolConverters = (project in file("cobol-converters"))
   .enablePlugins(AutomateHeaderPlugin)
   .settings(
     name := "cobol-converters",
+    crossScalaVersions := List(scala211, scala212, scala213),
     libraryDependencies ++= CobolConvertersDependencies :+ getScalaDependency(scalaVersion.value),
     // No need to publish this artifact since it has test only at the moment
     publishArtifact := false,
+    publish / skip := true,
     publish := {},
     publishLocal := {}
   )
 
 lazy val sparkCobol = (project in file("spark-cobol"))
   .settings(
     name := "spark-cobol",
+    crossScalaVersions := List(scala211, scala212, scala213),
     printSparkVersion := {
       val log = streams.value.log
       log.info(s"Building with Spark ${sparkVersion(scalaVersion.value)}, Scala ${scalaVersion.value}")
@@ -111,7 +116,6 @@ lazy val sparkCobol = (project in file("spark-cobol"))
     libraryDependencies ++= SparkCobolDependencies(scalaVersion.value) :+ getScalaDependency(scalaVersion.value),
     Test / fork := true, // Spark tests fail randomly otherwise
     populateBuildInfoTemplate,
-    releasePublishArtifactsAction := PgpKeys.publishSigned.value,
     assemblySettings
   ).dependsOn(cobolParser)
   .settings(
@@ -124,10 +128,6 @@ lazy val sparkCobol = (project in file("spark-cobol"))
 ThisBuild / coverageExcludedPackages := ".*examples.*;.*replication.*"
 ThisBuild / coverageExcludedFiles := ".*Example.*;Test.*"
 
-// release settings
-releaseCrossBuild := true
-addCommandAlias("releaseNow", ";set releaseVersionBump := sbtrelease.Version.Bump.Bugfix; release with-defaults")
-
 lazy val assemblySettings = Seq(
   // This merge strategy retains service entries for all services in manifest.
   // It allows custom Spark data sources to be used together, e.g. 'spark-xml' and 'spark-cobol'.
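A minimal sketch of how the new settings are typically driven from the sbt shell, assuming standard sbt cross-building and the sbt-pgp plugin already referenced above (PgpKeys.publishSigned); the `+` prefix runs a task once per entry in crossScalaVersions, and modules with `publish / skip := true` are left out of publishing:

    # Hypothetical usage, not part of the commit
    sbt +test             # compiles and tests against Scala 2.11, 2.12 and 2.13
    sbt +publishSigned    # cross-publishes signed artifacts; the cobrix aggregator
                          # and cobol-converters are skipped via publish / skip := true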