Update GDAL & JDK versions (#3538)
* Update GDAL & JDK versions

* Add alternative docker images built from osgeo/gdal

* Add JDK21 builds

* Add Int8 GDAL 3.7+ support

* Add JDK21 builds

* Cleanup Dockerfile; upd CI

* Bump GDALWarp version up

* Dry dataTypeToCellType function
pomadchin authored May 15, 2024
1 parent b74122c commit 6c3e8c8
Showing 15 changed files with 138 additions and 697 deletions.
20 changes: 18 additions & 2 deletions .github/docker-compose.yml
@@ -31,9 +31,25 @@ services:
network_mode: host

test-jdk11:
image: quay.io/azavea/openjdk-gdal:3.1-jdk11-slim
image: daunnc/osgeo-gdal-jdk11:3.8.5
working_dir: /usr/local/src
command: ./sbt ++$SCALA_VERSION test
command: sbt ++$SCALA_VERSION test
environment:
- CI
- SCALA_VERSION
depends_on:
- nginx
- minio
- cassandra
- hbase
volumes:
- ./../:/usr/local/src
network_mode: host

test-jdk21:
image: daunnc/osgeo-gdal-jdk21:3.8.5
working_dir: /usr/local/src
command: bash -c "cp -f .jvmopts.211 .jvmopts; sbt ++$SCALA_VERSION test"
environment:
- CI
- SCALA_VERSION
34 changes: 34 additions & 0 deletions .github/docker/gdal/3.x/ubuntu/Dockerfile
@@ -0,0 +1,34 @@
# docker buildx create --use --platform=linux/arm64,linux/amd64 --name multi-platform-builder
# docker buildx inspect --bootstrap
#
# docker buildx build --build-arg="JAVA_VERSION=11" --build-arg="GDAL_VERSION=3.8.5" --platform linux/amd64,linux/arm64 --push --tag daunnc/osgeo-gdal-jdk11:3.8.5 .
# docker buildx build --build-arg="JAVA_VERSION=17" --build-arg="GDAL_VERSION=3.8.5" --platform linux/amd64,linux/arm64 --push --tag daunnc/osgeo-gdal-jdk17:3.8.5 .
# docker buildx build --build-arg="JAVA_VERSION=21" --build-arg="GDAL_VERSION=3.8.5" --platform linux/amd64,linux/arm64 --push --tag daunnc/osgeo-gdal-jdk21:3.8.5 .

ARG GDAL_VERSION=3.8.5
FROM ghcr.io/osgeo/gdal:ubuntu-full-${GDAL_VERSION}

ARG JAVA_VERSION=11

LABEL Grigory Pomadchin <[email protected]>

ENV DEBIAN_FRONTEND noninteractive

RUN set -ex && \
apt update -y && \
apt install --no-install-recommends -y openjdk-${JAVA_VERSION}-jdk

ENV JAVA_HOME /usr/lib/jvm/java-${JAVA_VERSION}-openjdk-amd64
RUN update-alternatives --set java `update-alternatives --list java | grep java-${JAVA_VERSION}`

ENV LD_LIBRARY_PATH "/lib/x86_64-linux-gnu/:/lib/x86_64-linux-gnu/jni/:/lib/aarch64-linux-gnu/:/lib/aarch64-linux-gnu/jni/:$LD_LIBRARY_PATH"

RUN apt-get update && apt-get -y install bash wget unzip gpg software-properties-common

# Install SBT
RUN apt-get install -y curl gnupg && \
echo 'deb https://repo.scala-sbt.org/scalasbt/debian all main' | tee /etc/apt/sources.list.d/sbt.list && \
echo 'deb https://repo.scala-sbt.org/scalasbt/debian /' | tee /etc/apt/sources.list.d/sbt_old.list && \
curl -sL 'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823' | apt-key add - && \
apt-get update && \
apt-get install -y sbt
10 changes: 5 additions & 5 deletions .github/workflows/ci.yml
@@ -13,15 +13,15 @@ jobs:
strategy:
matrix:
scala: ["2.12.19", "2.13.14"]
java: ["11"]
java: ["11", "21"]
runs-on: ubuntu-latest

env:
SCALA_VERSION: ${{ matrix.scala }}
JAVA_VERSION: ${{ matrix.java }}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -32,7 +32,7 @@ jobs:

- name: run tests
run: |
docker compose -f .github/docker-compose.yml up test-jdk11 --abort-on-container-exit --exit-code-from test-jdk11
docker compose -f .github/docker-compose.yml up "test-jdk${JAVA_VERSION}" --abort-on-container-exit --exit-code-from "test-jdk${JAVA_VERSION}"
publish:
name: Publish Artifacts
@@ -45,11 +45,11 @@ jobs:
distribution: [temurin]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: coursier/cache-action@v6
- uses: actions/setup-java@v3
- uses: actions/setup-java@v4
with:
distribution: ${{ matrix.distribution }}
java-version: ${{ matrix.java }}
18 changes: 18 additions & 0 deletions .jvmopts.211
@@ -0,0 +1,18 @@
-XX:+IgnoreUnrecognizedVMOptions
--add-modules=jdk.incubator.vector
--add-opens=java.base/java.lang=ALL-UNNAMED
--add-opens=java.base/java.lang.invoke=ALL-UNNAMED
--add-opens=java.base/java.lang.reflect=ALL-UNNAMED
--add-opens=java.base/java.io=ALL-UNNAMED
--add-opens=java.base/java.net=ALL-UNNAMED
--add-opens=java.base/java.nio=ALL-UNNAMED
--add-opens=java.base/java.util=ALL-UNNAMED
--add-opens=java.base/java.util.concurrent=ALL-UNNAMED
--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
--add-opens=java.base/sun.nio.cs=ALL-UNNAMED
--add-opens=java.base/sun.security.action=ALL-UNNAMED
--add-opens=java.base/sun.util.calendar=ALL-UNNAMED
-Djdk.reflect.useDirectMethodHandle=false
-Dio.netty.tryReflectionSetAccessible=true
@@ -147,12 +147,12 @@ class GDALRasterSummarySpec extends AnyFunSpec with TestEnvironment with GivenWh
contextRDD.stitch().tile.band(0).renderPng().write("/tmp/raster-source-contextrdd-gdal.png")
}

it("Should cleanup GDAL Datasets by the end of the loop (10 iterations)") {
it("should cleanup GDAL Datasets by the end of the loop (10 iterations)") {
val inputPath = gdalGeoTiffPath("vlm/aspect-tiled.tif")
val targetCRS = WebMercator
val method = Bilinear
val layout = LayoutDefinition(GridExtent[Int](Extent(-2.0037508342789244E7, -2.0037508342789244E7, 2.0037508342789244E7, 2.0037508342789244E7), CellSize(9.554628535647032, 9.554628535647032)), 256)
val RasterExtent(Extent(exmin, eymin, exmax, eymax), ecw, ech, ecols, erows) = RasterExtent(Extent(-8769161.632988561, 4257685.794912352, -8750616.09900087, 4274482.8318780195), CellSize(9.554628535647412, 9.554628535646911))
val RasterExtent(Extent(exmin, eymin, exmax, eymax), ecw, ech, ecols, erows) = RasterExtent(Extent(-8769161.632988561, 4257685.794912352, -8750625.653629405, 4274473.277249484), CellSize(9.554628535647032,9.554628535647032))

cfor(0)(_ < 11, _ + 1) { _ =>
val reference = GDALRasterSource(inputPath).reproject(targetCRS, method = method).tileToLayout(layout, method)
@@ -23,6 +23,7 @@ object GDALDataType {
List(
UnknownType,
TypeByte,
TypeInt8,
TypeUInt16,
TypeInt16,
TypeUInt32,
Expand Down Expand Up @@ -50,9 +51,11 @@ abstract sealed class GDALDataType(val code: Int) {
override def toString: String = code.toString
}

// https://github.com/geotrellis/gdal-warp-bindings/blob/9d75e7c65c4c8a0c2c39175a75656bba458a46f0/src/main/java/com/azavea/gdal/GDALWarp.java#L26-L38
// https://github.com/geotrellis/gdal-warp-bindings/blob/v3.8.1/src/main/java/com/azavea/gdal/GDALWarp.java#L29-L42
// https://github.com/OSGeo/gdal/blob/v3.8.5/swig/include/gdal.i#L148-L165
case object UnknownType extends GDALDataType(GDALWarp.GDT_Unknown)
case object TypeByte extends GDALDataType(GDALWarp.GDT_Byte)
case object TypeInt8 extends GDALDataType(GDALWarp.GDT_Int8)
case object TypeUInt16 extends GDALDataType(GDALWarp.GDT_UInt16)
case object TypeInt16 extends GDALDataType(GDALWarp.GDT_Int16)
case object TypeUInt32 extends GDALDataType(GDALWarp.GDT_UInt32)
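For reference, a minimal sketch (not part of the commit) of what the new `TypeInt8` entry amounts to: it simply forwards the `GDT_Int8` constant added in gdal-warp-bindings 3.8.x, which GDAL 3.7+ reports for signed 8-bit bands. `com.azavea.gdal.GDALWarp` is the bindings class linked in the source comment above; the object name here is hypothetical.

```scala
import com.azavea.gdal.GDALWarp
import geotrellis.raster.gdal._

object Int8CodeCheck extends App {
  // The case object just wraps the bindings constant.
  assert(TypeInt8.code == GDALWarp.GDT_Int8)
  println(TypeInt8) // GDALDataType.toString prints the numeric GDAL type code
}
```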
4 changes: 2 additions & 2 deletions gdal/src/main/scala/geotrellis/raster/gdal/GDALDataset.scala
@@ -370,8 +370,8 @@ case class GDALDataset(token: Long) extends AnyVal {
/** To handle the [[BitCellType]] it is possible to fetch NBITS information from the RasterBand metadata, **/
lazy val bitsPerSample = md.get("NBITS").map(_.toInt)
/** To handle the [[ByteCellType]] it is possible to fetch information about the sampleFormat from the RasterBand metadata. **/
lazy val signedByte = md.get("PIXELTYPE").contains("SIGNEDBYTE")
GDALUtils.dataTypeToCellType(datatype = dt, noDataValue = nd, typeSizeInBits = bitsPerSample, signedByte = signedByte)
lazy val isSignedByte = md.get("PIXELTYPE").contains("SIGNEDBYTE")
GDALUtils.dataTypeToCellType(datatype = dt, noDataValue = nd, typeSizeInBits = bitsPerSample, isSignedByte = isSignedByte)
}

def readTile(gb: GridBounds[Int] = GridBounds(dimensions), band: Int, datasetType: DatasetType = GDALDataset.WARPED): Tile = {
18 changes: 11 additions & 7 deletions gdal/src/main/scala/geotrellis/raster/gdal/GDALUtils.scala
@@ -41,23 +41,26 @@ object GDALUtils {
case _ => throw new Exception(s"Could not find equivalent GDALResampleMethod for: $method")
}

def dataTypeToCellType(datatype: GDALDataType, noDataValue: Option[Double] = None, typeSizeInBits: => Option[Int] = None, signedByte: => Boolean = false): CellType =
def dataTypeToCellType(datatype: GDALDataType, noDataValue: Option[Double] = None, typeSizeInBits: => Option[Int] = None, isSignedByte: => Boolean = false): CellType = {
def byteCellType: CellType = noDataValue match {
case Some(nd) if nd.toInt > Byte.MinValue.toInt && nd <= Byte.MaxValue.toInt => ByteUserDefinedNoDataCellType(nd.toByte)
case Some(nd) if nd.toInt == Byte.MinValue.toInt => ByteConstantNoDataCellType
case _ => ByteCellType
}

datatype match {
case TypeByte =>
typeSizeInBits match {
case Some(bits) if bits == 1 => BitCellType
case _ =>
if(!signedByte) noDataValue match {
if(!isSignedByte) noDataValue match {
case Some(nd) if nd.toInt > 0 && nd <= 255 => UByteUserDefinedNoDataCellType(nd.toByte)
case Some(nd) if nd.toInt == 0 => UByteConstantNoDataCellType
case _ => UByteCellType
}
else noDataValue match {
case Some(nd) if nd.toInt > Byte.MinValue.toInt && nd <= Byte.MaxValue.toInt => ByteUserDefinedNoDataCellType(nd.toByte)
case Some(nd) if nd.toInt == Byte.MinValue.toInt => ByteConstantNoDataCellType
case _ => ByteCellType
}
else byteCellType
}
case TypeInt8 => byteCellType
case TypeUInt16 =>
noDataValue match {
case Some(nd) if nd.toInt > 0 && nd <= 65535 => UShortUserDefinedNoDataCellType(nd.toShort)
@@ -99,6 +102,7 @@ object GDALUtils {
case TypeCInt16 | TypeCInt32 | TypeCFloat32 | TypeCFloat64 =>
throw new UnsupportedOperationException("Complex datatypes are not supported.")
}
}

def deriveOverviewStrategyString(strategy: OverviewStrategy): String = strategy match {
case Auto(n) if n == 0 => "AUTO"
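A short usage sketch (hypothetical, not part of the commit) of the de-duplicated mapping above; the expected cell types follow directly from the branches shown in the diff, and the object name is an assumption.

```scala
import geotrellis.raster._
import geotrellis.raster.gdal._

object DataTypeToCellTypeExample extends App {
  // GDAL 3.7+ signed 8-bit bands (GDT_Int8) and the legacy PIXELTYPE=SIGNEDBYTE hint
  // now both go through the shared byteCellType helper.
  val fromInt8: CellType       = GDALUtils.dataTypeToCellType(TypeInt8)                           // ByteCellType
  val fromSignedByte: CellType = GDALUtils.dataTypeToCellType(TypeByte, isSignedByte = true)      // ByteCellType
  // Unsigned byte with a user-defined NoData value of 255
  val fromUByteNd: CellType    = GDALUtils.dataTypeToCellType(TypeByte, noDataValue = Some(255d)) // UByteUserDefinedNoDataCellType
  println((fromInt8, fromSignedByte, fromUByteNd))
}
```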
Binary file modified gdal/src/test/resources/vlm/aspect-tiled-bilinear-linux.tif
Binary file modified gdal/src/test/resources/vlm/aspect-tiled-near-merc-rdd.tif
Binary file modified gdal/src/test/resources/vlm/aspect-tiled-near.tif
@@ -45,9 +45,9 @@ class GDALReprojectRasterSourceSpec extends AnyFunSpec with RasterMatchers with
val uri = Resource.path("vlm/aspect-tiled.tif")

/**
* For some reasons, the Pipeline described above is OS specific,
* and Bilinear interpolation behaves differently.
* For some reason, the Pipeline described above is OS specific, and Bilinear interpolation behaves differently.
* To make tests pass, one bilinear version was generated on macOS and another inside a Linux container.
* This test may require regenerating the tiffs, since the resampling output changes slightly between GDAL versions.
*
* TODO: investigate the nature of this bug later
* */
4 changes: 2 additions & 2 deletions project/Dependencies.scala
@@ -23,8 +23,8 @@ object Version {
val cassandra = "4.17.0"
val hbase = "2.5.8"
val hadoop = "3.3.6"
val gdal = "3.1.0"
val gdalWarp = "1.2.0"
val gdal = "3.8.0"
val gdalWarp = "3.8.1"

val previousVersion = "3.6.0"
}
50 changes: 40 additions & 10 deletions project/Settings.scala
@@ -28,6 +28,8 @@ import mdoc.MdocPlugin.autoImport._
import java.io.File

object Settings {
def javaMajorVersion: Int = System.getProperty("java.version").split("\\.").head.toInt

object Repositories {
val apacheSnapshots = "apache-snapshots" at "https://repository.apache.org/content/repositories/snapshots/"
val eclipseReleases = "eclipse-releases" at "https://repo.eclipse.org/content/groups/releases"
@@ -152,6 +154,34 @@ object Settings {
)
)

lazy val java17SparkSettings = Seq(
// JDK17+ https://github.com/apache/spark/blob/v3.5.1/pom.xml#L299-L317
javaOptions ++= {
if (javaMajorVersion >= 17)
Seq(
"-XX:+IgnoreUnrecognizedVMOptions",
"--add-modules=jdk.incubator.vector",
"--add-opens=java.base/java.lang=ALL-UNNAMED",
"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
"--add-opens=java.base/java.io=ALL-UNNAMED",
"--add-opens=java.base/java.net=ALL-UNNAMED",
"--add-opens=java.base/java.nio=ALL-UNNAMED",
"--add-opens=java.base/java.util=ALL-UNNAMED",
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED",
"--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED",
"-Djdk.reflect.useDirectMethodHandle=false",
"-Dio.netty.tryReflectionSetAccessible=true"
)
else Nil
}
)

lazy val sparkCompatDependencies = Def.setting { CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.3") // spark uses it as a par collections compat
case Some((2, 12)) => Nil
@@ -210,7 +240,7 @@ object Settings {
import geotrellis.spark._
import geotrellis.spark.store.accumulo._
"""
) ++ commonSettings ++ noForkInTests
) ++ commonSettings ++ java17SparkSettings ++ noForkInTests

lazy val bench = Seq(
libraryDependencies += sl4jnop,
@@ -269,7 +299,7 @@ object Settings {
import geotrellis.spark.util._
import geotrellis.spark.store.cassandra._
"""
) ++ noForkInTests ++ commonSettings
) ++ commonSettings ++ java17SparkSettings ++ noForkInTests


lazy val `doc-examples` = Seq(
@@ -353,7 +383,7 @@ object Settings {
import geotrellis.spark.store.hbase._
import geotrellis.store.hbase._
"""
) ++ commonSettings ++ noForkInTests
) ++ commonSettings ++ java17SparkSettings ++ noForkInTests

lazy val macros = Seq(
name := "geotrellis-macros",
@@ -461,7 +491,7 @@ object Settings {
import geotrellis.spark._
import geotrellis.spark.store.s3._
"""
) ++ noForkInTests ++ commonSettings
) ++ commonSettings ++ java17SparkSettings ++ noForkInTests

lazy val shapefile = Seq(
name := "geotrellis-shapefile",
@@ -498,7 +528,7 @@ object Settings {
import geotrellis.spark._
import geotrellis.spark.util._
"""
) ++ noForkInTests ++ commonSettings
) ++ commonSettings ++ java17SparkSettings ++ noForkInTests

lazy val `spark-pipeline` = Seq(
name := "geotrellis-spark-pipeline",
@@ -530,7 +560,7 @@ object Settings {
case "META-INF/ECLIPSEF.RSA" | "META-INF/ECLIPSEF.SF" => MergeStrategy.discard
case _ => MergeStrategy.first
}
) ++ commonSettings
) ++ commonSettings ++ java17SparkSettings

lazy val `spark-testkit` = Seq(
name := "geotrellis-spark-testkit",
@@ -540,7 +570,7 @@
apacheSpark("sql").value % Provided,
scalatest
)
) ++ commonSettings
) ++ commonSettings ++ java17SparkSettings

lazy val util = Seq(
name := "geotrellis-util",
@@ -632,7 +662,7 @@ object Settings {
Test / fork := true,
Test / parallelExecution := false,
Test / testOptions += Tests.Argument("-oDF"),
javaOptions ++= Seq("-Djava.library.path=/usr/local/lib")
// javaOptions ++= Seq("-Djava.library.path=/usr/lib:/usr/local/lib")
) ++ commonSettings

lazy val `gdal-spark` = Seq(
@@ -647,6 +677,6 @@
Test / fork := true,
Test / parallelExecution := false,
Test / testOptions += Tests.Argument("-oDF"),
javaOptions ++= Seq("-Djava.library.path=/usr/local/lib")
) ++ commonSettings
// javaOptions ++= Seq("-Djava.library.path=/usr/lib:/usr/local/lib")
) ++ commonSettings ++ java17SparkSettings
}
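The `java17SparkSettings` block above mirrors `.jvmopts.211`: the module-opening flags Spark 3.5 itself applies on JDK 17+ (see the pom.xml link in the diff). A minimal sketch, assuming a JDK 9+ style `java.version` string, of how its `javaMajorVersion` guard evaluates; the object name is hypothetical.

```scala
object JavaVersionProbe extends App {
  // Same expression as Settings.javaMajorVersion: "21.0.3" => 21, "11.0.23" => 11.
  // A legacy "1.8.0_x" string would yield 1, which is harmless here because the build
  // targets JDK 11+ and only the >= 17 branch appends the --add-opens flags.
  val javaMajorVersion: Int = System.getProperty("java.version").split("\\.").head.toInt
  println(s"JDK $javaMajorVersion => apply Spark JDK17+ flags: ${javaMajorVersion >= 17}")
}
```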