diff --git a/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerDeleter.scala b/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerDeleter.scala
index 3459221c8a..8728ced3bb 100644
--- a/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerDeleter.scala
+++ b/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerDeleter.scala
@@ -18,15 +18,16 @@ package geotrellis.spark.io.accumulo
import geotrellis.spark.LayerId
import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.accumulo.core.client.{BatchWriterConfig, Connector}
import org.apache.accumulo.core.security.Authorizations
import org.apache.accumulo.core.data.{Range => AccumuloRange}
import scala.collection.JavaConverters._
-class AccumuloLayerDeleter(val attributeStore: AttributeStore, connector: Connector) extends LazyLogging with LayerDeleter[LayerId] {
+class AccumuloLayerDeleter(val attributeStore: AttributeStore, connector: Connector) extends LayerDeleter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def delete(id: LayerId): Unit = {
try {
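The change repeated across the files below swaps the com.typesafe.scalalogging.LazyLogging mixin for an explicitly constructed logger held in a @transient lazy val, so the SLF4J-backed Logger is rebuilt lazily in each JVM rather than travelling with serialized instances. A minimal sketch of the before/after shape, using an illustrative class name that is not part of this change set:

    // Before: the logger member comes from mixing in the trait.
    import com.typesafe.scalalogging.LazyLogging

    class ExampleDeleter extends LazyLogging {
      def delete(id: String): Unit = logger.info(s"Deleting $id")
    }

    // After: the logger is built explicitly; @transient + lazy means it is
    // skipped during serialization and re-created on first use.
    import org.slf4j.LoggerFactory
    import com.typesafe.scalalogging.Logger

    class ExampleDeleter extends Serializable {
      @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
      def delete(id: String): Unit = logger.info(s"Deleting $id")
    }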
diff --git a/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerWriter.scala b/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerWriter.scala
index 3d376084bf..29cfa7ff22 100644
--- a/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerWriter.scala
+++ b/accumulo/src/main/scala/geotrellis/spark/io/accumulo/AccumuloLayerWriter.scala
@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd.RDD
import spray.json._
@@ -35,7 +35,8 @@ class AccumuloLayerWriter(
instance: AccumuloInstance,
table: String,
options: AccumuloLayerWriter.Options
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
// Layer Updating
def overwrite[
diff --git a/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerDeleter.scala b/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerDeleter.scala
index af7eafd30f..75000f21b9 100644
--- a/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerDeleter.scala
+++ b/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerDeleter.scala
@@ -18,14 +18,15 @@ package geotrellis.spark.io.cassandra
import geotrellis.spark.LayerId
import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import com.datastax.driver.core.querybuilder.QueryBuilder
import com.datastax.driver.core.querybuilder.QueryBuilder.{eq => eqs}
import scala.collection.JavaConverters._
-class CassandraLayerDeleter(val attributeStore: AttributeStore, instance: CassandraInstance) extends LazyLogging with LayerDeleter[LayerId] {
+class CassandraLayerDeleter(val attributeStore: AttributeStore, instance: CassandraInstance) extends LayerDeleter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def delete(id: LayerId): Unit = {
try {
diff --git a/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerWriter.scala b/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerWriter.scala
index 72ff3be7cc..474897b678 100644
--- a/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerWriter.scala
+++ b/cassandra/src/main/scala/geotrellis/spark/io/cassandra/CassandraLayerWriter.scala
@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd.RDD
import spray.json._
@@ -35,7 +35,8 @@ class CassandraLayerWriter(
instance: CassandraInstance,
keyspace: String,
table: String
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
// Layer updating
def overwrite[
diff --git a/docs/CHANGELOG.rst b/docs/CHANGELOG.rst
index 733c3eb55e..9956d36b07 100644
--- a/docs/CHANGELOG.rst
+++ b/docs/CHANGELOG.rst
@@ -1,6 +1,13 @@
Changelog
=========
+2.3.3
+-----
+*2019 Oct 4*
+
+- Remove direct usage of `com.typesafe.scalalogging.LazyLogging` trait
+- Upgrade to Proj4J 1.1.0
+
2.3.2
-----
*2019 Aug 12*
diff --git a/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureReader.scala b/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureReader.scala
index 2d37872096..59d749380d 100644
--- a/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureReader.scala
+++ b/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureReader.scala
@@ -20,8 +20,6 @@ import geotrellis.geotools._
import geotrellis.spark._
import geotrellis.util.annotations.experimental
import geotrellis.vector._
-
-import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.mapreduce.InputFormatBase
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Job
@@ -38,9 +36,7 @@ import scala.reflect.ClassTag
* @define experimental EXPERIMENTAL@experimental
*/
@experimental class GeoMesaFeatureReader(val instance: GeoMesaInstance)
- (implicit sc: SparkContext) extends Serializable with LazyLogging {
-
- logger.error("GeoMesa support is experimental")
+ (implicit sc: SparkContext) extends Serializable {
/** $experimental */
@experimental def readSimpleFeatures(
diff --git a/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureWriter.scala b/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureWriter.scala
index 9375584490..9977b09824 100644
--- a/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureWriter.scala
+++ b/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaFeatureWriter.scala
@@ -21,7 +21,6 @@ import geotrellis.spark._
import geotrellis.util.annotations.experimental
import geotrellis.vector._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext
import org.geotools.data.Transaction
@@ -31,9 +30,7 @@ import org.opengis.feature.simple.SimpleFeatureType
* @define experimental EXPERIMENTAL@experimental
*/
@experimental class GeoMesaFeatureWriter(val instance: GeoMesaInstance)
- (implicit sc: SparkContext) extends Serializable with LazyLogging {
-
- logger.error("GeoMesa support is experimental")
+ (implicit sc: SparkContext) extends Serializable {
/** $experimental */
@experimental def write[G <: Geometry, D: ? => Seq[(String, Any)]: λ[α => Feature[G, α] => FeatureToGeoMesaSimpleFeatureMethods[G, α]]]
diff --git a/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaInstance.scala b/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaInstance.scala
index addef44f23..e06f6bcd65 100644
--- a/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaInstance.scala
+++ b/geomesa/src/main/scala/geotrellis/spark/io/geomesa/GeoMesaInstance.scala
@@ -19,7 +19,6 @@ package geotrellis.spark.io.geomesa
import geotrellis.spark.LayerId
import geotrellis.util.annotations.experimental
-import com.typesafe.scalalogging.LazyLogging
import org.geotools.data.DataStoreFinder
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore
@@ -28,9 +27,7 @@ import scala.collection.JavaConverters._
/**
* @define experimental EXPERIMENTAL@experimental
*/
-@experimental class GeoMesaInstance(val conf: Map[String, String])
- extends Serializable with LazyLogging {
- logger.error("GeoMesa support is experimental")
+@experimental class GeoMesaInstance(val conf: Map[String, String]) extends Serializable {
val SEP = "__.__"
diff --git a/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveAttributeStore.scala b/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveAttributeStore.scala
index 2fd251c165..01ff98a185 100644
--- a/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveAttributeStore.scala
+++ b/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveAttributeStore.scala
@@ -25,8 +25,8 @@ import geotrellis.spark.io.accumulo.AccumuloAttributeStore
import geotrellis.util._
import geotrellis.util.annotations.experimental
import geotrellis.vector.Extent
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.locationtech.jts.geom._
import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
import mil.nga.giat.geowave.core.geotime.ingest._
@@ -127,9 +127,8 @@ import spray.json.DefaultJsonProtocol._
val accumuloUser: String,
val accumuloPass: String,
val geowaveNamespace: String
-) extends DiscreteLayerAttributeStore with LazyLogging {
-
- logger.error("GeoWave support is experimental")
+) extends DiscreteLayerAttributeStore {
+ @transient protected lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName))
val zkInstance = (new ZooKeeperInstance(accumuloInstance, zookeepers))
val token = new PasswordToken(accumuloPass)
diff --git a/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerReader.scala b/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerReader.scala
index 6669569750..c6bcb54977 100644
--- a/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerReader.scala
+++ b/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerReader.scala
@@ -27,8 +27,6 @@ import geotrellis.spark.tiling.{LayoutDefinition, MapKeyTransform}
import geotrellis.util._
import geotrellis.util.annotations.experimental
import geotrellis.vector.Extent
-
-import com.typesafe.scalalogging.LazyLogging
import org.locationtech.jts.geom._
import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
import mil.nga.giat.geowave.core.geotime.ingest._
@@ -94,9 +92,7 @@ object GeoWaveLayerReader {
* @define experimental EXPERIMENTAL@experimental
*/
@experimental class GeoWaveLayerReader(val attributeStore: AttributeStore)
- (implicit sc: SparkContext) extends LazyLogging {
-
- logger.error("GeoWave support is experimental")
+ (implicit sc: SparkContext) {
val defaultNumPartitions = sc.defaultParallelism
diff --git a/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerWriter.scala b/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerWriter.scala
index f49a34bf20..bf5ce91dc0 100644
--- a/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerWriter.scala
+++ b/geowave/src/main/scala/geotrellis/spark/io/geowave/GeoWaveLayerWriter.scala
@@ -27,8 +27,8 @@ import geotrellis.spark.io.index.KeyIndex
import geotrellis.util._
import geotrellis.util.annotations.experimental
import geotrellis.vector.Extent
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import mil.nga.giat.geowave.adapter.raster.adapter.merge.RasterTileRowTransform
import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
import mil.nga.giat.geowave.core.geotime.index.dimension._
@@ -70,7 +70,8 @@ import mil.nga.giat.geowave.core.store.data.VisibilityWriter
/**
* @define experimental EXPERIMENTAL@experimental
*/
-@experimental object GeoWaveLayerWriter extends LazyLogging {
+@experimental object GeoWaveLayerWriter {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
/** $experimental */
@experimental def write[
@@ -247,10 +248,8 @@ import mil.nga.giat.geowave.core.store.data.VisibilityWriter
@experimental class GeoWaveLayerWriter(
val attributeStore: GeoWaveAttributeStore,
val accumuloWriter: AccumuloWriteStrategy
-)(implicit sc: SparkContext)
- extends LazyLogging {
-
- logger.error("GeoWave support is experimental")
+)(implicit sc: SparkContext) {
+ @transient protected lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName))
/** $experimental */
@experimental def write[
diff --git a/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerDeleter.scala b/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerDeleter.scala
index 4ddafa2a32..98e7098527 100644
--- a/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerDeleter.scala
+++ b/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerDeleter.scala
@@ -18,14 +18,15 @@ package geotrellis.spark.io.hbase
import geotrellis.spark.LayerId
import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.filter.PrefixFilter
import scala.collection.JavaConverters._
-class HBaseLayerDeleter(val attributeStore: AttributeStore, instance: HBaseInstance) extends LazyLogging with LayerDeleter[LayerId] {
+class HBaseLayerDeleter(val attributeStore: AttributeStore, instance: HBaseInstance) extends LayerDeleter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def delete(id: LayerId): Unit = {
try{
diff --git a/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerWriter.scala b/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerWriter.scala
index a43028f141..83ae7e0bf7 100644
--- a/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerWriter.scala
+++ b/hbase/src/main/scala/geotrellis/spark/io/hbase/HBaseLayerWriter.scala
@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd.RDD
import spray.json._
@@ -34,7 +34,8 @@ class HBaseLayerWriter(
val attributeStore: AttributeStore,
instance: HBaseInstance,
table: String
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
// Layer Updating
def overwrite[
diff --git a/project/Version.scala b/project/Version.scala
index a77a4b2954..c87d01b668 100644
--- a/project/Version.scala
+++ b/project/Version.scala
@@ -15,7 +15,7 @@
*/
object Version {
- val geotrellis = "2.3.2" + Environment.versionSuffix
+ val geotrellis = "2.3.3" + Environment.versionSuffix
val scala = "2.11.12"
val crossScala = Seq(scala, "2.12.7")
val geotools = "20.0"
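With the version constant bumped, downstream builds pick up the release through their normal dependency declaration. A hypothetical sbt line, assuming the usual LocationTech organization and the geotrellis-spark module (neither is stated in this diff):

    // Hypothetical downstream build.sbt entry for the 2.3.3 release.
    libraryDependencies += "org.locationtech.geotrellis" %% "geotrellis-spark" % "2.3.3"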
diff --git a/raster/src/main/scala/geotrellis/raster/ArrayTile.scala b/raster/src/main/scala/geotrellis/raster/ArrayTile.scala
index d41487095a..c3ca54221b 100644
--- a/raster/src/main/scala/geotrellis/raster/ArrayTile.scala
+++ b/raster/src/main/scala/geotrellis/raster/ArrayTile.scala
@@ -42,9 +42,6 @@ trait ArrayTile extends Tile with Serializable {
def convert(targetCellType: CellType): ArrayTile = {
val tile = ArrayTile.alloc(targetCellType, cols, rows)
- if(targetCellType.isFloatingPoint != cellType.isFloatingPoint)
- logger.debug(s"Conversion from $cellType to $targetCellType may lead to data loss.")
-
if(!cellType.isFloatingPoint) {
cfor(0)(_ < rows, _ + 1) { row =>
cfor(0)(_ < cols, _ + 1) { col =>
diff --git a/raster/src/main/scala/geotrellis/raster/CompositeTile.scala b/raster/src/main/scala/geotrellis/raster/CompositeTile.scala
index 5d695ebc73..fa11134558 100644
--- a/raster/src/main/scala/geotrellis/raster/CompositeTile.scala
+++ b/raster/src/main/scala/geotrellis/raster/CompositeTile.scala
@@ -166,9 +166,6 @@ case class CompositeTile(tiles: Seq[Tile],
if (cols.toLong * rows.toLong > Int.MaxValue.toLong) {
sys.error("This tiled raster is too big to convert into an array.")
} else {
- if(targetCellType.isFloatingPoint != cellType.isFloatingPoint)
- logger.warn(s"Conversion from $cellType to $targetCellType may lead to data loss.")
-
val tile = ArrayTile.alloc(targetCellType, cols, rows)
val len = cols * rows
val layoutCols = tileLayout.layoutCols
diff --git a/raster/src/main/scala/geotrellis/raster/ConstantTile.scala b/raster/src/main/scala/geotrellis/raster/ConstantTile.scala
index 8f554cad46..c0f063fa91 100644
--- a/raster/src/main/scala/geotrellis/raster/ConstantTile.scala
+++ b/raster/src/main/scala/geotrellis/raster/ConstantTile.scala
@@ -75,9 +75,6 @@ trait ConstantTile extends Tile {
* @return The new Tile
*/
def convert(newType: CellType): Tile = {
- if(newType.isFloatingPoint != cellType.isFloatingPoint)
- logger.warn(s"Conversion from $cellType to $newType may lead to data loss.")
-
newType match {
case BitCellType => new BitConstantTile(if (iVal == 0) false else true, cols, rows)
case ct: ByteCells => ByteConstantTile(iVal.toByte, cols, rows, ct)
diff --git a/raster/src/main/scala/geotrellis/raster/CroppedTile.scala b/raster/src/main/scala/geotrellis/raster/CroppedTile.scala
index d63e4be2f2..0762ba1e23 100644
--- a/raster/src/main/scala/geotrellis/raster/CroppedTile.scala
+++ b/raster/src/main/scala/geotrellis/raster/CroppedTile.scala
@@ -138,9 +138,6 @@ case class CroppedTile(sourceTile: Tile,
* @return An MutableArrayTile
*/
def mutable(targetCellType: CellType): MutableArrayTile = {
- if(targetCellType.isFloatingPoint != cellType.isFloatingPoint)
- logger.warn(s"Conversion from $cellType to $targetCellType may lead to data loss.")
-
val tile = ArrayTile.alloc(targetCellType, cols, rows)
if(!cellType.isFloatingPoint) {
diff --git a/raster/src/main/scala/geotrellis/raster/Tile.scala b/raster/src/main/scala/geotrellis/raster/Tile.scala
index 8a2c3a2e27..1b0c6db8b0 100644
--- a/raster/src/main/scala/geotrellis/raster/Tile.scala
+++ b/raster/src/main/scala/geotrellis/raster/Tile.scala
@@ -16,12 +16,11 @@
package geotrellis.raster
-import com.typesafe.scalalogging.LazyLogging
/**
* Base trait for a Tile.
*/
-trait Tile extends CellGrid with IterableTile with MappableTile[Tile] with LazyLogging {
+trait Tile extends CellGrid with IterableTile with MappableTile[Tile] {
/**
* Execute a function at each pixel of a [[Tile]]. Two functions
@@ -97,7 +96,7 @@ trait Tile extends CellGrid with IterableTile with MappableTile[Tile] with LazyL
/**
* Returns a mutable instance of this tile.
- *
+ *
* @note When the underlying class is an instance of [[MutableArrayTile]] it will return itself without performing a copy.
* This is used internally as a performance optimization when the ownership of the tile is controlled.
*/
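Because Tile no longer mixes in LazyLogging, the per-conversion data-loss warnings are removed from the convert and mutable implementations in this change set (ArrayTile, CompositeTile, ConstantTile, CroppedTile above, GeoTiffTile below) rather than keeping a logger around just for that check. Callers that still want the warning can reproduce the removed condition themselves; a small sketch under that assumption, not part of this change set:

    import geotrellis.raster._

    // Mirrors the removed check: a float/integral mismatch between the source
    // and target cell types can truncate or round values during conversion.
    def convertWithWarning(tile: Tile, target: CellType): Tile = {
      if (target.isFloatingPoint != tile.cellType.isFloatingPoint)
        println(s"Conversion from ${tile.cellType} to $target may lead to data loss.")
      tile.convert(target)
    }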
diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffMultibandTile.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffMultibandTile.scala
index c0cf685f91..76417b67c3 100644
--- a/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffMultibandTile.scala
+++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffMultibandTile.scala
@@ -19,7 +19,6 @@ package geotrellis.raster.io.geotiff
import geotrellis.raster._
import geotrellis.raster.io.geotiff.compression._
-import com.typesafe.scalalogging.LazyLogging
import spire.syntax.cfor._
import scala.collection.mutable
@@ -276,7 +275,7 @@ object GeoTiffMultibandTile {
val segmentPixelCols = segmentLayout.tileLayout.tileCols
val segmentPixelRows = segmentLayout.tileLayout.tileRows
- val segments: Iterator[((Int, Int), MultibandTile)] =
+ val segments: Iterator[((Int, Int), MultibandTile)] =
for {
windowRowMin <- Iterator.range(start = 0, end = tile.rows, step = segmentPixelRows)
windowColMin <- Iterator.range(start = 0, end = tile.cols, step = segmentPixelCols)
@@ -308,7 +307,7 @@ abstract class GeoTiffMultibandTile(
val compression: Compression,
val bandCount: Int,
val overviews: List[GeoTiffMultibandTile] = Nil
-) extends MultibandTile with GeoTiffImageData with GeoTiffSegmentLayoutTransform with MacroGeotiffMultibandCombiners with LazyLogging {
+) extends MultibandTile with GeoTiffImageData with GeoTiffSegmentLayoutTransform with MacroGeotiffMultibandCombiners {
val cellType: CellType
val cols: Int = segmentLayout.totalCols
val rows: Int = segmentLayout.totalRows
diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffTile.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffTile.scala
index 07f8dece46..62e232d62a 100644
--- a/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffTile.scala
+++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/GeoTiffTile.scala
@@ -317,9 +317,6 @@ abstract class GeoTiffTile(
* @return A new [[Tile]] that contains the new CellTypes
*/
def convert(newCellType: CellType): GeoTiffTile = {
- if(newCellType.isFloatingPoint != cellType.isFloatingPoint)
- logger.warn(s"Conversion from $cellType to $newCellType may lead to data loss.")
-
val arr = Array.ofDim[Array[Byte]](segmentCount)
val compressor = compression.createCompressor(segmentCount)
diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala
index 41ca57fa5e..479c891d8a 100644
--- a/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala
+++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/LazySegmentBytes.scala
@@ -18,8 +18,8 @@ package geotrellis.raster.io.geotiff
import geotrellis.util._
import geotrellis.raster.io.geotiff.tags._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import monocle.syntax.apply._
/**
@@ -38,9 +38,11 @@ class LazySegmentBytes(
tiffTags: TiffTags,
maxChunkSize: Int = 32 * 1024 * 1024,
maxOffsetBetweenChunks: Int = 1024
-) extends SegmentBytes with LazyLogging {
+) extends SegmentBytes {
import LazySegmentBytes.Segment
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def length: Int = tiffTags.segmentCount
val (segmentOffsets, segmentByteCounts) =
diff --git a/raster/src/main/scala/geotrellis/raster/io/geotiff/reader/TiffTagsReader.scala b/raster/src/main/scala/geotrellis/raster/io/geotiff/reader/TiffTagsReader.scala
index e39b795a50..f2dd2b6e4d 100644
--- a/raster/src/main/scala/geotrellis/raster/io/geotiff/reader/TiffTagsReader.scala
+++ b/raster/src/main/scala/geotrellis/raster/io/geotiff/reader/TiffTagsReader.scala
@@ -22,14 +22,16 @@ import TagCodes._
import TiffFieldType._
import geotrellis.util.{ByteReader, Filesystem}
import geotrellis.raster.io.geotiff.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import spire.syntax.cfor._
import monocle.syntax.apply._
import java.nio.{ByteBuffer, ByteOrder}
-object TiffTagsReader extends LazyLogging {
+object TiffTagsReader {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def read(path: String): TiffTags =
read(Filesystem.toMappedByteBuffer(path))
diff --git a/s3-testkit/src/main/scala/geotrellis/spark/io/s3/testkit/MockS3Client.scala b/s3-testkit/src/main/scala/geotrellis/spark/io/s3/testkit/MockS3Client.scala
index 63bad8045c..c493275b66 100644
--- a/s3-testkit/src/main/scala/geotrellis/spark/io/s3/testkit/MockS3Client.scala
+++ b/s3-testkit/src/main/scala/geotrellis/spark/io/s3/testkit/MockS3Client.scala
@@ -17,8 +17,8 @@
package geotrellis.spark.io.s3.testkit
import geotrellis.spark.io.s3._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import com.amazonaws.services.s3.model._
import com.amazonaws.services.s3.internal.AmazonS3ExceptionBuilder
import org.apache.commons.io.IOUtils
@@ -29,9 +29,11 @@ import scala.collection.immutable.TreeMap
import scala.collection.JavaConverters._
import scala.collection.mutable
-class MockS3Client() extends S3Client with LazyLogging {
+class MockS3Client() extends S3Client {
import MockS3Client._
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def doesBucketExist(bucket: String): Boolean = buckets.containsKey(bucket)
def doesObjectExist(bucket: String, key: String): Boolean =
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3Client.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3Client.scala
index b5cd2ff2c1..bc27fdd73e 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3Client.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3Client.scala
@@ -16,7 +16,6 @@
package geotrellis.spark.io.s3
-import com.typesafe.scalalogging.LazyLogging
import com.amazonaws.auth._
import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion
import com.amazonaws.retry.PredefinedRetryPolicies
@@ -26,7 +25,7 @@ import java.io.{InputStream, ByteArrayInputStream}
import scala.annotation.tailrec
import scala.collection.JavaConverters._
-trait S3Client extends LazyLogging with Serializable {
+trait S3Client extends Serializable {
def doesBucketExist(bucket: String): Boolean
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala
index 7023338b09..976c595291 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3GeoTiffRDD.scala
@@ -21,8 +21,8 @@ import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.io.RasterReader
import geotrellis.vector._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
@@ -33,10 +33,12 @@ import java.nio.ByteBuffer
/**
* The S3GeoTiffRDD object allows for the creation of whole or windowed RDD[(K, V)]s from files on S3.
*/
-object S3GeoTiffRDD extends LazyLogging {
+object S3GeoTiffRDD {
final val GEOTIFF_TIME_TAG_DEFAULT = "TIFFTAG_DATETIME"
final val GEOTIFF_TIME_FORMAT_DEFAULT = "yyyy:MM:dd HH:mm:ss"
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
/**
* This case class contains the various parameters one can set when reading RDDs from S3 using Spark.
*
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3InputFormat.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3InputFormat.scala
index 3e89a8fe0a..674bd3af0b 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3InputFormat.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3InputFormat.scala
@@ -17,8 +17,8 @@
package geotrellis.spark.io.s3
import geotrellis.spark.io.hadoop._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import com.amazonaws.regions._
import com.amazonaws.services.s3.model.{ListObjectsRequest, S3ObjectSummary}
import org.apache.hadoop.conf.Configuration
@@ -34,9 +34,11 @@ import scala.util.matching.Regex
* - ProfileCredentialsProvider
* - InstanceProfileCredentialsProvider
*/
-abstract class S3InputFormat[K, V] extends InputFormat[K,V] with LazyLogging {
+abstract class S3InputFormat[K, V] extends InputFormat[K,V] {
import S3InputFormat._
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def getS3Client(context: JobContext): S3Client =
S3InputFormat.getS3Client(context)
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3InputSplit.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3InputSplit.scala
index a1bc6b5504..165d7fd596 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3InputSplit.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3InputSplit.scala
@@ -16,7 +16,6 @@
package geotrellis.spark.io.s3
-import com.typesafe.scalalogging.LazyLogging
import com.amazonaws.services.s3.model.S3ObjectSummary
import org.apache.hadoop.io.Writable
import org.apache.hadoop.mapreduce.InputSplit
@@ -27,8 +26,7 @@ import java.io.{DataOutput, DataInput}
 * Represents a batch of keys to be read from an S3 bucket.
* AWS credentials have already been discovered and provided by the S3InputFormat.
*/
-class S3InputSplit extends InputSplit with Writable with LazyLogging
-{
+class S3InputSplit extends InputSplit with Writable {
var sessionToken: String = null
var bucket: String = _
var keys: Seq[String] = Vector.empty
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerDeleter.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerDeleter.scala
index 775252dcc8..7cf0310531 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerDeleter.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerDeleter.scala
@@ -18,10 +18,11 @@ package geotrellis.spark.io.s3
import geotrellis.spark.LayerId
import geotrellis.spark.io._
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
-import com.typesafe.scalalogging.LazyLogging
-
-class S3LayerDeleter(val attributeStore: AttributeStore) extends LazyLogging with LayerDeleter[LayerId] {
+class S3LayerDeleter(val attributeStore: AttributeStore) extends LayerDeleter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def getS3Client: () => S3Client = () => S3Client.DEFAULT
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerReader.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerReader.scala
index 74bdb5f9af..b89a2d6196 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerReader.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerReader.scala
@@ -22,7 +22,6 @@ import geotrellis.spark.io.avro._
import geotrellis.spark.io.index._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import spray.json.JsonFormat
@@ -37,7 +36,7 @@ import scala.reflect.ClassTag
* @tparam M Type of Metadata associated with the RDD[(K,V)]
*/
class S3LayerReader(val attributeStore: AttributeStore)(implicit sc: SparkContext)
- extends FilteringLayerReader[LayerId] with LazyLogging {
+ extends FilteringLayerReader[LayerId] {
val defaultNumPartitions = sc.defaultParallelism
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerWriter.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerWriter.scala
index 9da71f269f..9eb765a037 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerWriter.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3LayerWriter.scala
@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd.RDD
import com.amazonaws.services.s3.model.PutObjectRequest
import spray.json._
@@ -48,7 +48,8 @@ class S3LayerWriter(
bucket: String,
keyPrefix: String,
putObjectModifier: PutObjectRequest => PutObjectRequest = { p => p }
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def rddWriter: S3RDDWriter = S3RDDWriter
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/S3RecordReader.scala b/s3/src/main/scala/geotrellis/spark/io/s3/S3RecordReader.scala
index 86696b7d17..af9571aa72 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/S3RecordReader.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/S3RecordReader.scala
@@ -18,15 +18,17 @@ package geotrellis.spark.io.s3
import geotrellis.spark.io.s3.util.S3RangeReader
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import com.amazonaws.services.s3.model.GetObjectRequest
import org.apache.hadoop.mapreduce.{InputSplit, TaskAttemptContext, RecordReader}
import org.apache.commons.io.IOUtils
/** This is the base class for readers that will create key value pairs for object requests.
* Subclass must extend [readObjectRequest] method to map from S3 object requests to (K,V) */
-abstract class BaseS3RecordReader[K, V](s3Client: S3Client) extends RecordReader[K, V] with LazyLogging {
+abstract class BaseS3RecordReader[K, V](s3Client: S3Client) extends RecordReader[K, V] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
protected var bucket: String = _
protected var keys: Iterator[String] = null
protected var curKey: K = _
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGCollectionLayerReader.scala b/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGCollectionLayerReader.scala
index b2dc293453..f9b27c35e4 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGCollectionLayerReader.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGCollectionLayerReader.scala
@@ -25,8 +25,6 @@ import geotrellis.spark.io.index._
import geotrellis.spark.io.s3._
import geotrellis.spark.io.s3.conf.S3Config
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
import spray.json.JsonFormat
import java.net.URI
@@ -41,7 +39,7 @@ class S3COGCollectionLayerReader(
val attributeStore: AttributeStore,
val getS3Client: () => S3Client = () => S3Client.DEFAULT,
val defaultThreads: Int = S3COGCollectionLayerReader.defaultThreadCount
-) extends COGCollectionLayerReader[LayerId] with LazyLogging {
+) extends COGCollectionLayerReader[LayerId] {
implicit def getByteReader(uri: URI): ByteReader = byteReader(uri, getS3Client())
diff --git a/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGLayerReader.scala b/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGLayerReader.scala
index 2e5aabba24..db9e57c27b 100644
--- a/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGLayerReader.scala
+++ b/s3/src/main/scala/geotrellis/spark/io/s3/cog/S3COGLayerReader.scala
@@ -26,7 +26,6 @@ import geotrellis.spark.io.cog._
import geotrellis.spark.io.index._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import spray.json.JsonFormat
import java.net.URI
@@ -42,7 +41,7 @@ class S3COGLayerReader(
val attributeStore: AttributeStore,
val getS3Client: () => S3Client = () => S3Client.DEFAULT,
val defaultThreads: Int = S3COGLayerReader.defaultThreadCount
-)(@transient implicit val sc: SparkContext) extends COGLayerReader[LayerId] with LazyLogging {
+)(@transient implicit val sc: SparkContext) extends COGLayerReader[LayerId] {
val defaultNumPartitions: Int = sc.defaultParallelism
diff --git a/spark-etl/src/main/scala/geotrellis/spark/etl/Etl.scala b/spark-etl/src/main/scala/geotrellis/spark/etl/Etl.scala
index dba2aea749..3b2d7685b1 100644
--- a/spark-etl/src/main/scala/geotrellis/spark/etl/Etl.scala
+++ b/spark-etl/src/main/scala/geotrellis/spark/etl/Etl.scala
@@ -32,8 +32,8 @@ import geotrellis.spark.tiling._
import geotrellis.util._
import geotrellis.vector._
import geotrellis.spark.etl.config._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark._
import org.apache.spark.rdd.RDD
@@ -75,8 +75,9 @@ object Etl {
}
}
-case class Etl(conf: EtlConf, @transient modules: Seq[TypedModule] = Etl.defaultModules) extends LazyLogging {
+case class Etl(conf: EtlConf, @transient modules: Seq[TypedModule] = Etl.defaultModules) {
import Etl.SaveAction
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
val input = conf.input
val output = conf.output
diff --git a/spark-etl/src/main/scala/geotrellis/spark/etl/accumulo/AccumuloOutput.scala b/spark-etl/src/main/scala/geotrellis/spark/etl/accumulo/AccumuloOutput.scala
index 385eb15082..5f19555b9c 100644
--- a/spark-etl/src/main/scala/geotrellis/spark/etl/accumulo/AccumuloOutput.scala
+++ b/spark-etl/src/main/scala/geotrellis/spark/etl/accumulo/AccumuloOutput.scala
@@ -19,10 +19,12 @@ package geotrellis.spark.etl.accumulo
import geotrellis.spark.etl.OutputPlugin
import geotrellis.spark.etl.config.{AccumuloProfile, BackendProfile, EtlConf}
import geotrellis.spark.io.accumulo.{AccumuloAttributeStore, AccumuloWriteStrategy, HdfsWriteStrategy, SocketWriteStrategy}
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
-import com.typesafe.scalalogging.LazyLogging
+trait AccumuloOutput[K, V, M] extends OutputPlugin[K, V, M] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
-trait AccumuloOutput[K, V, M] extends OutputPlugin[K, V, M] with LazyLogging {
val name = "accumulo"
def strategy(profile: Option[BackendProfile]): AccumuloWriteStrategy = profile match {
@@ -45,6 +47,6 @@ trait AccumuloOutput[K, V, M] extends OutputPlugin[K, V, M] with LazyLogging {
}
case _ => throw new Exception("Backend profile not matches backend type")
}
-
+
def attributes(conf: EtlConf) = AccumuloAttributeStore(getInstance(conf.outputProfile).connector)
}
diff --git a/spark-etl/src/main/scala/geotrellis/spark/etl/config/BaseEtlConf.scala b/spark-etl/src/main/scala/geotrellis/spark/etl/config/BaseEtlConf.scala
index 1807d585d4..35e30be98d 100644
--- a/spark-etl/src/main/scala/geotrellis/spark/etl/config/BaseEtlConf.scala
+++ b/spark-etl/src/main/scala/geotrellis/spark/etl/config/BaseEtlConf.scala
@@ -17,14 +17,16 @@
package geotrellis.spark.etl.config
import geotrellis.spark.etl.config.json._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.SparkContext
import spray.json._
import scala.collection.JavaConverters._
-trait BaseEtlConf extends ConfigParse with LazyLogging {
+trait BaseEtlConf extends ConfigParse {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
private def loggerError(str: String, color: String = Console.RED) = logger.error(s"${color}${str}${Console.RESET}")
val help = """
diff --git a/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/ast/Output.scala b/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/ast/Output.scala
index 6a06603c3f..7a235b29a4 100644
--- a/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/ast/Output.scala
+++ b/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/ast/Output.scala
@@ -25,8 +25,6 @@ import geotrellis.spark.io.LayerWriter
import geotrellis.spark.io.avro.AvroRecordCodec
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.util.{Component, GetComponent}
-
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rdd.RDD
import spray.json.JsonFormat
@@ -34,7 +32,7 @@ import scala.reflect.ClassTag
trait Output[T] extends Node[T]
-object Output extends LazyLogging {
+object Output {
def write[
K: SpatialComponent : AvroRecordCodec : JsonFormat : ClassTag,
V <: CellGrid : AvroRecordCodec : ClassTag: ? => TileMergeMethods[V]: ? => TilePrototypeMethods[V],
diff --git a/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/Implicits.scala b/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/Implicits.scala
index 463b3d9f7e..7078e6b8bd 100644
--- a/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/Implicits.scala
+++ b/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/Implicits.scala
@@ -28,7 +28,6 @@ import geotrellis.raster.resample._
import geotrellis.spark.tiling._
import geotrellis.vector._
-import com.typesafe.scalalogging.LazyLogging
import _root_.io.circe.generic.extras.Configuration
import _root_.io.circe._
import _root_.io.circe.syntax._
@@ -42,7 +41,7 @@ import scala.util.Try
object Implicits extends Implicits
-trait Implicits extends LazyLogging {
+trait Implicits {
implicit val config: Configuration = Configuration.default.withDefaults.withSnakeCaseMemberNames
val pipelineJsonPrinter: Printer = Printer.spaces2.copy(dropNullValues = true)
diff --git a/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/PipelineExpr.scala b/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/PipelineExpr.scala
index 02cccc61df..e94768c353 100644
--- a/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/PipelineExpr.scala
+++ b/spark-pipeline/src/main/scala/geotrellis/spark/pipeline/json/PipelineExpr.scala
@@ -17,9 +17,8 @@
package geotrellis.spark.pipeline.json
import geotrellis.spark.pipeline.PipelineConstructor
-import com.typesafe.scalalogging.LazyLogging
-trait PipelineExpr extends LazyLogging {
+trait PipelineExpr {
def ~(other: PipelineExpr): PipelineConstructor = this :: other :: Nil
def ~(other: Option[PipelineExpr]): PipelineConstructor =
diff --git a/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala b/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala
index 1ab59724cd..ff992ef488 100644
--- a/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/GeoTiffInfoReader.scala
@@ -20,14 +20,16 @@ import geotrellis.raster.io.geotiff.reader.GeoTiffReader.GeoTiffInfo
import geotrellis.vector.Geometry
import geotrellis.raster.GridBounds
import geotrellis.raster.io.geotiff.GeoTiffSegmentLayoutTransform
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import java.net.URI
-private [geotrellis] trait GeoTiffInfoReader extends LazyLogging {
+private [geotrellis] trait GeoTiffInfoReader {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def geoTiffInfoRDD(implicit sc: SparkContext): RDD[String]
def getGeoTiffInfo(uri: String): GeoTiffInfo
diff --git a/spark/src/main/scala/geotrellis/spark/io/LayerWriter.scala b/spark/src/main/scala/geotrellis/spark/io/LayerWriter.scala
index 46891be464..828045e51b 100644
--- a/spark/src/main/scala/geotrellis/spark/io/LayerWriter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/LayerWriter.scala
@@ -22,8 +22,6 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
import org.apache.avro._
import org.apache.spark.rdd.RDD
import spray.json._
@@ -172,7 +170,7 @@ trait LayerWriter[ID] {
}
}
-object LayerWriter extends LazyLogging {
+object LayerWriter {
/**
* Produce LayerWriter instance based on URI description.
diff --git a/spark/src/main/scala/geotrellis/spark/io/cog/COGLayerWriter.scala b/spark/src/main/scala/geotrellis/spark/io/cog/COGLayerWriter.scala
index da954b3d05..d98bfe317f 100644
--- a/spark/src/main/scala/geotrellis/spark/io/cog/COGLayerWriter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/cog/COGLayerWriter.scala
@@ -27,8 +27,8 @@ import geotrellis.raster.io.geotiff.compression.{Compression, NoCompression}
import geotrellis.spark._
import geotrellis.spark.io.{AttributeNotFoundError, AttributeStore, LayerNotFoundError, LayerOutOfKeyBoundsError, Writer}
import geotrellis.spark.io.index._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd.RDD
import spray.json._
@@ -37,8 +37,9 @@ import java.util.ServiceLoader
import scala.reflect._
-trait COGLayerWriter extends LazyLogging with Serializable {
+trait COGLayerWriter extends Serializable {
import COGLayerWriter.Options
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
val attributeStore: AttributeStore
diff --git a/spark/src/main/scala/geotrellis/spark/io/file/FileCollectionLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/file/FileCollectionLayerReader.scala
index 08de20e539..cab707d3c3 100644
--- a/spark/src/main/scala/geotrellis/spark/io/file/FileCollectionLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/file/FileCollectionLayerReader.scala
@@ -22,7 +22,6 @@ import geotrellis.spark.io.avro._
import geotrellis.spark.io.index._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import spray.json.JsonFormat
import scala.reflect.ClassTag
@@ -38,7 +37,7 @@ import scala.reflect.ClassTag
class FileCollectionLayerReader(
val attributeStore: AttributeStore,
catalogPath: String
-) extends CollectionLayerReader[LayerId] with LazyLogging {
+) extends CollectionLayerReader[LayerId] {
def read[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
@@ -76,5 +75,3 @@ object FileCollectionLayerReader {
def apply(attributeStore: FileAttributeStore): FileCollectionLayerReader =
apply(attributeStore, attributeStore.catalogPath)
}
-
-
diff --git a/spark/src/main/scala/geotrellis/spark/io/file/FileLayerDeleter.scala b/spark/src/main/scala/geotrellis/spark/io/file/FileLayerDeleter.scala
index 987f0c0f3e..f7eafa2634 100644
--- a/spark/src/main/scala/geotrellis/spark/io/file/FileLayerDeleter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/file/FileLayerDeleter.scala
@@ -19,11 +19,13 @@ package geotrellis.spark.io.file
import geotrellis.spark._
import geotrellis.spark.io._
-import com.typesafe.scalalogging.LazyLogging
-
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import java.io.File
-class FileLayerDeleter(val attributeStore: FileAttributeStore) extends LazyLogging with LayerDeleter[LayerId] {
+class FileLayerDeleter(val attributeStore: FileAttributeStore) extends LayerDeleter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def delete(id: LayerId): Unit =
try {
val header = attributeStore.readHeader[FileLayerHeader](id)
diff --git a/spark/src/main/scala/geotrellis/spark/io/file/FileLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/file/FileLayerReader.scala
index b22d652f66..96a3445ade 100644
--- a/spark/src/main/scala/geotrellis/spark/io/file/FileLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/file/FileLayerReader.scala
@@ -22,7 +22,6 @@ import geotrellis.spark.io.avro._
import geotrellis.spark.io.index._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import spray.json.JsonFormat
@@ -39,7 +38,7 @@ import scala.reflect.ClassTag
class FileLayerReader(
val attributeStore: AttributeStore,
catalogPath: String
-)(implicit sc: SparkContext) extends FilteringLayerReader[LayerId] with LazyLogging {
+)(implicit sc: SparkContext) extends FilteringLayerReader[LayerId] {
val defaultNumPartitions = sc.defaultParallelism
diff --git a/spark/src/main/scala/geotrellis/spark/io/file/FileLayerWriter.scala b/spark/src/main/scala/geotrellis/spark/io/file/FileLayerWriter.scala
index 5d2bee2f64..fe5870f0db 100644
--- a/spark/src/main/scala/geotrellis/spark/io/file/FileLayerWriter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/file/FileLayerWriter.scala
@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd.RDD
import spray.json._
@@ -47,7 +47,8 @@ import java.io.File
class FileLayerWriter(
val attributeStore: AttributeStore,
catalogPath: String
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
// Layer Updating
def overwrite[
@@ -93,7 +94,7 @@ class FileLayerWriter(
attributeStore.writeLayerAttributes(id, header, metadata, keyIndex, writerSchema)
FileRDDWriter.update[K, V](rdd, layerPath, keyPath, Some(writerSchema), mergeFunc)
-
+
case None =>
logger.warn(s"Skipping update with empty bounds for $id.")
}
diff --git a/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGCollectionLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGCollectionLayerReader.scala
index 50409f316c..2e0658fa4e 100644
--- a/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGCollectionLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGCollectionLayerReader.scala
@@ -25,7 +25,6 @@ import geotrellis.spark.io.file.{FileAttributeStore, KeyPathGenerator}
import geotrellis.spark.io.file.conf.FileConfig
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import spray.json.JsonFormat
import java.net.URI
import java.io.File
@@ -42,7 +41,7 @@ class FileCOGCollectionLayerReader(
val catalogPath: String,
val defaultThreads: Int = FileCOGCollectionLayerReader.defaultThreadCount
)
- extends COGCollectionLayerReader[LayerId] with LazyLogging {
+ extends COGCollectionLayerReader[LayerId] {
implicit def getByteReader(uri: URI): ByteReader = byteReader(uri)
diff --git a/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGLayerReader.scala
index 9e7cfb0736..62972cd674 100644
--- a/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/file/cog/FileCOGLayerReader.scala
@@ -25,7 +25,6 @@ import geotrellis.spark.io.file.{FileAttributeStore, FileLayerHeader, KeyPathGen
import geotrellis.spark.io.file.conf.FileConfig
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import spray.json.JsonFormat
@@ -43,7 +42,7 @@ class FileCOGLayerReader(
val attributeStore: AttributeStore,
val catalogPath: String,
val defaultThreads: Int = FileCOGLayerReader.defaultThreadCount
-)(@transient implicit val sc: SparkContext) extends COGLayerReader[LayerId] with LazyLogging {
+)(@transient implicit val sc: SparkContext) extends COGLayerReader[LayerId] {
val defaultNumPartitions: Int = sc.defaultParallelism
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopCollectionLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopCollectionLayerReader.scala
index 1ac6e2df12..4946f8040c 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopCollectionLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopCollectionLayerReader.scala
@@ -21,7 +21,6 @@ import geotrellis.spark.io._
import geotrellis.spark.io.avro._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
@@ -39,7 +38,7 @@ class HadoopCollectionLayerReader(
conf: Configuration,
maxOpenFiles: Int = 16
)
- extends CollectionLayerReader[LayerId] with LazyLogging {
+ extends CollectionLayerReader[LayerId] {
def read[
K: AvroRecordCodec: Boundable: JsonFormat: ClassTag,
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala
index 90b0c3dad3..2f63494a3f 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopGeoTiffRDD.scala
@@ -23,8 +23,8 @@ import geotrellis.spark._
import geotrellis.spark.io.hadoop.formats._
import geotrellis.spark.io.RasterReader
import geotrellis.vector._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
@@ -37,10 +37,12 @@ import java.nio.ByteBuffer
/**
* Allows for reading of whole or windowed GeoTiff as RDD[(K, V)]s through Hadoop FileSystem API.
*/
-object HadoopGeoTiffRDD extends LazyLogging {
+object HadoopGeoTiffRDD {
final val GEOTIFF_TIME_TAG_DEFAULT = "TIFFTAG_DATETIME"
final val GEOTIFF_TIME_FORMAT_DEFAULT = "yyyy:MM:dd HH:mm:ss"
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
/**
* This case class contains the various parameters one can set when reading RDDs from Hadoop using Spark.
*
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerDeleter.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerDeleter.scala
index 65deaa997b..11612341b3 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerDeleter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerDeleter.scala
@@ -18,13 +18,15 @@ package geotrellis.spark.io.hadoop
import geotrellis.spark._
import geotrellis.spark.io._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark._
-class HadoopLayerDeleter(val attributeStore: AttributeStore, conf: Configuration) extends LazyLogging with LayerDeleter[LayerId] {
+class HadoopLayerDeleter(val attributeStore: AttributeStore, conf: Configuration) extends LayerDeleter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def delete(id: LayerId): Unit = {
try {
val header = attributeStore.readHeader[HadoopLayerHeader](id)
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerReader.scala
index 83727a8a77..93cb0a4640 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerReader.scala
@@ -21,7 +21,6 @@ import geotrellis.spark.io._
import geotrellis.spark.io.avro._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
@@ -40,7 +39,7 @@ import scala.reflect.ClassTag
class HadoopLayerReader(
val attributeStore: AttributeStore
)(implicit sc: SparkContext)
- extends FilteringLayerReader[LayerId] with LazyLogging {
+ extends FilteringLayerReader[LayerId] {
val defaultNumPartitions = sc.defaultParallelism
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerWriter.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerWriter.scala
index a81078f1ca..02a1bba147 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerWriter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopLayerWriter.scala
@@ -23,8 +23,8 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.index.KeyIndex
import geotrellis.spark.merge._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.fs.Path
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext
@@ -37,7 +37,8 @@ class HadoopLayerWriter(
rootPath: Path,
val attributeStore: AttributeStore,
indexInterval: Int = 4
-) extends LayerWriter[LayerId] with LazyLogging {
+) extends LayerWriter[LayerId] {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
// Layer Updating
def overwrite[
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDReader.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDReader.scala
index 4bbc7c05e1..364e911e94 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDReader.scala
@@ -21,8 +21,8 @@ import geotrellis.spark.io.avro._
import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.hadoop.formats._
import geotrellis.spark.util.KryoWrapper
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.avro.Schema
import org.apache.hadoop.io._
import org.apache.hadoop.fs.Path
@@ -30,7 +30,8 @@ import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
-object HadoopRDDReader extends LazyLogging {
+object HadoopRDDReader {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def readFully[
K: AvroRecordCodec: Boundable,
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDWriter.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDWriter.scala
index 2627a1de7b..fe074b58c1 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDWriter.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HadoopRDDWriter.scala
@@ -24,8 +24,8 @@ import geotrellis.spark.io.hadoop.formats.FilterMapFileInputFormat
import geotrellis.spark.io.index._
import geotrellis.spark.partition._
import geotrellis.spark.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io._
import org.apache.spark.rdd._
@@ -34,7 +34,9 @@ import org.apache.hadoop.conf.Configuration
import scala.reflect._
import scala.collection.mutable
-object HadoopRDDWriter extends LazyLogging {
+object HadoopRDDWriter {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
 /** Index interval at which map files should store an offset into the sequence file.
 * This value is picked as a compromise between in-memory footprint and the IO cost of retrieving a single record.
*/
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/HdfsUtils.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/HdfsUtils.scala
index 8ef51b6ad8..d275bbf2d5 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/HdfsUtils.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/HdfsUtils.scala
@@ -16,7 +16,8 @@
package geotrellis.spark.io.hadoop
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.hadoop.io.compress.CompressionCodecFactory
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
@@ -32,7 +33,8 @@ import scala.util.Random
abstract class LineScanner extends Iterator[String] with java.io.Closeable
-object HdfsUtils extends LazyLogging {
+object HdfsUtils {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def pathExists(path: Path, conf: Configuration): Boolean =
path.getFileSystem(conf).exists(path)
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGCollectionLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGCollectionLayerReader.scala
index 378e067fec..21e970f955 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGCollectionLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGCollectionLayerReader.scala
@@ -26,7 +26,6 @@ import geotrellis.spark.io.hadoop.conf.HadoopConfig
import geotrellis.spark.io.hadoop._
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import spray.json.JsonFormat
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
@@ -47,7 +46,7 @@ class HadoopCOGCollectionLayerReader(
val conf: SerializableConfiguration = SerializableConfiguration(new Configuration),
val defaultThreads: Int = HadoopCOGCollectionLayerReader.defaultThreadCount
)
- extends COGCollectionLayerReader[LayerId] with LazyLogging {
+ extends COGCollectionLayerReader[LayerId] {
implicit def getByteReader(uri: URI): ByteReader = byteReader(uri, conf.value)
@@ -84,4 +83,3 @@ object HadoopCOGCollectionLayerReader {
def apply(rootPath: Path, conf: Configuration): HadoopCOGCollectionLayerReader =
apply(HadoopAttributeStore(rootPath, conf))
}
-
diff --git a/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGLayerReader.scala b/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGLayerReader.scala
index e3ce8b078a..4329fba4c3 100644
--- a/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGLayerReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/hadoop/cog/HadoopCOGLayerReader.scala
@@ -26,7 +26,6 @@ import geotrellis.spark.io.hadoop._
import geotrellis.spark.io.hadoop.conf.HadoopConfig
import geotrellis.util._
-import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
import spray.json.JsonFormat
@@ -43,7 +42,7 @@ import scala.reflect.ClassTag
class HadoopCOGLayerReader(
val attributeStore: AttributeStore,
val defaultThreads: Int = HadoopCOGLayerReader.defaultThreadCount
-)(@transient implicit val sc: SparkContext) extends COGLayerReader[LayerId] with LazyLogging {
+)(@transient implicit val sc: SparkContext) extends COGLayerReader[LayerId] {
val hadoopConfiguration = SerializableConfiguration(sc.hadoopConfiguration)
diff --git a/spark/src/main/scala/geotrellis/spark/io/http/util/HttpRangeReader.scala b/spark/src/main/scala/geotrellis/spark/io/http/util/HttpRangeReader.scala
index a1e3f9a286..fb4dbfda26 100644
--- a/spark/src/main/scala/geotrellis/spark/io/http/util/HttpRangeReader.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/http/util/HttpRangeReader.scala
@@ -19,8 +19,8 @@ package geotrellis.spark.io.http.util
import geotrellis.util.RangeReader
import scalaj.http.Http
-import com.typesafe.scalalogging.LazyLogging
-
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import java.net.{URL, URI}
import scala.util.Try
@@ -31,7 +31,8 @@ import scala.util.Try
*
* @param url: A [[URL]] pointing to the desired GeoTiff.
*/
-class HttpRangeReader(url: URL, useHeadRequest: Boolean) extends RangeReader with LazyLogging {
+class HttpRangeReader(url: URL, useHeadRequest: Boolean) extends RangeReader {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
val request = Http(url.toString)
diff --git a/spark/src/main/scala/geotrellis/spark/pyramid/Pyramid.scala b/spark/src/main/scala/geotrellis/spark/pyramid/Pyramid.scala
index ecd19f9d2b..665b45e46a 100644
--- a/spark/src/main/scala/geotrellis/spark/pyramid/Pyramid.scala
+++ b/spark/src/main/scala/geotrellis/spark/pyramid/Pyramid.scala
@@ -29,7 +29,6 @@ import geotrellis.raster.prototype._
import geotrellis.util._
import geotrellis.vector.Extent
-import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.Partitioner
import org.apache.spark.rdd._
import org.apache.spark.storage.StorageLevel
@@ -70,7 +69,7 @@ case class Pyramid[
}
-object Pyramid extends LazyLogging {
+object Pyramid {
case class Options(
resampleMethod: ResampleMethod = NearestNeighbor,
partitioner: Option[Partitioner] = None
diff --git a/spark/src/main/scala/geotrellis/spark/reproject/TileRDDReproject.scala b/spark/src/main/scala/geotrellis/spark/reproject/TileRDDReproject.scala
index c0117848cf..af19d28e73 100644
--- a/spark/src/main/scala/geotrellis/spark/reproject/TileRDDReproject.scala
+++ b/spark/src/main/scala/geotrellis/spark/reproject/TileRDDReproject.scala
@@ -30,14 +30,16 @@ import geotrellis.spark.merge._
import geotrellis.spark.tiling._
import geotrellis.vector._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd._
import org.apache.spark._
import scala.reflect.ClassTag
-object TileRDDReproject extends LazyLogging {
+object TileRDDReproject {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
import Reproject.Options
/** Reproject a set of buffered
@@ -191,7 +193,7 @@ object TileRDDReproject extends LazyLogging {
val (raster, destRE, destRegion) = tup
rrp.regionReproject(raster, crs, destCrs, destRE, destRegion, rasterReprojectOptions.method, rasterReprojectOptions.errorThreshold).tile
}
-
+
def mergeValues(reprojectedTile: V, toReproject: (Raster[V], RasterExtent, Polygon)) = {
val (raster, destRE, destRegion) = toReproject
val destRaster = Raster(reprojectedTile, destRE.extent)
diff --git a/spark/src/main/scala/geotrellis/spark/tiling/CutTiles.scala b/spark/src/main/scala/geotrellis/spark/tiling/CutTiles.scala
index aa945a2c21..1982e02228 100644
--- a/spark/src/main/scala/geotrellis/spark/tiling/CutTiles.scala
+++ b/spark/src/main/scala/geotrellis/spark/tiling/CutTiles.scala
@@ -22,13 +22,15 @@ import geotrellis.raster.prototype._
import geotrellis.raster.resample._
import geotrellis.spark._
import geotrellis.util._
-
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import org.apache.spark.rdd._
import scala.reflect.ClassTag
-object CutTiles extends LazyLogging {
+object CutTiles {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def apply[
K1: (? => TilerKeyMethods[K1, K2]),
K2: SpatialComponent: ClassTag,
diff --git a/spark/src/main/scala/geotrellis/spark/util/SparkUtils.scala b/spark/src/main/scala/geotrellis/spark/util/SparkUtils.scala
index 0b04e2adf3..ec6feebbe6 100644
--- a/spark/src/main/scala/geotrellis/spark/util/SparkUtils.scala
+++ b/spark/src/main/scala/geotrellis/spark/util/SparkUtils.scala
@@ -16,14 +16,16 @@
package geotrellis.spark.util
-import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
-
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
import java.io.File
-object SparkUtils extends LazyLogging {
+object SparkUtils {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
def createSparkConf = new SparkConf()
private val gtHomeLock = new Object()
diff --git a/vector/src/main/scala/geotrellis/vector/GeomFactory.scala b/vector/src/main/scala/geotrellis/vector/GeomFactory.scala
index 76db211477..04634a41c3 100644
--- a/vector/src/main/scala/geotrellis/vector/GeomFactory.scala
+++ b/vector/src/main/scala/geotrellis/vector/GeomFactory.scala
@@ -18,12 +18,11 @@ package geotrellis.vector
import geotrellis.vector.conf.JtsConfig
-import com.typesafe.scalalogging.LazyLogging
import org.locationtech.jts.geom
import org.locationtech.jts.geom.{GeometryFactory, PrecisionModel}
import org.locationtech.jts.precision.GeometryPrecisionReducer
-object GeomFactory extends LazyLogging {
+object GeomFactory {
val precisionType: String = JtsConfig.precisionType
val precisionModel: PrecisionModel = JtsConfig.precisionModel
lazy val simplifier: GeometryPrecisionReducer = JtsConfig.simplifier
diff --git a/vector/src/main/scala/geotrellis/vector/conf/JtsConfig.scala b/vector/src/main/scala/geotrellis/vector/conf/JtsConfig.scala
index 6e6b932d23..378983e9ea 100644
--- a/vector/src/main/scala/geotrellis/vector/conf/JtsConfig.scala
+++ b/vector/src/main/scala/geotrellis/vector/conf/JtsConfig.scala
@@ -16,7 +16,6 @@
package geotrellis.vector.conf
-import com.typesafe.scalalogging.LazyLogging
import org.locationtech.jts.geom.PrecisionModel
import org.locationtech.jts.precision.GeometryPrecisionReducer
@@ -25,7 +24,7 @@ case class Simplification(scale: Double = 1e12) {
lazy val simplifier: GeometryPrecisionReducer = new GeometryPrecisionReducer(new PrecisionModel(scale))
}
case class Precision(`type`: String = "floating")
-case class JtsConfig(precision: Precision = Precision(), simplification: Simplification = Simplification()) extends LazyLogging {
+case class JtsConfig(precision: Precision = Precision(), simplification: Simplification = Simplification()) {
val precisionType: String = precision.`type`
val precisionModel: PrecisionModel = precisionType match {
case "floating" => new PrecisionModel()
diff --git a/vector/src/main/scala/geotrellis/vector/io/WKB/WKB.scala b/vector/src/main/scala/geotrellis/vector/io/WKB/WKB.scala
index 45a6768982..004f4a9f43 100644
--- a/vector/src/main/scala/geotrellis/vector/io/WKB/WKB.scala
+++ b/vector/src/main/scala/geotrellis/vector/io/WKB/WKB.scala
@@ -18,14 +18,16 @@ package geotrellis.vector.io.wkb
import geotrellis.vector._
-import com.typesafe.scalalogging.LazyLogging
import org.locationtech.jts.io.WKBReader
-
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
/** A thread-safe wrapper for the [https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary WKB]
* Writer and Reader
*/
-object WKB extends LazyLogging {
+object WKB {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
private val readerBox = new ThreadLocal[WKBReader]
private val writerBox = new ThreadLocal[WKBWriter]
diff --git a/vector/src/main/scala/geotrellis/vector/io/WKT/WKT.scala b/vector/src/main/scala/geotrellis/vector/io/WKT/WKT.scala
index a2d0f36c16..a2c273bd24 100644
--- a/vector/src/main/scala/geotrellis/vector/io/WKT/WKT.scala
+++ b/vector/src/main/scala/geotrellis/vector/io/WKT/WKT.scala
@@ -19,10 +19,13 @@ package geotrellis.vector.io.wkt
import geotrellis.vector._
import org.locationtech.jts.io.{WKTReader, WKTWriter}
-import com.typesafe.scalalogging.LazyLogging
+import org.slf4j.LoggerFactory
+import com.typesafe.scalalogging.Logger
/** A thread-safe wrapper for the WKT Writer and Reader */
-object WKT extends LazyLogging {
+object WKT {
+ @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
private val readerBox = new ThreadLocal[WKTReader]
private val writerBox = new ThreadLocal[WKTWriter]
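For reference, the logging pattern applied throughout this patch can be summarized by a minimal, self-contained sketch. The ExampleWriter class and its write method below are illustrative only and do not appear in the patch; the logger declaration is the one introduced in the hunks above.

import org.slf4j.LoggerFactory
import com.typesafe.scalalogging.Logger

class ExampleWriter {
  // Explicit logger declaration replacing the LazyLogging mixin.
  // @transient keeps the logger out of Java serialization (relevant for Spark closures),
  // and lazy defers construction until first use, mirroring LazyLogging's behaviour.
  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))

  def write(): Unit =
    logger.info("writing layer")
}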