issue #1332 - Upgrade to Scala 2.13
Snyk has created this PR to upgrade org.scala-lang:scala-library from 2.12.19 to 2.13.14.

See this package in maven:
org.scala-lang:scala-library

See this project in Snyk:
https://app.snyk.io/org/wajda/project/cb08aba9-ae42-4c2d-8b5f-f02ed8d0264b?utm_source=github&utm_medium=referral&page=upgrade-pr
snyk-bot authored and wajda committed Jun 3, 2024
1 parent b6fead7 commit 9836cb1
Showing 64 changed files with 4,598 additions and 5,477 deletions.
11 changes: 6 additions & 5 deletions admin/src/main/scala/za/co/absa/spline/admin/DateTimeUtils.scala
@@ -16,16 +16,17 @@

package za.co.absa.spline.admin

import org.slf4s.Logging

import com.typesafe.scalalogging.LazyLogging

import java.time.format.{DateTimeFormatter, DateTimeFormatterBuilder}
import java.time.temporal.ChronoField
import java.time.{LocalDateTime, ZoneId, ZoneOffset, ZonedDateTime}
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}

object DateTimeUtils extends Logging {
object DateTimeUtils extends LazyLogging {

private val ZonedDateTimeRegexp = (s"" +
"^" +
@@ -60,12 +61,12 @@ object DateTimeUtils extends Logging {

val validOffsets = tz.getRules.getValidOffsets(ldt).asScala
if (validOffsets.isEmpty) {
log.warn(s"" +
logger.warn("" +
s"DST gap was detected for the input '$s' in the time zone '$tz'. " +
s"Continue with the adjusted datetime '$zdt''")
}
if (validOffsets.length > 1) {
log.warn(s"" +
logger.warn("" +
s"DST overlap (${validOffsets.mkString(", ")}) was detected for the input '$s' in the time zone '$tz'. " +
s"Continue with the assumed datetime '$zdt'")
}
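
The import swap above (scala.collection.JavaConverters._ to scala.jdk.CollectionConverters._) is the standard Scala 2.13 migration: JavaConverters is deprecated in 2.13, and scala.jdk.CollectionConverters exposes the same asScala/asJava decorators. A minimal sketch of the 2.13-style usage, written for this note around a hypothetical Java list rather than code from this commit:

import java.util.{ArrayList => JArrayList}
import scala.jdk.CollectionConverters._ // 2.13 replacement for scala.collection.JavaConverters._

object ConvertersSketch extends App {
  val javaList = new JArrayList[String]()
  javaList.add("a")
  javaList.add("b")

  // asScala / asJava keep the same names and behaviour as before
  val scalaSeq: Seq[String] = javaList.asScala.toSeq
  println(scalaSeq)        // List(a, b)
  println(scalaSeq.asJava) // [a, b]
}
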
@@ -23,6 +23,7 @@ import scala.collection.immutable.ListSet

trait UserInteractor {
def credentializeConnectionUrl(url: ArangoConnectionURL): ArangoConnectionURL

def confirmDatabaseBackupReady(): Boolean
}

@@ -82,7 +83,7 @@ class ConsoleUserInteractor(console: InputConsole) extends UserInteractor {
|Have you created a database backup? [${validAnswers.mkString("/")}]:\u00A0
""".stripMargin.trim

def userAnswers: Stream[String] = console.readLine(msg).trim.toLowerCase #:: userAnswers
def userAnswers: LazyList[String] = console.readLine(msg).trim.toLowerCase #:: userAnswers

val userAnswer = userAnswers.filter(validAnswers).head
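
The Stream to LazyList change reflects that scala.Stream is deprecated in 2.13 in favour of LazyList, which is lazy in both head and tail; the #:: operator keeps working. A minimal sketch of the same self-referential "prompt until a valid answer" pattern as userAnswers above, using a hypothetical canned input source in place of console.readLine:

object PromptSketch extends App {
  // Hypothetical stand-in for console.readLine(msg)
  private val cannedInput = Iterator("maybe", "q", "y")
  private def readAnswer(): String = cannedInput.next().trim.toLowerCase

  val validAnswers = Set("y", "n")

  // LazyList (2.13) replaces Stream (2.12); the recursive definition stays the same
  def answers: LazyList[String] = readAnswer() #:: answers

  // Evaluates readAnswer() only until the first valid answer appears
  val firstValid = answers.filter(validAnswers).head
  println(firstValid) // y
}
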

88 changes: 44 additions & 44 deletions admin/src/main/scala/za/co/absa/spline/arango/ArangoManager.scala
@@ -20,7 +20,7 @@ import com.arangodb.async.ArangoDatabaseAsync
import com.arangodb.entity.{EdgeDefinition, IndexType}
import com.arangodb.model.Implicits.IndexOptionsOps
import com.arangodb.model._
import org.slf4s.Logging
import com.typesafe.scalalogging.StrictLogging
import za.co.absa.commons.reflect.EnumerationMacros.sealedInstancesOf
import za.co.absa.commons.version.impl.SemVer20Impl.SemanticVersion
import za.co.absa.spline.arango.OnDBExistsAction.{Drop, Skip}
@@ -30,7 +30,7 @@ import za.co.absa.spline.persistence.model.{CollectionDef, GraphDef, SearchAnaly
import za.co.absa.spline.persistence.{DatabaseVersionManager, DryRunnable}

import java.time.{Clock, ZonedDateTime}
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.collection.immutable._
import scala.compat.java8.FutureConverters._
import scala.concurrent.duration.Duration
@@ -61,15 +61,15 @@ class ArangoManagerImpl(
(implicit val ex: ExecutionContext)
extends ArangoManager
with DryRunnable
with Logging {
with StrictLogging {

import ArangoManagerImpl._

def createDatabase(onExistsAction: OnDBExistsAction, options: DatabaseCreateOptions): Future[Boolean] = {
log.debug("Initialize database")
logger.debug("Initialize database")
db.exists.toScala.flatMap { exists =>
if (exists && onExistsAction == Skip) {
log.debug("Database already exists - skipping initialization")
logger.debug("Database already exists - skipping initialization")
Future.successful(false)
} else for {
_ <- deleteDbIfRequested(onExistsAction == Drop)
@@ -86,20 +86,20 @@
}

override def upgrade(): Future[Unit] = {
log.debug("Upgrade database")
logger.debug("Upgrade database")
dbVersionManager.currentVersion
.flatMap(currentVersion => {
log.info(s"Current database version: ${currentVersion.asString}")
log.info(s"Target database version: ${appDBVersion.asString}")
logger.info(s"Current database version: ${currentVersion.asString}")
logger.info(s"Target database version: ${appDBVersion.asString}")
if (currentVersion == appDBVersion) Future.successful {
log.info(s"The database is up-to-date")
logger.info(s"The database is up-to-date")
} else if (currentVersion > appDBVersion) Future.failed {
new RuntimeException("Database downgrade is not supported")
} else for {
_ <- deleteFoxxServices()
_ <- migrator.migrate(currentVersion, appDBVersion)
_ <- createFoxxServices()
} yield {}
} yield ()
})
}

@@ -115,7 +115,7 @@
case AuxiliaryDBAction.SearchViewsCreate => createSearchViews()
case AuxiliaryDBAction.SearchAnalyzerDelete => deleteSearchAnalyzers()
case AuxiliaryDBAction.SearchAnalyzerCreate => createSearchAnalyzers()
}).map(_ => {}))
}).map(_ => ()))
}
}

@@ -127,16 +127,16 @@
for {
_ <- deleteFoxxServices()
_ <- createFoxxServices()
} yield {}
} yield ()
}

override def prune(retentionPeriod: Duration): Future[Unit] = {
log.debug(s"Prune data older than $retentionPeriod")
logger.debug(s"Prune data older than $retentionPeriod")
dataRetentionManager.pruneBefore(clock.millis - retentionPeriod.toMillis)
}

override def prune(dateTime: ZonedDateTime): Future[Unit] = {
log.debug(s"Prune data before $dateTime")
logger.debug(s"Prune data before $dateTime")
dataRetentionManager.pruneBefore(dateTime.toInstant.toEpochMilli)
}

@@ -146,20 +146,20 @@
_ <- if (exists && !dropIfExists)
throw new IllegalArgumentException(s"Arango Database ${db.name} already exists")
else if (exists && dropIfExists) {
log.info(s"Drop database: ${db.name}")
logger.info(s"Drop database: ${db.name}")
unlessDryRunAsync(db.drop().toScala)
}
else Future.successful({})
} yield {}
else Future.successful(())
} yield ()
}

private def createDb() = {
log.info(s"Create database: ${db.name}")
logger.info(s"Create database: ${db.name}")
unlessDryRunAsync(db.create().toScala)
}

private def createCollections(options: DatabaseCreateOptions) = {
log.debug(s"Create collections")
logger.debug(s"Create collections")
Future.sequence(
for (colDef <- sealedInstancesOf[CollectionDef])
yield {
@@ -180,7 +180,7 @@
}

private def createGraphs() = {
log.debug(s"Create graphs")
logger.debug(s"Create graphs")
Future.sequence(
for (graphDef <- sealedInstancesOf[GraphDef]) yield {
val edgeDefs = graphDef.edgeDefs.map(e =>
@@ -193,28 +193,28 @@
}

private def deleteIndices() = {
log.info(s"Drop indices")
logger.info(s"Drop indices")
for {
colEntities <- db.getCollections.toScala.map(_.asScala.filter(!_.getIsSystem))
eventualIndices = colEntities.map(ce => db.collection(ce.getName).getIndexes.toScala.map(_.asScala.map(ce.getName -> _)))
allIndices <- Future.reduceLeft(Iterable(eventualIndices.toSeq: _*))(_ ++ _)
userIndices = allIndices.filter { case (_, idx) => idx.getType != IndexType.primary && idx.getType != IndexType.edge }
_ <- Future.traverse(userIndices) { case (colName, idx) =>
log.debug(s"Drop ${idx.getType} index: $colName.${idx.getName}")
logger.debug(s"Drop ${idx.getType} index: $colName.${idx.getName}")
unlessDryRunAsync(db.deleteIndex(idx.getId).toScala)
}
} yield {}
} yield ()
}

private def createIndices() = {
log.info(s"Create indices")
logger.info(s"Create indices")
Future.sequence(
for {
colDef <- sealedInstancesOf[CollectionDef]
idxDef <- colDef.indexDefs ++ colDef.commonIndexDefs
} yield {
val idxOpts = idxDef.options
log.debug(s"Ensure ${idxOpts.indexType} index: ${colDef.name} [${idxDef.fields.mkString(",")}]")
logger.debug(s"Ensure ${idxOpts.indexType} index: ${colDef.name} [${idxDef.fields.mkString(",")}]")
val dbCol = db.collection(colDef.name)
val fields = idxDef.fields.asJava
unlessDryRunAsync {
@@ -229,69 +229,69 @@
})
}

private def createFoxxServices(): Future[_] = {
log.debug(s"Lookup Foxx services to install")
private def createFoxxServices(): Future[Unit] = {
logger.debug(s"Lookup Foxx services to install")
val serviceDefs = FoxxSourceResolver.lookupSources(FoxxSourcesLocation)
log.debug(s"Found Foxx services: ${serviceDefs.map(_._1) mkString ", "}")
logger.debug(s"Found Foxx services: ${serviceDefs.map(_._1) mkString ", "}")
Future.traverse(serviceDefs.toSeq) {
case (name, content) =>
val srvMount = s"/$name"
log.info(s"Install Foxx service: $srvMount")
logger.info(s"Install Foxx service: $srvMount")
foxxManager.install(srvMount, content)
}
}.map(_ => ())
}

private def deleteFoxxServices(): Future[_] = {
log.debug(s"Delete Foxx services")
private def deleteFoxxServices(): Future[Unit] = {
logger.debug(s"Delete Foxx services")
foxxManager.list().flatMap(srvDefs =>
Future.sequence(for {
srvDef <- srvDefs
srvMount = srvDef("mount").toString
if !srvMount.startsWith("/_")
} yield {
log.info(s"Uninstall Foxx service: $srvMount")
logger.info(s"Uninstall Foxx service: $srvMount")
foxxManager.uninstall(srvMount)
}
).map(_ => {})
).map(_ => ())
)
}

private def deleteSearchViews() = {
log.debug(s"Delete search views")
logger.debug(s"Delete search views")
for {
viewEntities <- db.getViews.toScala.map(_.asScala)
views = viewEntities.map(ve => db.view(ve.getName))
_ <- Future.traverse(views) { view =>
log.info(s"Delete search view: ${view.name}")
logger.info(s"Delete search view: ${view.name}")
unlessDryRunAsync(view.drop().toScala)
}
} yield {}
} yield ()
}

private def createSearchViews() = {
log.debug(s"Create search views")
logger.debug(s"Create search views")
Future.traverse(sealedInstancesOf[SearchViewDef]) { viewDef =>
log.info(s"Create search view: ${viewDef.name}")
logger.info(s"Create search view: ${viewDef.name}")
unlessDryRunAsync(db.createArangoSearch(viewDef.name, viewDef.properties).toScala)
}
}

private def deleteSearchAnalyzers() = {
log.debug(s"Delete search analyzers")
logger.debug(s"Delete search analyzers")
for {
analyzers <- db.getSearchAnalyzers.toScala.map(_.asScala)
userAnalyzers = analyzers.filter(_.getName.startsWith(s"${db.name}::"))
_ <- Future.traverse(userAnalyzers)(ua => {
log.info(s"Delete search analyzer: ${ua.getName}")
logger.info(s"Delete search analyzer: ${ua.getName}")
unlessDryRunAsync(db.deleteSearchAnalyzer(ua.getName).toScala)
})
} yield {}
} yield ()
}

private def createSearchAnalyzers() = {
log.debug(s"Create search analyzers")
logger.debug(s"Create search analyzers")
Future.traverse(sealedInstancesOf[SearchAnalyzerDef]) { ad =>
log.info(s"Create search analyzer: ${ad.name}")
logger.info(s"Create search analyzer: ${ad.name}")
unlessDryRunAsync(db.createSearchAnalyzer(ad.analyzer).toScala)
}
}
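
Throughout this file (and the ones below) the slf4s Logging mixin and its log field are replaced by scala-logging's StrictLogging, which exposes an SLF4J-backed logger field; StrictLogging initialises the logger eagerly as a val, while the LazyLogging variant used for the DateTimeUtils object above initialises it lazily. A minimal sketch of the pattern with a hypothetical GreetingService, not taken from this commit:

import com.typesafe.scalalogging.StrictLogging

// Hypothetical class mirroring the `with StrictLogging` + `logger` usage above
class GreetingService extends StrictLogging {
  def greet(name: String): String = {
    logger.debug(s"Building a greeting for $name") // previously log.debug(...) via slf4s
    s"Hello, $name"
  }
}

object GreetingSketch extends App {
  println(new GreetingService().greet("Spline"))
}
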
@@ -16,9 +16,9 @@

package za.co.absa.spline.arango.foxx

import com.typesafe.scalalogging.StrictLogging
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse
import org.slf4s.Logging
import za.co.absa.spline.common.rest.RESTClient
import za.co.absa.spline.persistence.DryRunnable

@@ -30,24 +30,24 @@ class FoxxManagerImpl(
)(implicit ec: ExecutionContext)
extends FoxxManager
with DryRunnable
with Logging {
with StrictLogging {

override def install(mountPrefix: String, content: Array[Byte]): Future[Unit] = {
log.debug(s"Prepare Foxx service.zip: $mountPrefix")
logger.debug(s"Prepare Foxx service.zip: $mountPrefix")

unlessDryRunAsync(restClient.post(s"_api/foxx?mount=$mountPrefix", content))
}

override def uninstall(mountPrefix: String): Future[Unit] = {
log.debug(s"Delete Foxx service: $mountPrefix")
logger.debug(s"Delete Foxx service: $mountPrefix")
unlessDryRunAsync(restClient.delete(s"_api/foxx/service?mount=$mountPrefix"))
}

override def list(): Future[Seq[Map[String, Any]]] = {
log.debug(s"List Foxx services")
logger.debug(s"List Foxx services")
restClient.get(s"_api/foxx").map(str => {
val srvDefs = parse(str).extract(DefaultFormats, manifest[Seq[Map[String, Any]]])
log.debug(s"Found Foxx service definitions: $srvDefs")
logger.debug(s"Found Foxx service definitions: $srvDefs")
srvDefs
})
}
@@ -88,19 +88,19 @@ class AdminCLISpec

(when(
arangoManagerMock.upgrade())
thenReturn Future.successful({}))
thenReturn Future.successful(()))

(when(
arangoManagerMock.execute(any()))
thenReturn Future.successful({}))
thenReturn Future.successful(()))

(when(
arangoManagerMock.prune(any[Duration]()))
thenReturn Future.successful({}))
thenReturn Future.successful(()))

(when(
arangoManagerMock.prune(any[ZonedDateTime]()))
thenReturn Future.successful({}))
thenReturn Future.successful(()))

it should "when called with wrong options, print welcome message" in {
captureStdErr {
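
The repeated Future.successful({}) to Future.successful(()) edits here (and the yield {} to yield () changes above) swap an empty block for the explicit unit literal; both evaluate to () of type Unit, so this reads as a readability cleanup rather than a compile fix for 2.13. A minimal, hedged sketch of stubbing a Unit-returning async call in the same style, using a hypothetical Manager trait rather than the real ArangoManager API:

import org.mockito.Mockito.{mock, when}
import scala.concurrent.Future

trait Manager {
  def upgrade(): Future[Unit]
}

object StubSketch extends App {
  val managerMock = mock(classOf[Manager])

  // () is the explicit Unit literal; {} is an empty block that also evaluates to ()
  when(managerMock.upgrade()).thenReturn(Future.successful(()))

  println(managerMock.upgrade().isCompleted) // true - the stub returns an already-completed Future
}
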