diff --git a/build.sbt b/build.sbt index d8f0c2b3..0bfbee98 100644 --- a/build.sbt +++ b/build.sbt @@ -365,8 +365,7 @@ lazy val searchProvision = project .settings(commonSettings) .settings( name := "search-provision", - libraryDependencies ++= Dependencies.ciris, - Test / parallelExecution := false + libraryDependencies ++= Dependencies.ciris ) .dependsOn( commons % "compile->compile;test->test", @@ -450,6 +449,7 @@ lazy val commonSettings = Seq( ), Compile / console / scalacOptions := (Compile / scalacOptions).value.filterNot(_ == "-Xfatal-warnings"), Test / console / scalacOptions := (Compile / console / scalacOptions).value, + // parallel execution would work, but requires a quite powerful solr server Test / parallelExecution := false, semanticdbEnabled := true, // enable SemanticDB semanticdbVersion := scalafixSemanticdb.revision, diff --git a/flake.lock b/flake.lock index b15fece0..d56652fa 100644 --- a/flake.lock +++ b/flake.lock @@ -6,11 +6,11 @@ "nixpkgs": "nixpkgs" }, "locked": { - "lastModified": 1710272941, - "narHash": "sha256-FtezN1MomZBLAg8vf1nAD/AE8xTgOlYyrv7mOVls8N8=", + "lastModified": 1716363410, + "narHash": "sha256-foFNeJze5WIoUt2bI7YWXwj1alWIgTLj9/oXwuB4t3Q=", "owner": "eikek", "repo": "devshell-tools", - "rev": "163caa528ad0b89085d4d6ae4a6acb09f340cbc9", + "rev": "3e5537f07f29bca461525eb2cf7e79c5df5d1680", "type": "github" }, "original": { @@ -73,11 +73,11 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1716292277, - "narHash": "sha256-248jDA6qGlyKvNWf8ar44xQRNxc/FmEibBTfoYeTH5Y=", + "lastModified": 1716361217, + "narHash": "sha256-mzZDr00WUiUXVm1ujBVv6A0qRd8okaITyUp4ezYRgc4=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "8fcbc5106945623794244cae1b4add3e3469a0db", + "rev": "46397778ef1f73414b03ed553a3368f0e7e33c2f", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index bf9e9e8a..900bd648 100644 --- a/flake.nix +++ b/flake.nix @@ -17,6 +17,9 @@ system = flake-utils.lib.system.x86_64-linux; modules = [ 
./nix/services.nix + { + virtualisation.memorySize = 2048; + } ]; }; @@ -83,6 +86,8 @@ RS_SEARCH_HTTP_SERVER_PORT = "8080"; RS_PROVISION_HTTP_SERVER_PORT = "8082"; RS_METRICS_UPDATE_INTERVAL = "0s"; + RS_SOLR_CREATE_CORE_CMD = "cnt-solr-create-core %s"; + RS_SOLR_DELETE_CORE_CMD = "cnt-solr-delete-core %s"; #don't start docker container for dbTests NO_SOLR = "true"; @@ -106,6 +111,8 @@ RS_SEARCH_HTTP_SERVER_PORT = "8080"; RS_PROVISION_HTTP_SERVER_PORT = "8082"; RS_METRICS_UPDATE_INTERVAL = "0s"; + RS_SOLR_CREATE_CORE_CMD = "vm-solr-create-core %s"; + RS_SOLR_DELETE_CORE_CMD = "vm-solr-delete-core %s"; #don't start docker container for dbTests NO_SOLR = "true"; diff --git a/modules/commons/src/main/scala/io/renku/search/common/UrlPattern.scala b/modules/commons/src/main/scala/io/renku/search/common/UrlPattern.scala index 871d218a..03424976 100644 --- a/modules/commons/src/main/scala/io/renku/search/common/UrlPattern.scala +++ b/modules/commons/src/main/scala/io/renku/search/common/UrlPattern.scala @@ -52,9 +52,11 @@ object UrlPattern: ) private[common] def splitUrl(url: String): UrlParts = { def readScheme(s: String): (Option[String], String) = - s.split("://").filter(_.nonEmpty).toList match - case s :: rest :: Nil => (Some(s), rest) - case rest => (None, rest.mkString) + if (!s.contains("://")) (None, s) + else + s.split("://").filter(_.nonEmpty).toList match + case first :: rest :: Nil => (Some(first), rest) + case first => (Some(first.mkString), "") def readHostPort(s: String): (List[String], Option[String]) = s.split(':').toList match diff --git a/modules/commons/src/test/scala/io/renku/search/LoggingConfigure.scala b/modules/commons/src/test/scala/io/renku/search/LoggingConfigure.scala index d0587ecd..7a0e157b 100644 --- a/modules/commons/src/test/scala/io/renku/search/LoggingConfigure.scala +++ b/modules/commons/src/test/scala/io/renku/search/LoggingConfigure.scala @@ -20,7 +20,7 @@ package io.renku.search import io.renku.logging.LoggingSetup -trait 
LoggingConfigure extends munit.FunSuite: +trait LoggingConfigure extends munit.Suite: def defaultVerbosity: Int = 0 diff --git a/modules/commons/src/test/scala/io/renku/search/common/UrlPatternSpec.scala b/modules/commons/src/test/scala/io/renku/search/common/UrlPatternSpec.scala index 01c5d498..69ec4878 100644 --- a/modules/commons/src/test/scala/io/renku/search/common/UrlPatternSpec.scala +++ b/modules/commons/src/test/scala/io/renku/search/common/UrlPatternSpec.scala @@ -53,6 +53,14 @@ class UrlPatternSpec extends ScalaCheckSuite: ) test("fromString"): + assertEquals( + UrlPattern.fromString("http://"), + UrlPattern.all.copy(scheme = Some(Segment.Literal("http"))) + ) + assertEquals( + UrlPattern.fromString("http"), + UrlPattern.all.copy(host = List(Segment.Literal("http"))) + ) assertEquals(UrlPattern.fromString("*"), UrlPattern.all) assertEquals(UrlPattern.fromString(""), UrlPattern.all) assertEquals( diff --git a/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala b/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala index 8bdae110..1bf76259 100644 --- a/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala +++ b/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala @@ -67,7 +67,7 @@ object ConfigValues extends ConfigDecoders: renv("CLIENT_ID").default(default.value).as[ClientId] val solrConfig: ConfigValue[Effect, SolrConfig] = { - val url = renv("SOLR_URL").default("http://localhost:8983/solr").as[Uri] + val url = renv("SOLR_URL").default("http://localhost:8983").as[Uri] val core = renv("SOLR_CORE").default("search-core-test") val maybeUser = (renv("SOLR_USER").default("admin"), renv("SOLR_PASS")) diff --git a/modules/events/src/test/scala/io/renku/events/EventsGenerators.scala b/modules/events/src/test/scala/io/renku/events/EventsGenerators.scala index 9b0267e9..74b7c0e3 100644 --- a/modules/events/src/test/scala/io/renku/events/EventsGenerators.scala 
+++ b/modules/events/src/test/scala/io/renku/events/EventsGenerators.scala @@ -22,6 +22,7 @@ import java.time.Instant import java.time.temporal.ChronoUnit import io.renku.events.v1.ProjectAuthorizationAdded +import io.renku.search.GeneratorSyntax.* import io.renku.search.events.* import io.renku.search.model.Id import io.renku.search.model.MemberRole @@ -315,11 +316,11 @@ object EventsGenerators: def v1UserAddedGen( prefix: String, - firstName: Gen[FirstName] = ModelGenerators.userFirstNameGen + firstName: Gen[Option[FirstName]] = ModelGenerators.userFirstNameGen.asOption ): Gen[v1.UserAdded] = for id <- Gen.uuid.map(_.toString) - firstName <- Gen.option(firstName.map(_.value)) + firstName <- firstName.map(_.map(_.value)) lastName <- alphaStringGen(max = 5).map(v => s"$prefix-$v") email <- Gen.option(stringGen(max = 5).map(host => s"$lastName@$host.com")) yield v1.UserAdded( @@ -331,11 +332,11 @@ object EventsGenerators: def v2UserAddedGen( prefix: String, - firstName: Gen[FirstName] = ModelGenerators.userFirstNameGen + firstName: Gen[Option[FirstName]] = ModelGenerators.userFirstNameGen.asOption ): Gen[v2.UserAdded] = for id <- Gen.uuid.map(_.toString) - firstName <- Gen.option(firstName.map(_.value)) + firstName <- firstName.map(_.map(_.value)) lastName <- alphaStringGen(max = 5).map(v => s"$prefix-$v") email <- Gen.option(stringGen(max = 5).map(host => s"$lastName@$host.com")) ns <- ModelGenerators.namespaceGen @@ -349,7 +350,7 @@ object EventsGenerators: def userAddedGen( prefix: String, - firstName: Gen[FirstName] = ModelGenerators.userFirstNameGen + firstName: Gen[Option[FirstName]] = ModelGenerators.userFirstNameGen.asOption ): Gen[UserAdded] = Gen.oneOf( v1UserAddedGen(prefix, firstName).map(UserAdded.V1.apply), diff --git a/modules/redis-client/src/test/scala/io/renku/redis/client/RedisQueueClientSpec.scala b/modules/redis-client/src/test/scala/io/renku/redis/client/RedisQueueClientSpec.scala index 073f9bfc..e1df36e3 100644 --- 
a/modules/redis-client/src/test/scala/io/renku/redis/client/RedisQueueClientSpec.scala +++ b/modules/redis-client/src/test/scala/io/renku/redis/client/RedisQueueClientSpec.scala @@ -27,7 +27,7 @@ import dev.profunktor.redis4cats.connection.RedisClient import dev.profunktor.redis4cats.streams.data.XAddMessage import dev.profunktor.redis4cats.streams.{RedisStream, Streaming} import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.util.RedisSpec +import io.renku.redis.client.util.RedisBaseSuite import io.renku.search.GeneratorSyntax.* import munit.CatsEffectSuite import org.scalacheck.Gen @@ -35,133 +35,128 @@ import org.scalacheck.Gen.alphaChar import org.scalacheck.cats.implicits.* import scodec.bits.ByteVector -class RedisQueueClientSpec extends CatsEffectSuite with RedisSpec: +class RedisQueueClientSpec extends CatsEffectSuite with RedisBaseSuite: + override def munitFixtures = List(redisServer, redisClients) test("can enqueue and dequeue events"): - withRedisClient.asRedisQueueClient().use { client => - val queue = RedisClientGenerators.queueNameGen.generateOne - for - dequeued <- SignallingRef.of[IO, List[(String, String)]](Nil) - - message1 = "message1" - message1Head = "header1" - _ <- client.enqueue(queue, toByteVector(message1Head), toByteVector(message1)) - - streamingProcFiber <- client - .acquireEventsStream(queue, chunkSize = 1, maybeOffset = None) - .evalMap(event => - dequeued.update(event.header.asString -> event.payload.asString :: _) - ) - .compile - .drain - .start - _ <- dequeued.waitUntil(_ == List(message1Head -> message1)) - - message2 = "message2" - message2Head = "header2" - _ <- client.enqueue(queue, toByteVector(message2Head), toByteVector(message2)) - _ <- dequeued - .waitUntil(_.toSet == Set(message1Head -> message1, message2Head -> message2)) - - _ <- streamingProcFiber.cancel - yield () - } + val queue = RedisClientGenerators.queueNameGen.generateOne + for + client <- IO(redisClients().queueClient) + dequeued 
<- SignallingRef.of[IO, List[(String, String)]](Nil) + + message1 = "message1" + message1Head = "header1" + _ <- client.enqueue(queue, toByteVector(message1Head), toByteVector(message1)) + + streamingProcFiber <- client + .acquireEventsStream(queue, chunkSize = 1, maybeOffset = None) + .evalMap(event => + dequeued.update(event.header.asString -> event.payload.asString :: _) + ) + .compile + .drain + .start + _ <- dequeued.waitUntil(_ == List(message1Head -> message1)) + + message2 = "message2" + message2Head = "header2" + _ <- client.enqueue(queue, toByteVector(message2Head), toByteVector(message2)) + _ <- dequeued + .waitUntil(_.toSet == Set(message1Head -> message1, message2Head -> message2)) + + _ <- streamingProcFiber.cancel + yield () test("can start enqueueing events from the given messageId excluding"): - withRedisClient.asRedisQueueClient().use { client => - val queue = RedisClientGenerators.queueNameGen.generateOne - for - dequeued <- SignallingRef.of[IO, List[String]](Nil) - - message1 = "message1" - message1Id <- client.enqueue(queue, toByteVector("head1"), toByteVector(message1)) - - streamingProcFiber <- client - .acquireEventsStream(queue, chunkSize = 1, maybeOffset = message1Id.some) - .evalMap(event => dequeued.update(event.payload.asString :: _)) - .compile - .drain - .start - - message2 = "message2" - _ <- client.enqueue(queue, toByteVector("head2"), toByteVector(message2)) - _ <- dequeued.waitUntil(_.toSet == Set(message2)) - - message3 = "message3" - _ <- client.enqueue(queue, toByteVector("head3"), toByteVector(message3)) - _ <- dequeued.waitUntil(_.toSet == Set(message2, message3)) - _ <- streamingProcFiber.cancel - yield () - } + val queue = RedisClientGenerators.queueNameGen.generateOne + for + client <- IO(redisClients().queueClient) + dequeued <- SignallingRef.of[IO, List[String]](Nil) + + message1 = "message1" + message1Id <- client.enqueue(queue, toByteVector("head1"), toByteVector(message1)) + + streamingProcFiber <- client + 
.acquireEventsStream(queue, chunkSize = 1, maybeOffset = message1Id.some) + .evalMap(event => dequeued.update(event.payload.asString :: _)) + .compile + .drain + .start + + message2 = "message2" + _ <- client.enqueue(queue, toByteVector("head2"), toByteVector(message2)) + _ <- dequeued.waitUntil(_.toSet == Set(message2)) + + message3 = "message3" + _ <- client.enqueue(queue, toByteVector("head3"), toByteVector(message3)) + _ <- dequeued.waitUntil(_.toSet == Set(message2, message3)) + _ <- streamingProcFiber.cancel + yield () test("can skip events that are wrongly defined"): - withRedisClient() - .flatMap(rc => withRedisClient.asRedisQueueClient().tupleLeft(rc)) - .use { case (redisClient, queueClient) => - val queue = RedisClientGenerators.queueNameGen.generateOne - for - dequeued <- SignallingRef.of[IO, List[String]](Nil) - - _ <- enqueueWithoutHeader(redisClient, queue, toByteVector("message1")) - - streamingProcFiber <- queueClient - .acquireEventsStream(queue, chunkSize = 1, maybeOffset = None) - .evalMap(event => dequeued.update(event.payload.asString :: _)) - .compile - .drain - .start - - message2 = "message2" - _ <- queueClient.enqueue(queue, toByteVector("head2"), toByteVector(message2)) - _ <- dequeued.waitUntil(_.toSet == Set(message2)) - yield () - } + val queue = RedisClientGenerators.queueNameGen.generateOne + for + clients <- IO(redisClients()) + redisClient = clients.lowLevel + queueClient = clients.queueClient + dequeued <- SignallingRef.of[IO, List[String]](Nil) + + _ <- enqueueWithoutHeader(redisClient, queue, toByteVector("message1")) + + streamingProcFiber <- queueClient + .acquireEventsStream(queue, chunkSize = 1, maybeOffset = None) + .evalMap(event => dequeued.update(event.payload.asString :: _)) + .compile + .drain + .start + + message2 = "message2" + _ <- queueClient.enqueue(queue, toByteVector("head2"), toByteVector(message2)) + _ <- dequeued.waitUntil(_.toSet == Set(message2)) + yield () test("allow marking and retrieving a processed 
event"): - withRedisClient.asRedisQueueClient().use { client => - val queue = RedisClientGenerators.queueNameGen.generateOne - val clientId = RedisClientGenerators.clientIdGen.generateOne - val messageId = RedisClientGenerators.messageIdGen.generateOne - for - _ <- client.findLastProcessed(clientId, queue).map(v => assert(v.isEmpty)) + val queue = RedisClientGenerators.queueNameGen.generateOne + val clientId = RedisClientGenerators.clientIdGen.generateOne + val messageId = RedisClientGenerators.messageIdGen.generateOne + for + client <- IO(redisClients().queueClient) + _ <- client.findLastProcessed(clientId, queue).map(v => assert(v.isEmpty)) - _ <- client.markProcessed(clientId, queue, messageId) + _ <- client.markProcessed(clientId, queue, messageId) - _ <- client - .findLastProcessed(clientId, queue) - .map(v => assert(v contains messageId)) - yield () - } + _ <- client + .findLastProcessed(clientId, queue) + .map(v => assert(v contains messageId)) + yield () test("can find out the total size of the given stream"): - withRedisClient.asRedisQueueClient().use { client => - val queue = RedisClientGenerators.queueNameGen.generateOne - val messages = (stringGen, stringGen).mapN(_ -> _).generateList(1, 30) - for - _ <- messages.traverse_ { case (h, p) => - client.enqueue(queue, toByteVector(h), toByteVector(p)) - } - _ <- client.getSize(queue).map(s => assert(s == messages.size)) - yield () - } + val queue = RedisClientGenerators.queueNameGen.generateOne + val messages = (stringGen, stringGen).mapN(_ -> _).generateList(1, 30) + for + client <- IO(redisClients().queueClient) + _ <- messages.traverse_ { case (h, p) => + client.enqueue(queue, toByteVector(h), toByteVector(p)) + } + _ <- client.getSize(queue).map(s => assert(s == messages.size)) + yield () test("can find out a size of the given stream from the given MessageId"): - withRedisClient.asRedisQueueClient().use { client => - val queue = RedisClientGenerators.queueNameGen.generateOne - val olderMessages = 
(stringGen, stringGen).mapN(_ -> _).generateList(1, 30) - val (msgH, msgP) = (stringGen, stringGen).mapN(_ -> _).generateOne - val newerMessages = (stringGen, stringGen).mapN(_ -> _).generateList(1, 30) - for - _ <- olderMessages.traverse_ { case (h, p) => - client.enqueue(queue, toByteVector(h), toByteVector(p)) - } - messageId <- client.enqueue(queue, toByteVector(msgH), toByteVector(msgP)) - _ <- newerMessages.traverse_ { case (h, p) => - client.enqueue(queue, toByteVector(h), toByteVector(p)) - } - _ <- client.getSize(queue, messageId).map(s => assert(s == newerMessages.size)) - yield () - } + val queue = RedisClientGenerators.queueNameGen.generateOne + val olderMessages = (stringGen, stringGen).mapN(_ -> _).generateList(1, 30) + val (msgH, msgP) = (stringGen, stringGen).mapN(_ -> _).generateOne + val newerMessages = (stringGen, stringGen).mapN(_ -> _).generateList(1, 30) + for + client <- IO(redisClients().queueClient) + _ <- olderMessages.traverse_ { case (h, p) => + client.enqueue(queue, toByteVector(h), toByteVector(p)) + } + messageId <- client.enqueue(queue, toByteVector(msgH), toByteVector(msgP)) + _ <- newerMessages.traverse_ { case (h, p) => + client.enqueue(queue, toByteVector(h), toByteVector(p)) + } + _ <- client.getSize(queue, messageId).map(s => assert(s == newerMessages.size)) + yield () private def toByteVector(v: String): ByteVector = ByteVector.encodeUtf8(v).fold(throw _, identity) diff --git a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisBaseSuite.scala b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisBaseSuite.scala new file mode 100644 index 00000000..cb48de10 --- /dev/null +++ b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisBaseSuite.scala @@ -0,0 +1,52 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.redis.client.util + +import cats.effect.* + +import dev.profunktor.redis4cats.Redis +import dev.profunktor.redis4cats.RedisCommands +import dev.profunktor.redis4cats.connection.RedisClient +import dev.profunktor.redis4cats.data.RedisCodec +import dev.profunktor.redis4cats.effect.Log as RedisLog +import io.renku.redis.client.RedisQueueClientImpl +import io.renku.search.LoggingConfigure +import munit.* + +trait RedisBaseSuite + extends RedisServerSuite + with LoggingConfigure + with CatsEffectFixtures: + + given RedisLog[IO] = new RedisLog { + def debug(msg: => String): IO[Unit] = scribe.cats.io.debug(msg) + def error(msg: => String): IO[Unit] = scribe.cats.io.error(msg) + def info(msg: => String): IO[Unit] = scribe.cats.io.info(msg) + } + + val redisClientsR: Resource[IO, RedisClients] = + for + config <- Resource.eval(IO(redisServer())) + lc <- RedisClient[IO] + .from(s"redis://${config.host.value}:${config.port.value}") + cmds <- Redis[IO].fromClient(lc, RedisCodec.Utf8) + qc = new RedisQueueClientImpl[IO](lc) + yield RedisClients(config, lc, cmds, qc) + + val redisClients = ResourceSuiteLocalFixture("all-redis-clients", redisClientsR) diff --git a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisClients.scala b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisClients.scala new file mode 100644 index 00000000..3cefbeb3 --- /dev/null +++ 
b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisClients.scala @@ -0,0 +1,33 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.redis.client.util + +import cats.effect.* + +import dev.profunktor.redis4cats.RedisCommands +import dev.profunktor.redis4cats.connection.RedisClient +import io.renku.redis.client.RedisConfig +import io.renku.redis.client.RedisQueueClient + +final case class RedisClients( + config: RedisConfig, + lowLevel: RedisClient, + commands: RedisCommands[IO, String, String], + queueClient: RedisQueueClient[IO] +) diff --git a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisServerSuite.scala b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisServerSuite.scala new file mode 100644 index 00000000..a7bae147 --- /dev/null +++ b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisServerSuite.scala @@ -0,0 +1,55 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.redis.client.util + +import scala.concurrent.duration.* + +import io.renku.redis.client.* +import io.renku.servers.RedisServer +import munit.Fixture + +/** Starts the redis server if not already running. + * + * This is here for running single tests from outside sbt. Within sbt, the redis server is + * started before any test is run and therefore will live for the entire test run. + */ +trait RedisServerSuite: + + private lazy val redisServerValue = RedisServer + + val redisServer: Fixture[RedisConfig] = + new Fixture[RedisConfig]("redis-server"): + private var redisConfig: Option[RedisConfig] = None + def apply(): RedisConfig = redisConfig match + case Some(c) => c + case None => sys.error(s"Fixture $fixtureName not initialized") + + override def beforeAll(): Unit = + redisServerValue.start() + redisConfig = Some( + RedisConfig( + RedisHost(redisServerValue.host), + RedisPort(redisServerValue.port), + connectionRefreshInterval = 10.minutes + ) + ) + + override def afterAll(): Unit = + redisServerValue.stop() + redisConfig = None diff --git a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala deleted file mode 100644 index 4d82001b..00000000 --- a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische 
Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.redis.client.util - -import scala.concurrent.duration.* - -import cats.effect.* -import cats.syntax.all.* - -import dev.profunktor.redis4cats.connection.RedisClient -import dev.profunktor.redis4cats.data.RedisCodec -import dev.profunktor.redis4cats.effect.Log.Stdout.instance -import dev.profunktor.redis4cats.effect.MkRedis.forAsync -import dev.profunktor.redis4cats.{Redis, RedisCommands} -import io.lettuce.core.RedisConnectionException -import io.renku.redis.client.* -import io.renku.servers.RedisServer - -trait RedisSpec: - self: munit.Suite => - - export dev.profunktor.redis4cats.effect.Log.Stdout.instance - - private lazy val server: RedisServer = RedisServer - - abstract class RedisFixture extends Fixture[Resource[IO, RedisClient]]("redis"): - def asRedisCommands(): Resource[IO, RedisCommands[IO, String, String]] - def asRedisQueueClient(): Resource[IO, RedisQueueClient[IO]] - def redisConfig: RedisConfig - - val withRedisClient: RedisFixture = new RedisFixture: - - def apply(): Resource[IO, RedisClient] = - RedisClient[IO] - .from(server.url) - .recoverWith { - case _: RedisConnectionException => apply() - case ex => Resource.raiseError[IO, RedisClient, Throwable](ex) - } - - override def asRedisCommands(): Resource[IO, RedisCommands[IO, String, String]] = - apply().flatMap(createRedisCommands) - - override def asRedisQueueClient(): Resource[IO, 
RedisQueueClient[IO]] = - RedisQueueClient.make[IO](redisConfig) - - override lazy val redisConfig: RedisConfig = - RedisConfig( - RedisHost(server.host), - RedisPort(server.port), - connectionRefreshInterval = 10 minutes - ) - - override def beforeAll(): Unit = - server.start() - - override def afterAll(): Unit = - server.stop() - - lazy val createRedisCommands - : RedisClient => Resource[IO, RedisCommands[IO, String, String]] = - Redis[IO].fromClient(_, RedisCodec.Utf8) - - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient) diff --git a/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueClientSpec.scala b/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueClientSpec.scala index f323a18c..5df2bbcf 100644 --- a/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueClientSpec.scala +++ b/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueClientSpec.scala @@ -26,46 +26,47 @@ import io.renku.redis.client.RedisClientGenerators import io.renku.search.GeneratorSyntax.* import io.renku.search.events.* import io.renku.search.events.EventMessage +import munit.AnyFixture import munit.CatsEffectSuite -class QueueClientSpec extends CatsEffectSuite with QueueSpec: +class QueueClientSpec extends CatsEffectSuite with QueueSuite: + override def munitFixtures = List(redisServer, queueClient) + test("can enqueue and dequeue project-member-add events"): - withQueueClient().use { queue => - val qname = RedisClientGenerators.queueNameGen.generateOne - val msg = EventsGenerators - .eventMessageGen(EventsGenerators.projectMemberAddedGen) - .generateOne - for - msgId <- queue.enqueue(qname, msg) - res <- queue - .acquireMessageStream[ProjectMemberAdded](qname, 1, None) - .take(1) - .compile - .toList - _ = assertEquals(res.head, msg.copy(id = msgId)) - yield () - } + val qname = RedisClientGenerators.queueNameGen.generateOne + val msg = EventsGenerators + 
.eventMessageGen(EventsGenerators.projectMemberAddedGen) + .generateOne + for + queue <- IO(queueClient()) + msgId <- queue.enqueue(qname, msg) + res <- queue + .acquireMessageStream[ProjectMemberAdded](qname, 1, None) + .take(1) + .compile + .toList + _ = assertEquals(res.head, msg.copy(id = msgId)) + yield () test("can enqueue and dequeue project-created events"): - withQueueClient().use { queueClient => - val queue = RedisClientGenerators.queueNameGen.generateOne - for - dequeued <- SignallingRef.of[IO, List[EventMessage[ProjectCreated]]](Nil) + val queue = RedisClientGenerators.queueNameGen.generateOne + for + queueClient <- IO(queueClient()) + dequeued <- SignallingRef.of[IO, List[EventMessage[ProjectCreated]]](Nil) - message0 = EventsGenerators - .eventMessageGen(EventsGenerators.projectCreatedGen("test")) - .generateOne - message1Id <- queueClient.enqueue(queue, message0) - message1 = message0.copy(id = message1Id) + message0 = EventsGenerators + .eventMessageGen(EventsGenerators.projectCreatedGen("test")) + .generateOne + message1Id <- queueClient.enqueue(queue, message0) + message1 = message0.copy(id = message1Id) - streamingProcFiber <- queueClient - .acquireMessageStream[ProjectCreated](queue, chunkSize = 1, maybeOffset = None) - .evalMap(event => dequeued.update(event :: _)) - .compile - .drain - .start - _ <- dequeued.waitUntil(_.contains(message1)) + streamingProcFiber <- queueClient + .acquireMessageStream[ProjectCreated](queue, chunkSize = 1, maybeOffset = None) + .evalMap(event => dequeued.update(event :: _)) + .compile + .drain + .start + _ <- dequeued.waitUntil(_.contains(message1)) - _ <- streamingProcFiber.cancel - yield () - } + _ <- streamingProcFiber.cancel + yield () diff --git a/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueSpec.scala b/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueSuite.scala similarity index 63% rename from 
modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueSpec.scala rename to modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueSuite.scala index 84aa1cea..1a4a8de0 100644 --- a/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueSpec.scala +++ b/modules/renku-redis-client/src/test/scala/io/renku/queue/client/QueueSuite.scala @@ -20,19 +20,14 @@ package io.renku.queue.client import cats.effect.{IO, Resource} -import dev.profunktor.redis4cats.connection.RedisClient import io.renku.redis.client.ClientId -import io.renku.redis.client.util.RedisSpec +import io.renku.redis.client.util.RedisBaseSuite -trait QueueSpec extends RedisSpec: - self: munit.Suite => +trait QueueSuite extends RedisBaseSuite: - abstract class QueueFixture extends Fixture[Resource[IO, QueueClient[IO]]]("queue") + val queueClientR: Resource[IO, QueueClient[IO]] = + redisClientsR.map(c => + new QueueClientImpl[IO](c.queueClient, ClientId("search-provisioner")) + ) - val withQueueClient: QueueFixture = () => - withRedisClient - .asRedisQueueClient() - .map(new QueueClientImpl[IO](_, ClientId("search-provisioner"))) - - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient) + val queueClient = ResourceSuiteLocalFixture("queue-client", queueClientR) diff --git a/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala b/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala index 0d5d9f56..66ddb12b 100644 --- a/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala +++ b/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala @@ -32,63 +32,64 @@ import io.renku.search.solr.client.SearchSolrSuite import io.renku.search.solr.client.SolrDocumentGenerators.* import io.renku.search.solr.documents.{EntityDocument, User as SolrUser} import io.renku.solr.client.DocVersion +import munit.CatsEffectSuite import org.scalacheck.Gen import scribe.Scribe -class 
SearchApiSpec extends SearchSolrSuite: +class SearchApiSpec extends CatsEffectSuite with SearchSolrSuite: + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, searchSolrClient) private given Scribe[IO] = scribe.cats[IO] test("do a lookup in Solr to find entities matching the given phrase"): - withSearchSolrClient().use { client => - val project1 = projectDocumentGen( - "matching", - "matching description", - Gen.const(Visibility.Public) - ).generateOne - val project2 = projectDocumentGen( - "disparate", - "disparate description", - Gen.const(Visibility.Public) - ).generateOne - val searchApi = new SearchApiImpl[IO](client) - for { - _ <- client.upsert((project1 :: project2 :: Nil).map(_.widen)) - results <- searchApi - .query(AuthContext.anonymous)(mkQuery("matching")) - .map(_.fold(err => fail(s"Calling Search API failed with $err"), identity)) + val project1 = projectDocumentGen( + "matching", + "matching description", + Gen.const(Visibility.Public) + ).generateOne + val project2 = projectDocumentGen( + "disparate", + "disparate description", + Gen.const(Visibility.Public) + ).generateOne + for { + client <- IO(searchSolrClient()) + searchApi = new SearchApiImpl[IO](client) + _ <- client.upsert((project1 :: project2 :: Nil).map(_.widen)) + results <- searchApi + .query(AuthContext.anonymous)(mkQuery("matching")) + .map(_.fold(err => fail(s"Calling Search API failed with $err"), identity)) - expected = toApiEntities(project1).toSet - obtained = results.items.map(scoreToNone).toSet - } yield assert( - expected.diff(obtained).isEmpty, - s"Expected $expected, bot got $obtained" - ) - } + expected = toApiEntities(project1).toSet + obtained = results.items.map(scoreToNone).toSet + } yield assert( + expected.diff(obtained).isEmpty, + s"Expected $expected, bot got $obtained" + ) test("return Project and User entities"): - withSearchSolrClient().use { client => - val project = projectDocumentGen( - "exclusive", - "exclusive description", - 
Gen.const(Visibility.Public) - ).generateOne - val user = - SolrUser(project.createdBy, DocVersion.NotExists, FirstName("exclusive").some) - val searchApi = new SearchApiImpl[IO](client) - for { - _ <- client.upsert(project :: user :: Nil) - results <- searchApi - .query(AuthContext.anonymous)(mkQuery("exclusive")) - .map(_.fold(err => fail(s"Calling Search API failed with $err"), identity)) + val project = projectDocumentGen( + "exclusive", + "exclusive description", + Gen.const(Visibility.Public) + ).generateOne + val user = + SolrUser(project.createdBy, DocVersion.NotExists, FirstName("exclusive").some) + for { + client <- IO(searchSolrClient()) + searchApi = new SearchApiImpl[IO](client) + _ <- client.upsert(project :: user :: Nil) + results <- searchApi + .query(AuthContext.anonymous)(mkQuery("exclusive")) + .map(_.fold(err => fail(s"Calling Search API failed with $err"), identity)) - expected = toApiEntities(project, user).toSet - obtained = results.items.map(scoreToNone).toSet - } yield assert( - expected.diff(obtained).isEmpty, - s"Expected $expected, bot got $obtained" - ) - } + expected = toApiEntities(project, user).toSet + obtained = results.items.map(scoreToNone).toSet + } yield assert( + expected.diff(obtained).isEmpty, + s"Expected $expected, bot got $obtained" + ) private def scoreToNone(e: SearchEntity): SearchEntity = e match case e: Project => e.copy(score = None) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/ConcurrentUpdateSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/ConcurrentUpdateSpec.scala index 0ad5f746..4fe0cb68 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/ConcurrentUpdateSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/ConcurrentUpdateSpec.scala @@ -20,8 +20,8 @@ package io.renku.search.provision import scala.concurrent.duration.* +import cats.effect.IO import cats.effect.std.CountDownLatch -import 
cats.effect.{IO, Resource} import cats.syntax.all.* import io.renku.events.EventsGenerators @@ -42,49 +42,48 @@ import org.scalacheck.Gen class ConcurrentUpdateSpec extends ProvisioningSuite: testCases.foreach { tc => test(s"process concurrent events: $tc"): - - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for { - _ <- tc.dbState.create(solrClient) - - collector <- BackgroundCollector[SolrDocument]( - loadProjectPartialOrEntity(solrClient, tc.projectId) - ) - _ <- collector.start - msgFiber <- List(handlers.projectCreated, handlers.projectAuthAdded) - .traverse(_.compile.drain.start) - - latch <- CountDownLatch[IO](1) - - sendAuth <- (latch.await >> queueClient.enqueue( - queueConfig.projectAuthorizationAdded, - EventsGenerators.eventMessageGen(Gen.const(tc.authAdded)).generateOne - )).start - - sendCreate <- (latch.await >> queueClient.enqueue( - queueConfig.projectCreated, - EventsGenerators.eventMessageGen(Gen.const(tc.projectCreated)).generateOne - )).start - - _ <- latch.release - _ <- List(sendAuth, sendCreate).traverse_(_.join) - - _ <- collector.waitUntil( - docs => - scribe.debug(s"Check for ${tc.expectedProject}") - docs.exists(tc.checkExpected) - , - timeout = 30.seconds - ) - - _ <- msgFiber.traverse_(_.cancel) - } yield () - } + for { + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + _ <- tc.dbState.create(solrClient) + + collector <- BackgroundCollector[SolrDocument]( + loadProjectPartialOrEntity(solrClient, tc.projectId) + ) + _ <- collector.start + msgFiber <- List(handler.projectCreated, handler.projectAuthAdded) + .traverse(_.compile.drain.start) + + latch <- CountDownLatch[IO](1) + + sendAuth <- (latch.await >> queueClient.enqueue( + queueConfig.projectAuthorizationAdded, + EventsGenerators.eventMessageGen(Gen.const(tc.authAdded)).generateOne + )).start + + sendCreate <- (latch.await >> queueClient.enqueue( + 
queueConfig.projectCreated, + EventsGenerators.eventMessageGen(Gen.const(tc.projectCreated)).generateOne + )).start + + _ <- latch.release + _ <- List(sendAuth, sendCreate).traverse_(_.join) + + _ <- collector.waitUntil( + docs => + scribe.debug(s"Check for ${tc.expectedProject}") + docs.exists(tc.checkExpected) + , + timeout = 30.seconds + ) + + _ <- msgFiber.traverse_(_.cancel) + } yield () } - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) - object ConcurrentUpdateSpec: enum DbState: case Empty diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala index 0df9cc3c..63cce882 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala @@ -18,11 +18,13 @@ package io.renku.search.provision +import scala.concurrent.duration.Duration + import cats.effect.{IO, Resource} import cats.syntax.all.* import fs2.Stream -import io.renku.queue.client.{QueueClient, QueueSpec} +import io.renku.queue.client.{QueueClient, QueueSuite} import io.renku.redis.client.QueueName import io.renku.search.config.QueuesConfig import io.renku.search.model.{EntityType, Id, Namespace} @@ -31,8 +33,9 @@ import io.renku.search.solr.client.{SearchSolrClient, SearchSolrSuite} import io.renku.search.solr.documents.{Group as GroupDocument, User as UserDocument, *} import io.renku.search.solr.query.SolrToken import io.renku.solr.client.{QueryData, QueryString} +import munit.CatsEffectSuite -trait ProvisioningSuite extends SearchSolrSuite with QueueSpec: +trait ProvisioningSuite extends CatsEffectSuite with SearchSolrSuite with QueueSuite: val queueConfig: QueuesConfig = QueuesConfig( projectCreated = QueueName("projectCreated"), projectUpdated = 
QueueName("projectUpdated"), @@ -51,19 +54,23 @@ trait ProvisioningSuite extends SearchSolrSuite with QueueSpec: groupMemberRemoved = QueueName("groupMemberRemoved") ) - def withMessageHandlers( - cfg: QueuesConfig = queueConfig - ): Resource[IO, (MessageHandlers[IO], QueueClient[IO], SearchSolrClient[IO])] = - (withSearchSolrClient(), withQueueClient()).mapN { (solrClient, queueClient) => - val steps = - PipelineSteps[IO]( - solrClient, - Stream[IO, QueueClient[IO]](queueClient), - inChunkSize = 1 - ) - val handlers = MessageHandlers[IO](steps, queueConfig) - (handlers, queueClient, solrClient) - } + override def munitIOTimeout: Duration = Duration(1, "min") + + val testServicesR: Resource[IO, TestServices] = + for + solrClient <- searchSolrR + queue <- queueClientR + steps = PipelineSteps[IO]( + solrClient, + Stream[IO, QueueClient[IO]](queue), + inChunkSize = 1 + ) + handlers = MessageHandlers[IO](steps, queueConfig) + yield TestServices(handlers, queue, solrClient) + + val testServices = ResourceSuiteLocalFixture("test-services", testServicesR) + + override def munitFixtures = List(solrServer, redisServer, testServices) def loadProjectsByNs(solrClient: SearchSolrClient[IO])( ns: Namespace diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/TestServices.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/TestServices.scala new file mode 100644 index 00000000..4d99ae57 --- /dev/null +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/TestServices.scala @@ -0,0 +1,30 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision + +import cats.effect.* + +import io.renku.queue.client.QueueClient +import io.renku.search.solr.client.SearchSolrClient + +final case class TestServices( + messageHandlers: MessageHandlers[IO], + queueClient: QueueClient[IO], + searchClient: SearchSolrClient[IO] +) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupAddedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupAddedProvisioningSpec.scala index c7fd020f..d39b2a27 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupAddedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupAddedProvisioningSpec.scala @@ -19,7 +19,7 @@ package io.renku.search.provision.group import cats.data.NonEmptyList -import cats.effect.{IO, Resource} +import cats.effect.IO import io.renku.events.EventsGenerators import io.renku.search.GeneratorSyntax.* @@ -35,70 +35,73 @@ import org.scalacheck.Gen class GroupAddedProvisioningSpec extends ProvisioningSuite: test("overwrite data for duplicate events"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - id <- IO(ModelGenerators.idGen.generateOne) - _ <- solrClient.deleteIds(NonEmptyList.of(id)) - add1 <- queueClient.enqueue( - queueConfig.groupAdded, - EventsGenerators - .eventMessageGen( - Gen.const(GroupAdded(id, Name("SDSC"), Namespace("sdsc-namespace"), None)) - ) - .generateOne - ) - add2 <- 
queueClient.enqueue( - queueConfig.groupAdded, - EventsGenerators - .eventMessageGen( - Gen.const( - GroupAdded(id, Name("Renku"), Namespace("sdsc-namespace"), None) - ) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + id <- IO(ModelGenerators.idGen.generateOne) + _ <- solrClient.deleteIds(NonEmptyList.of(id)) + add1 <- queueClient.enqueue( + queueConfig.groupAdded, + EventsGenerators + .eventMessageGen( + Gen.const(GroupAdded(id, Name("SDSC"), Namespace("sdsc-namespace"), None)) + ) + .generateOne + ) + add2 <- queueClient.enqueue( + queueConfig.groupAdded, + EventsGenerators + .eventMessageGen( + Gen.const( + GroupAdded(id, Name("Renku"), Namespace("sdsc-namespace"), None) ) - .generateOne - ) - results <- handlers - .makeUpsert[GroupAdded](queueConfig.groupAdded) - .take(2) - .compile - .toList + ) + .generateOne + ) + results <- handler + .makeUpsert[GroupAdded](queueConfig.groupAdded) + .take(2) + .compile + .toList - _ = assert(results.nonEmpty && results.forall(_.isSuccess)) - doc <- solrClient.findById[EntityDocument](CompoundId.groupEntity(id)) - _ = assert(doc.isDefined, "group not found") - group = doc.get.asInstanceOf[GroupDocument] - _ = assertEquals(group.name, Name("Renku")) - yield () - } + _ = assert(results.nonEmpty && results.forall(_.isSuccess)) + doc <- solrClient.findById[EntityDocument](CompoundId.groupEntity(id)) + _ = assert(doc.isDefined, "group not found") + group = doc.get.asInstanceOf[GroupDocument] + _ = assertEquals(group.name, Name("Renku")) + yield () test("can fetch events, decode them, and send them to Solr"): val groupAdded = EventsGenerators.groupAddedGen(prefix = "group-added").generateOne - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - _ <- queueClient.enqueue( - queueConfig.groupAdded, - EventsGenerators.eventMessageGen(Gen.const(groupAdded)).generateOne - ) + for + services <- 
IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - result <- handlers - .makeUpsert[GroupAdded](queueConfig.groupAdded) - .take(10) - .find(_.isSuccess) - .compile - .lastOrError + _ <- queueClient.enqueue( + queueConfig.groupAdded, + EventsGenerators.eventMessageGen(Gen.const(groupAdded)).generateOne + ) - _ = assert(result.isSuccess) + result <- handler + .makeUpsert[GroupAdded](queueConfig.groupAdded) + .take(10) + .find(_.isSuccess) + .compile + .lastOrError - doc <- solrClient.findById[EntityDocument]( - CompoundId.groupEntity(groupAdded.id) - ) - _ = assert(doc.isDefined) - _ = assertEquals( - doc.get.setVersion(DocVersion.Off), - groupAdded.fold(_.toModel(DocVersion.Off)) - ) - yield () - } + _ = assert(result.isSuccess) - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) + doc <- solrClient.findById[EntityDocument]( + CompoundId.groupEntity(groupAdded.id) + ) + _ = assert(doc.isDefined) + _ = assertEquals( + doc.get.setVersion(DocVersion.Off), + groupAdded.fold(_.toModel(DocVersion.Off)) + ) + yield () diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberAddedSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberAddedSpec.scala index 363bcbd4..3bc4c9df 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberAddedSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberAddedSpec.scala @@ -37,47 +37,47 @@ import io.renku.solr.client.QueryString import org.scalacheck.Gen class GroupMemberAddedSpec extends ProvisioningSuite: - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) test("adding member to group and related projects"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, 
solrClient) => - val initialState = DbState.groupWithProjectsGen.generateOne - val role = ModelGenerators.memberRoleGen.generateOne - val newMember = - GroupMemberAdded(initialState.group.id, ModelGenerators.idGen.generateOne, role) - for - _ <- initialState.setup(solrClient) - msg = EventsGenerators.eventMessageGen(Gen.const(newMember)).generateOne - _ <- queueClient.enqueue(queueConfig.groupMemberAdded, msg) - _ <- handlers - .makeGroupMemberUpsert[GroupMemberAdded](queueConfig.groupMemberAdded) - .take(2) // two updates, one for the single group and one for all its projects - .compile - .toList - currentGroup <- solrClient - .findById[EntityDocument]( - CompoundId.groupEntity(initialState.group.id) - ) - .map(_.get.asInstanceOf[GroupDocument]) - _ = assert( - currentGroup.toEntityMembers.getMemberIds(role).contains(newMember.userId), - s"new member '${newMember.userId}' not in group $role" + val initialState = DbState.groupWithProjectsGen.generateOne + val role = ModelGenerators.memberRoleGen.generateOne + val newMember = + GroupMemberAdded(initialState.group.id, ModelGenerators.idGen.generateOne, role) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + _ <- initialState.setup(solrClient) + msg = EventsGenerators.eventMessageGen(Gen.const(newMember)).generateOne + _ <- queueClient.enqueue(queueConfig.groupMemberAdded, msg) + _ <- handler + .makeGroupMemberUpsert[GroupMemberAdded](queueConfig.groupMemberAdded) + .take(2) // two updates, one for the single group and one for all its projects + .compile + .toList + currentGroup <- solrClient + .findById[EntityDocument]( + CompoundId.groupEntity(initialState.group.id) ) + .map(_.get.asInstanceOf[GroupDocument]) + _ = assert( + currentGroup.toEntityMembers.getMemberIds(role).contains(newMember.userId), + s"new member '${newMember.userId}' not in group $role" + ) - currentProjects <- solrClient - 
.query[EntityDocument](initialState.projectQuery) - .map(_.responseBody.docs) - .map(_.map(_.asInstanceOf[ProjectDocument])) - _ = assertEquals(currentProjects.size, initialState.projects.size) - _ = assert( - currentProjects.forall( - _.toGroupMembers.getMemberIds(role).contains(newMember.userId) - ), - s"new member '${newMember.userId}' not in projects group $role" - ) - yield () - } + currentProjects <- solrClient + .query[EntityDocument](initialState.projectQuery) + .map(_.responseBody.docs) + .map(_.map(_.asInstanceOf[ProjectDocument])) + _ = assertEquals(currentProjects.size, initialState.projects.size) + _ = assert( + currentProjects.forall( + _.toGroupMembers.getMemberIds(role).contains(newMember.userId) + ), + s"new member '${newMember.userId}' not in projects group $role" + ) + yield () object GroupMemberAddedSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberRemovedSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberRemovedSpec.scala index 8aac0cca..e5db5fcc 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberRemovedSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberRemovedSpec.scala @@ -37,46 +37,46 @@ import io.renku.solr.client.QueryString import org.scalacheck.Gen class GroupMemberRemovedSpec extends ProvisioningSuite: - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) - test("adding member to group and related projects"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - val initialState = DbState.groupWithProjectsGen.generateOne - val role = ModelGenerators.memberRoleGen.generateOne - val removeMember = GroupMemberRemoved(initialState.group.id, initialState.user) - for - _ <- initialState.setup(solrClient) - msg = 
EventsGenerators.eventMessageGen(Gen.const(removeMember)).generateOne - _ <- queueClient.enqueue(queueConfig.groupMemberRemoved, msg) - _ <- handlers - .makeGroupMemberUpsert[GroupMemberRemoved](queueConfig.groupMemberRemoved) - .take(2) // two updates, one for the single group and one for all its projects - .compile - .toList - currentGroup <- solrClient - .findById[EntityDocument]( - CompoundId.groupEntity(initialState.group.id) - ) - .map(_.get.asInstanceOf[GroupDocument]) - _ = assert( - !currentGroup.toEntityMembers.getMemberIds(role).contains(removeMember.userId), - s"member '${removeMember.userId}' still in group $role" - ) + val initialState = DbState.groupWithProjectsGen.generateOne + val role = ModelGenerators.memberRoleGen.generateOne + val removeMember = GroupMemberRemoved(initialState.group.id, initialState.user) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - currentProjects <- solrClient - .query[EntityDocument](initialState.projectQuery) - .map(_.responseBody.docs) - .map(_.map(_.asInstanceOf[ProjectDocument])) - _ = assertEquals(currentProjects.size, initialState.projects.size) - _ = assert( - currentProjects.forall( - !_.toGroupMembers.getMemberIds(role).contains(removeMember.userId) - ), - s"member '${removeMember.userId}' still in projects group $role" + _ <- initialState.setup(solrClient) + msg = EventsGenerators.eventMessageGen(Gen.const(removeMember)).generateOne + _ <- queueClient.enqueue(queueConfig.groupMemberRemoved, msg) + _ <- handler + .makeGroupMemberUpsert[GroupMemberRemoved](queueConfig.groupMemberRemoved) + .take(2) // two updates, one for the single group and one for all its projects + .compile + .toList + currentGroup <- solrClient + .findById[EntityDocument]( + CompoundId.groupEntity(initialState.group.id) ) - yield () - } + .map(_.get.asInstanceOf[GroupDocument]) + _ = assert( + 
!currentGroup.toEntityMembers.getMemberIds(role).contains(removeMember.userId), + s"member '${removeMember.userId}' still in group $role" + ) + + currentProjects <- solrClient + .query[EntityDocument](initialState.projectQuery) + .map(_.responseBody.docs) + .map(_.map(_.asInstanceOf[ProjectDocument])) + _ = assertEquals(currentProjects.size, initialState.projects.size) + _ = assert( + currentProjects.forall( + !_.toGroupMembers.getMemberIds(role).contains(removeMember.userId) + ), + s"member '${removeMember.userId}' still in projects group $role" + ) + yield () object GroupMemberRemovedSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberUpdatedSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberUpdatedSpec.scala index 7c4d1c4b..de8a4992 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberUpdatedSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupMemberUpdatedSpec.scala @@ -37,49 +37,47 @@ import io.renku.solr.client.QueryString import org.scalacheck.Gen class GroupMemberUpdatedSpec extends ProvisioningSuite: - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) - - override def defaultVerbosity: Int = 2 - test("updating member to group and related projects"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - val initialState = DbState.groupWithProjectsGen.generateOne - val role = ModelGenerators.memberRoleGen.generateOne - val newMember = - GroupMemberUpdated(initialState.group.id, ModelGenerators.idGen.generateOne, role) - for - _ <- initialState.setup(solrClient) - msg = EventsGenerators.eventMessageGen(Gen.const(newMember)).generateOne - _ <- queueClient.enqueue(queueConfig.groupMemberUpdated, msg) - _ <- handlers - 
.makeGroupMemberUpsert[GroupMemberUpdated](queueConfig.groupMemberUpdated) - .take(2) // two updates, one for the single group and one for all its projects - .compile - .toList - currentGroup <- solrClient - .findById[EntityDocument]( - CompoundId.groupEntity(initialState.group.id) - ) - .map(_.get.asInstanceOf[GroupDocument]) - _ = assert( - currentGroup.toEntityMembers.getMemberIds(role).contains(newMember.userId), - s"new member '${newMember.userId}' not in group $role" - ) + val initialState = DbState.groupWithProjectsGen.generateOne + val role = ModelGenerators.memberRoleGen.generateOne + val newMember = + GroupMemberUpdated(initialState.group.id, ModelGenerators.idGen.generateOne, role) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - currentProjects <- solrClient - .query[EntityDocument](initialState.projectQuery) - .map(_.responseBody.docs) - .map(_.map(_.asInstanceOf[ProjectDocument])) - _ = assertEquals(currentProjects.size, initialState.projects.size) - _ = assert( - currentProjects.forall( - _.toGroupMembers.getMemberIds(role).contains(newMember.userId) - ), - s"new member '${newMember.userId}' not in projects group $role" + _ <- initialState.setup(solrClient) + msg = EventsGenerators.eventMessageGen(Gen.const(newMember)).generateOne + _ <- queueClient.enqueue(queueConfig.groupMemberUpdated, msg) + _ <- handler + .makeGroupMemberUpsert[GroupMemberUpdated](queueConfig.groupMemberUpdated) + .take(2) // two updates, one for the single group and one for all its projects + .compile + .toList + currentGroup <- solrClient + .findById[EntityDocument]( + CompoundId.groupEntity(initialState.group.id) ) - yield () - } + .map(_.get.asInstanceOf[GroupDocument]) + _ = assert( + currentGroup.toEntityMembers.getMemberIds(role).contains(newMember.userId), + s"new member '${newMember.userId}' not in group $role" + ) + + currentProjects <- solrClient + 
.query[EntityDocument](initialState.projectQuery) + .map(_.responseBody.docs) + .map(_.map(_.asInstanceOf[ProjectDocument])) + _ = assertEquals(currentProjects.size, initialState.projects.size) + _ = assert( + currentProjects.forall( + _.toGroupMembers.getMemberIds(role).contains(newMember.userId) + ), + s"new member '${newMember.userId}' not in projects group $role" + ) + yield () object GroupMemberUpdatedSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupRemovedProcessSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupRemovedProcessSpec.scala index e756b92c..a65922a3 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupRemovedProcessSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupRemovedProcessSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.group -import cats.effect.{IO, Resource} +import cats.effect.IO import cats.syntax.all.* import io.renku.events.EventsGenerators @@ -49,29 +49,29 @@ class GroupRemovedProcessSpec extends ProvisioningSuite: "and turn all the group's project to partial in Solr" ): val initialState = GroupRemovedProcessSpec.DbState.create.generateOne - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - _ <- initialState.setup(solrClient) - init <- initialState.loadByIds(solrClient) - _ = assertEquals(init.setVersion(DocVersion.NotExists), initialState) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - _ <- queueClient.enqueue( - queueConfig.groupRemoved, - EventsGenerators - .eventMessageGen(Gen.const(GroupRemoved(initialState.group.id))) - .generateOne - ) + _ <- initialState.setup(solrClient) + init <- initialState.loadByIds(solrClient) + _ = assertEquals(init.setVersion(DocVersion.NotExists), initialState) - _ 
<- handlers.makeGroupRemoved.take(1).compile.toList + _ <- queueClient.enqueue( + queueConfig.groupRemoved, + EventsGenerators + .eventMessageGen(Gen.const(GroupRemoved(initialState.group.id))) + .generateOne + ) - projects <- initialState.loadPartialProjects(solrClient) - _ = assertEquals(projects.size, initialState.projects.size) - _ = assertEquals(projects.map(_.id), initialState.projects.map(_.id)) - yield () - } + _ <- handler.makeGroupRemoved.take(1).compile.toList - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) + projects <- initialState.loadPartialProjects(solrClient) + _ = assertEquals(projects.size, initialState.projects.size) + _ = assertEquals(projects.map(_.id), initialState.projects.map(_.id)) + yield () object GroupRemovedProcessSpec: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupUpdatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupUpdatedProvisioningSpec.scala index 7bc879a7..b0bd321d 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/group/GroupUpdatedProvisioningSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.group -import cats.effect.{IO, Resource} +import cats.effect.IO import cats.syntax.all.* import io.renku.events.EventsGenerators @@ -41,36 +41,36 @@ class GroupUpdatedProvisioningSpec extends ProvisioningSuite: GroupUpdatedProvisioningSpec.testCases.foreach { tc => test(s"can fetch events, decode them, and update group doc in Solr: $tc"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - _ <- tc.dbState.create(solrClient) - - _ <- queueClient.enqueue( - queueConfig.groupUpdated, - EventsGenerators.eventMessageGen(Gen.const(tc.groupUpdated)).generateOne - ) - - _ <- handlers - 
.makeGroupUpdated(queueConfig.groupUpdated) - .take(2) - .compile - .toList - - group <- loadGroupPartialOrEntity(solrClient, tc.groupId) - _ = assertEquals(group.size, 1) - _ = assert(tc.checkExpectedGroup(group.head)) - - projects <- tc.projectQuery - .map(q => solrClient.queryAll[EntityDocument](q).compile.toList) - .getOrElse(IO(Nil)) - _ = assert(tc.checkExpectedProjects(group.head, projects)) - yield () - } + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + _ <- tc.dbState.create(solrClient) + + _ <- queueClient.enqueue( + queueConfig.groupUpdated, + EventsGenerators.eventMessageGen(Gen.const(tc.groupUpdated)).generateOne + ) + + _ <- handler + .makeGroupUpdated(queueConfig.groupUpdated) + .take(2) + .compile + .toList + + group <- loadGroupPartialOrEntity(solrClient, tc.groupId) + _ = assertEquals(group.size, 1) + _ = assert(tc.checkExpectedGroup(group.head)) + + projects <- tc.projectQuery + .map(q => solrClient.queryAll[EntityDocument](q).compile.toList) + .getOrElse(IO(Nil)) + _ = assert(tc.checkExpectedProjects(group.head, projects)) + yield () } - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) - object GroupUpdatedProvisioningSpec: enum DbState: case Empty diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/handler/PushToSolrSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/handler/PushToSolrSpec.scala index a5b2a731..fc6a457c 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/handler/PushToSolrSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/handler/PushToSolrSpec.scala @@ -34,10 +34,13 @@ import io.renku.search.solr.client.SearchSolrSuite import io.renku.search.solr.documents.User as UserDocument import io.renku.solr.client.DocVersion import 
io.renku.solr.client.UpsertResponse +import munit.CatsEffectSuite import org.scalacheck.Gen import scribe.Scribe -class PushToSolrSpec extends SearchSolrSuite: +class PushToSolrSpec extends CatsEffectSuite with SearchSolrSuite: + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, searchSolrClient) val messageGen: Gen[EventMessage[UserDocument]] = for @@ -59,49 +62,49 @@ class PushToSolrSpec extends SearchSolrSuite: test("honor max retries on conflict"): val reader = PushToSolrSpec.MessageReaderMock() - withSearchSolrClient().use { client => - val pushToSolr = PushToSolr[IO](client, reader) - val counter = Ref.unsafe[IO, Int](0) - val msg = - messageGen.generateOne.map(_.setVersion(DocVersion.Exists): EntityOrPartial) - val maxRetries = 2 - for - r <- pushData(pushToSolr, counter.update(_ + 1))( - msg, - maxRetries - ).compile.lastOrError - runs <- counter.get - _ = assertEquals(runs, maxRetries + 1) - _ = assertEquals(r, UpsertResponse.VersionConflict) - marked <- reader.getProcessed - _ = assert(marked.contains(msg.id)) - yield () - } + val counter = Ref.unsafe[IO, Int](0) + val msg = + messageGen.generateOne.map(_.setVersion(DocVersion.Exists): EntityOrPartial) + val maxRetries = 2 + + for + client <- IO(searchSolrClient()) + pushToSolr = PushToSolr[IO](client, reader) + r <- pushData(pushToSolr, counter.update(_ + 1))( + msg, + maxRetries + ).compile.lastOrError + runs <- counter.get + _ = assertEquals(runs, maxRetries + 1) + _ = assertEquals(r, UpsertResponse.VersionConflict) + marked <- reader.getProcessed + _ = assert(marked.contains(msg.id)) + yield () test("try until success"): val reader = PushToSolrSpec.MessageReaderMock() - withSearchSolrClient().use { client => - val pushToSolr = PushToSolr[IO](client, reader) - val msg = messageGen.generateOne.map(_.setVersion(DocVersion.Exists)) - val counter = Ref.unsafe[IO, Int](0) - val post = counter.updateAndGet(_ + 1).flatMap { + val msg = 
messageGen.generateOne.map(_.setVersion(DocVersion.Exists)) + val counter = Ref.unsafe[IO, Int](0) + val msgCast = msg.map(e => e: EntityOrPartial) + val maxRetries = 6 + + for + client <- IO(searchSolrClient()) + post = counter.updateAndGet(_ + 1).flatMap { case n if n == 3 => scribe.cats.io.info(s"inserting now") >> client .upsertSuccess(msg.payload.map(_.setVersion(DocVersion.Off))) .void case _ => IO.unit } - val msgCast = msg.map(e => e: EntityOrPartial) - val maxRetries = 6 - for - r <- pushData(pushToSolr, post)(msgCast, maxRetries).compile.lastOrError - runs <- counter.get - _ = assertEquals(runs, 3) - _ = assert(r.isSuccess) - marked <- reader.getProcessed - _ = assert(marked.contains(msg.id)) - yield () - } + pushToSolr = PushToSolr[IO](client, reader) + r <- pushData(pushToSolr, post)(msgCast, maxRetries).compile.lastOrError + runs <- counter.get + _ = assertEquals(runs, 3) + _ = assert(r.isSuccess) + marked <- reader.getProcessed + _ = assert(marked.contains(msg.id)) + yield () object PushToSolrSpec: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/metrics/DocumentKindGaugeUpdaterSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/metrics/DocumentKindGaugeUpdaterSpec.scala index 2e71eab3..013c5606 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/metrics/DocumentKindGaugeUpdaterSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/metrics/DocumentKindGaugeUpdaterSpec.scala @@ -27,23 +27,23 @@ import io.renku.search.model.EntityType.User import io.renku.search.solr.client.SearchSolrSuite import io.renku.search.solr.client.SolrDocumentGenerators.userDocumentGen import io.renku.search.solr.documents.DocumentKind +import munit.CatsEffectSuite -class DocumentKindGaugeUpdaterSpec extends SearchSolrSuite: +class DocumentKindGaugeUpdaterSpec extends CatsEffectSuite with SearchSolrSuite: + override def munitFixtures: Seq[munit.AnyFixture[?]] = + 
List(solrServer, searchSolrClient) test("update should fetch the data and insert 0 for missing kinds"): - withSearchSolrClient().use { client => - val user = userDocumentGen.generateOne - val gauge = TestGauge(User) - - val gaugeUpdater = new DocumentKindGaugeUpdater[IO](client, gauge) - - for { - _ <- client.upsert(Seq(user.widen)) - _ <- gaugeUpdater.update() - } yield assert { - gauge.acc(DocumentKind.FullEntity) >= 1d && - gauge.acc(DocumentKind.PartialEntity) == 0d - } + val user = userDocumentGen.generateOne + val gauge = TestGauge(User) + for { + client <- IO(searchSolrClient()) + gaugeUpdater = new DocumentKindGaugeUpdater[IO](client, gauge) + _ <- client.upsert(Seq(user.widen)) + _ <- gaugeUpdater.update() + } yield assert { + gauge.acc(DocumentKind.FullEntity) >= 1d && + gauge.acc(DocumentKind.PartialEntity) == 0d } private class TestGauge(override val entityType: EntityType) extends DocumentKindGauge: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala index 35035b11..f192460d 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.project -import cats.effect.{IO, Resource} +import cats.effect.IO import io.renku.events.EventsGenerators import io.renku.search.GeneratorSyntax.* @@ -40,34 +40,33 @@ class AuthorizationAddedProvisioningSpec extends ProvisioningSuite: testCases.foreach { tc => test(s"can fetch events, decode them, and update docs in Solr: $tc"): + for { + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - 
withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for { - _ <- tc.dbState.create(solrClient) - - collector <- BackgroundCollector[SolrDocument]( - loadProjectPartialOrEntity(solrClient, tc.projectId) - ) - _ <- collector.start - - provisioningFiber <- handlers.projectAuthAdded.compile.drain.start - - _ <- queueClient.enqueue( - queueConfig.projectAuthorizationAdded, - EventsGenerators.eventMessageGen(Gen.const(tc.authAdded)).generateOne - ) - _ <- collector.waitUntil(docs => - scribe.debug(s"Check for ${tc.expectedProject}") - docs.exists(tc.checkExpected) - ) - - _ <- provisioningFiber.cancel - } yield () - } - } + _ <- tc.dbState.create(solrClient) + + collector <- BackgroundCollector[SolrDocument]( + loadProjectPartialOrEntity(solrClient, tc.projectId) + ) + _ <- collector.start + + provisioningFiber <- handler.projectAuthAdded.compile.drain.start - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) + _ <- queueClient.enqueue( + queueConfig.projectAuthorizationAdded, + EventsGenerators.eventMessageGen(Gen.const(tc.authAdded)).generateOne + ) + _ <- collector.waitUntil(docs => + scribe.debug(s"Check for ${tc.expectedProject}") + docs.exists(tc.checkExpected) + ) + + _ <- provisioningFiber.cancel + } yield () + } object AuthorizationAddedProvisioningSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala index e8207e7e..ca5e4557 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala @@ -38,34 +38,34 @@ class AuthorizationRemovedProvisioningSpec extends 
ProvisioningSuite: testCases.foreach { tc => test(s"can fetch events, decode them, and update docs in Solr: $tc"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for { - _ <- tc.dbState.create(solrClient) - - collector <- BackgroundCollector[SolrDocument]( - loadProjectPartialOrEntity(solrClient, tc.projectId) - ) - _ <- collector.start - - provisioningFiber <- handlers.projectAuthRemoved.compile.drain.start - - _ <- queueClient.enqueue( - queueConfig.projectAuthorizationRemoved, - EventsGenerators.eventMessageGen(Gen.const(tc.authRemoved)).generateOne - ) - _ <- collector.waitUntil(docs => - scribe.debug(s"Check for ${tc.expectedProject}") - tc.checkExpected(docs) - ) - - _ <- provisioningFiber.cancel - } yield () - } + for { + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + _ <- tc.dbState.create(solrClient) + + collector <- BackgroundCollector[SolrDocument]( + loadProjectPartialOrEntity(solrClient, tc.projectId) + ) + _ <- collector.start + + provisioningFiber <- handler.projectAuthRemoved.compile.drain.start + + _ <- queueClient.enqueue( + queueConfig.projectAuthorizationRemoved, + EventsGenerators.eventMessageGen(Gen.const(tc.authRemoved)).generateOne + ) + _ <- collector.waitUntil(docs => + scribe.debug(s"Check for ${tc.expectedProject}") + tc.checkExpected(docs) + ) + + _ <- provisioningFiber.cancel + } yield () } - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) - object AuthorizationRemovedProvisioningSpec: enum DbState: case Empty diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala index b65bb56b..855c9e18 100644 --- 
a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala @@ -19,7 +19,7 @@ package io.renku.search.provision package project -import cats.effect.{IO, Resource} +import cats.effect.IO import io.renku.events.EventsGenerators import io.renku.search.GeneratorSyntax.* @@ -39,33 +39,33 @@ class AuthorizationUpdatedProvisioningSpec extends ProvisioningSuite: testCases.foreach { tc => test(s"can fetch events, decode them, and update docs in Solr: $tc"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for { - _ <- tc.dbState.create(solrClient) + for { + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - collector <- BackgroundCollector[SolrDocument]( - loadProjectPartialOrEntity(solrClient, tc.projectId) - ) - _ <- collector.start + _ <- tc.dbState.create(solrClient) - provisioningFiber <- handlers.projectAuthUpdated.compile.drain.start + collector <- BackgroundCollector[SolrDocument]( + loadProjectPartialOrEntity(solrClient, tc.projectId) + ) + _ <- collector.start - _ <- queueClient.enqueue( - queueConfig.projectAuthorizationUpdated, - EventsGenerators.eventMessageGen(Gen.const(tc.authUpdated)).generateOne - ) - _ <- collector.waitUntil(docs => - scribe.debug(s"Check for ${tc.expectedProject}") - tc.checkExpected(docs) - ) + provisioningFiber <- handler.projectAuthUpdated.compile.drain.start - _ <- provisioningFiber.cancel - } yield () - } - } + _ <- queueClient.enqueue( + queueConfig.projectAuthorizationUpdated, + EventsGenerators.eventMessageGen(Gen.const(tc.authUpdated)).generateOne + ) + _ <- collector.waitUntil(docs => + scribe.debug(s"Check for ${tc.expectedProject}") + tc.checkExpected(docs) + ) - override def munitFixtures: Seq[Fixture[?]] = - 
List(withRedisClient, withQueueClient, withSearchSolrClient) + _ <- provisioningFiber.cancel + } yield () + } object AuthorizationUpdatedProvisioningSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala index fd644e88..3a279610 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.project -import cats.effect.{IO, Resource} +import cats.effect.IO import io.renku.events.EventsGenerators import io.renku.events.EventsGenerators.projectCreatedGen @@ -42,89 +42,95 @@ import org.scalacheck.Gen class ProjectCreatedProvisioningSpec extends ProvisioningSuite: ProjectCreatedProvisioningSpec.testCases.foreach { tc => test(s"processes message and update solr: $tc"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for { - _ <- tc.dbState.create(solrClient) - - _ <- queueClient.enqueue( - queueConfig.projectCreated, - EventsGenerators - .eventMessageGen(Gen.const(tc.projectCreated)) - .map(_.modifyHeader(_.withContentType(DataContentType.Binary))) - .generateOne - ) - - _ <- handlers - .makeProjectUpsert[ProjectCreated](queueConfig.projectCreated) - .take(1) - .compile - .toList - - doc <- loadProjectPartialOrEntity(solrClient, tc.projectId) - _ = assertEquals( - doc.head.setVersion(DocVersion.Off), - tc.expectedProject.setVersion(DocVersion.Off) - ) - } yield () - } - } + for { + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - test("can fetch events binary encoded, decode them, and send them to Solr"): - 
withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - provisioningFiber <- handlers.projectCreated.compile.drain.start + _ <- tc.dbState.create(solrClient) - created = projectCreatedGen(prefix = "binary").generateOne _ <- queueClient.enqueue( queueConfig.projectCreated, EventsGenerators - .eventMessageGen(Gen.const(created)) + .eventMessageGen(Gen.const(tc.projectCreated)) .map(_.modifyHeader(_.withContentType(DataContentType.Binary))) .generateOne ) - collector <- BackgroundCollector( - solrClient - .findById[EntityDocument](CompoundId.projectEntity(created.id)) - .map(_.toSet) - ) - _ <- collector.start - _ <- collector.waitUntil( - _.map(_.setVersion(DocVersion.Off)) contains created.toModel(DocVersion.Off) - ) - - _ <- provisioningFiber.cancel - yield () - } - test("can fetch events JSON encoded, decode them, and send them to Solr"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - provisioningFiber <- handlers.projectCreated.compile.drain.start + _ <- handler + .makeProjectUpsert[ProjectCreated](queueConfig.projectCreated) + .take(1) + .compile + .toList - created = projectCreatedGen(prefix = "json").generateOne - _ <- queueClient.enqueue( - queueConfig.projectCreated, - EventsGenerators - .eventMessageGen(Gen.const(created)) - .map(_.modifyHeader(_.withContentType(DataContentType.Json))) - .generateOne - ) - collector <- BackgroundCollector( - solrClient - .findById[EntityDocument](CompoundId.projectEntity(created.id)) - .map(_.toSet) - ) - _ <- collector.start - _ <- collector.waitUntil( - _.map(_.setVersion(DocVersion.Off)) contains created.toModel(DocVersion.Off) + doc <- loadProjectPartialOrEntity(solrClient, tc.projectId) + _ = assertEquals( + doc.head.setVersion(DocVersion.Off), + tc.expectedProject.setVersion(DocVersion.Off) ) + } yield () + } + + test("can fetch events binary encoded, decode them, and send them to Solr"): + for + services <- IO(testServices()) + handler = 
services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + provisioningFiber <- handler.projectCreated.compile.drain.start + + created = projectCreatedGen(prefix = "binary").generateOne + _ <- queueClient.enqueue( + queueConfig.projectCreated, + EventsGenerators + .eventMessageGen(Gen.const(created)) + .map(_.modifyHeader(_.withContentType(DataContentType.Binary))) + .generateOne + ) + collector <- BackgroundCollector( + solrClient + .findById[EntityDocument](CompoundId.projectEntity(created.id)) + .map(_.toSet) + ) + _ <- collector.start + _ <- collector.waitUntil( + _.map(_.setVersion(DocVersion.Off)) contains created.toModel(DocVersion.Off) + ) - _ <- provisioningFiber.cancel - yield () - } + _ <- provisioningFiber.cancel + yield () + + test("can fetch events JSON encoded, decode them, and send them to Solr"): + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + provisioningFiber <- handler.projectCreated.compile.drain.start + + created = projectCreatedGen(prefix = "json").generateOne + _ <- queueClient.enqueue( + queueConfig.projectCreated, + EventsGenerators + .eventMessageGen(Gen.const(created)) + .map(_.modifyHeader(_.withContentType(DataContentType.Json))) + .generateOne + ) + collector <- BackgroundCollector( + solrClient + .findById[EntityDocument](CompoundId.projectEntity(created.id)) + .map(_.toSet) + ) + _ <- collector.start + _ <- collector.waitUntil( + _.map(_.setVersion(DocVersion.Off)) contains created.toModel(DocVersion.Off) + ) - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) + _ <- provisioningFiber.cancel + yield () object ProjectCreatedProvisioningSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala 
b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala index fb64f297..66e10675 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala @@ -20,7 +20,7 @@ package io.renku.search.provision.project import scala.concurrent.duration.* -import cats.effect.{IO, Resource} +import cats.effect.IO import fs2.Stream import fs2.concurrent.SignallingRef @@ -42,53 +42,53 @@ import org.scalacheck.Gen class ProjectRemovedProcessSpec extends ProvisioningSuite: test(s"can fetch events, decode them, and remove Solr"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - solrDoc <- SignallingRef.of[IO, Option[EntityDocument]](None) - - provisioningFiber <- handlers.projectRemoved.compile.drain.start - - created = projectCreatedGen(prefix = "remove").generateOne - _ <- solrClient.upsert(Seq(created.toModel(DocVersion.Off).widen)) - - docsCollectorFiber <- - Stream - .awakeEvery[IO](500 millis) - .evalMap(_ => - solrClient.findById[EntityDocument]( - CompoundId.projectEntity(created.id) - ) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + solrDoc <- SignallingRef.of[IO, Option[EntityDocument]](None) + + provisioningFiber <- handler.projectRemoved.compile.drain.start + + created = projectCreatedGen(prefix = "remove").generateOne + _ <- solrClient.upsert(Seq(created.toModel(DocVersion.Off).widen)) + + docsCollectorFiber <- + Stream + .awakeEvery[IO](500 millis) + .evalMap(_ => + solrClient.findById[EntityDocument]( + CompoundId.projectEntity(created.id) ) - .evalMap(e => solrDoc.update(_ => e)) - .compile - .drain - .start - - _ <- solrDoc.waitUntil( - _.nonEmpty - ) - - removed = ProjectRemoved(created.id) - schemaVersion 
= Gen.oneOf(removed.version.toList).generateOne - schema = schemaVersion match - case SchemaVersion.V1 => v1.ProjectRemoved.SCHEMA$ - case SchemaVersion.V2 => v2.ProjectRemoved.SCHEMA$ - - _ <- queueClient.enqueue( - queueConfig.projectRemoved, - EventsGenerators.eventMessageGen(Gen.const(removed)).generateOne - ) - - _ <- solrDoc.waitUntil( - _.isEmpty - ) - - _ <- provisioningFiber.cancel - _ <- docsCollectorFiber.cancel - yield () - } + ) + .evalMap(e => solrDoc.update(_ => e)) + .compile + .drain + .start + + _ <- solrDoc.waitUntil( + _.nonEmpty + ) + + removed = ProjectRemoved(created.id) + schemaVersion = Gen.oneOf(removed.version.toList).generateOne + schema = schemaVersion match + case SchemaVersion.V1 => v1.ProjectRemoved.SCHEMA$ + case SchemaVersion.V2 => v2.ProjectRemoved.SCHEMA$ + + _ <- queueClient.enqueue( + queueConfig.projectRemoved, + EventsGenerators.eventMessageGen(Gen.const(removed)).generateOne + ) + + _ <- solrDoc.waitUntil( + _.isEmpty + ) + + _ <- provisioningFiber.cancel + _ <- docsCollectorFiber.cancel + yield () private lazy val queryProjects = Query(typeIs(EntityType.Project)) - - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala index 019aed91..a6a0352a 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.project -import cats.effect.{IO, Resource} +import cats.effect.IO import cats.syntax.all.* import io.renku.events.EventsGenerators @@ -42,31 +42,31 @@ class ProjectUpdatedProvisioningSpec extends 
ProvisioningSuite: ProjectUpdatedProvisioningSpec.testCases.foreach { tc => test(s"can fetch events, decode them, and update in Solr: $tc"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - _ <- tc.dbState.create(solrClient) - _ <- queueClient.enqueue( - queueConfig.projectUpdated, - EventsGenerators.eventMessageGen(Gen.const(tc.projectUpdated)).generateOne - ) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - _ <- handlers - .makeProjectUpsert[ProjectUpdated](queueConfig.projectUpdated) - .take(1) - .compile - .toList + _ <- tc.dbState.create(solrClient) + _ <- queueClient.enqueue( + queueConfig.projectUpdated, + EventsGenerators.eventMessageGen(Gen.const(tc.projectUpdated)).generateOne + ) - docs <- loadProjectPartialOrEntity(solrClient, tc.projectId) - _ = docs.headOption match - case Some(doc) => - assertEquals(doc.setVersion(DocVersion.Off), tc.expectedProject) - case None => fail("no project document found") - yield () - } - } + _ <- handler + .makeProjectUpsert[ProjectUpdated](queueConfig.projectUpdated) + .take(1) + .compile + .toList - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) + docs <- loadProjectPartialOrEntity(solrClient, tc.projectId) + _ = docs.headOption match + case Some(doc) => + assertEquals(doc.setVersion(DocVersion.Off), tc.expectedProject) + case None => fail("no project document found") + yield () + } object ProjectUpdatedProvisioningSpec: enum DbState: diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala index a0b4432d..bbb20b01 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala +++ 
b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala @@ -19,7 +19,8 @@ package io.renku.search.provision.user import cats.data.NonEmptyList -import cats.effect.{IO, Resource} +import cats.effect.IO +import cats.syntax.all.* import io.renku.events.EventsGenerators import io.renku.search.GeneratorSyntax.* @@ -34,80 +35,83 @@ import io.renku.solr.client.DocVersion import org.scalacheck.Gen class UserAddedProvisioningSpec extends ProvisioningSuite: - test("overwrite data for duplicate events".flaky): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - id <- IO(ModelGenerators.idGen.generateOne) - _ <- solrClient.deleteIds(NonEmptyList.of(id)) - add1 <- queueClient.enqueue( - queueConfig.userAdded, - EventsGenerators - .eventMessageGen( - EventsGenerators - .userAddedGen("ua-", Gen.const(FirstName("john1"))) - .map(_.withId(id)) - ) - .generateOne - ) - results1 <- handlers - .makeUpsert[UserAdded](queueConfig.userAdded) - .take(1) - .compile - .toList + test("overwrite data for duplicate events"): + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - add2 <- queueClient.enqueue( - queueConfig.userAdded, - EventsGenerators - .eventMessageGen( - EventsGenerators - .userAddedGen("ua-", Gen.const(FirstName("john2"))) - .map(_.withId(id)) - ) - .generateOne - ) - results2 <- handlers - .makeUpsert[UserAdded](queueConfig.userAdded) - .take(1) - .compile - .toList - results = results1 ++ results2 + id <- IO(ModelGenerators.idGen.generateOne) + _ <- solrClient.deleteIds(NonEmptyList.of(id)) + add1 <- queueClient.enqueue( + queueConfig.userAdded, + EventsGenerators + .eventMessageGen( + EventsGenerators + .userAddedGen("ua-", Gen.const(FirstName("john1").some)) + .map(_.withId(id)) + ) + .generateOne + ) + results1 <- handler + .makeUpsert[UserAdded](queueConfig.userAdded) + .take(1) + 
.compile + .toList - _ = assert(results.nonEmpty && results.forall(_.isSuccess)) - doc <- solrClient.findById[EntityDocument](CompoundId.userEntity(id)) - _ = assert(doc.isDefined, "user not found") - user = doc.get.asInstanceOf[UserDocument] - _ = assertEquals(user.firstName, Some(FirstName("john2"))) - yield () - } + add2 <- queueClient.enqueue( + queueConfig.userAdded, + EventsGenerators + .eventMessageGen( + EventsGenerators + .userAddedGen("ua-", Gen.const(FirstName("john2").some)) + .map(_.withId(id)) + ) + .generateOne + ) + results2 <- handler + .makeUpsert[UserAdded](queueConfig.userAdded) + .take(1) + .compile + .toList + results = results1 ++ results2 + + _ = assert(results.nonEmpty && results.forall(_.isSuccess)) + doc <- solrClient.findById[EntityDocument](CompoundId.userEntity(id)) + _ = assert(doc.isDefined, "user not found") + user = doc.get.asInstanceOf[UserDocument] + _ = assertEquals(user.firstName, Some(FirstName("john2"))) + yield () test("can fetch events, decode them, and send them to Solr"): val userAdded = EventsGenerators.userAddedGen(prefix = "user-added").generateOne - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - _ <- queueClient.enqueue( - queueConfig.userAdded, - EventsGenerators.eventMessageGen(Gen.const(userAdded)).generateOne - ) + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - result <- handlers - .makeUpsert[UserAdded](queueConfig.userAdded) - .take(10) - .find(_.isSuccess) - .compile - .lastOrError + _ <- queueClient.enqueue( + queueConfig.userAdded, + EventsGenerators.eventMessageGen(Gen.const(userAdded)).generateOne + ) - _ = assert(result.isSuccess) + result <- handler + .makeUpsert[UserAdded](queueConfig.userAdded) + .take(10) + .find(_.isSuccess) + .compile + .lastOrError - doc <- solrClient.findById[EntityDocument]( - CompoundId.userEntity(userAdded.id) - ) - _ = 
assert(doc.isDefined) - _ = assertEquals( - doc.get.setVersion(DocVersion.Off), - userAdded.toModel(DocVersion.Off) - ) - yield () - } + _ = assert(result.isSuccess) - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) + doc <- solrClient.findById[EntityDocument]( + CompoundId.userEntity(userAdded.id) + ) + _ = assert(doc.isDefined) + _ = assertEquals( + doc.get.setVersion(DocVersion.Off), + userAdded.toModel(DocVersion.Off) + ) + yield () diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala index 675418b6..45a35694 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.user -import cats.effect.{IO, Resource} +import cats.effect.IO import io.renku.events.EventsGenerators import io.renku.events.EventsGenerators.* @@ -45,56 +45,56 @@ class UserRemovedProcessSpec extends ProvisioningSuite: UserRemovedProcessSpec.testCases.foreach { tc => test(s"process user removed: $tc"): - withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => - for - _ <- tc.setup(solrClient) - - msgId <- queueClient.enqueue( - queueConfig.userRemoved, - EventsGenerators.eventMessageGen(Gen.const(tc.userRemovedEvent)).generateOne - ) - - _ <- handlers.makeUserRemoved.take(1).compile.drain - - users <- loadPartialOrEntity(solrClient, EntityType.User, tc.userId) - _ = assert(users.isEmpty) - - projects <- solrClient - .queryAll[EntityDocument](QueryData(QueryString(tc.projectsQuery.value))) - .compile - .toList - groups <- solrClient - .queryAll[EntityDocument](QueryData(QueryString(tc.groupsQuery.value))) - .compile - .toList - - _ = 
assertEquals(projects.size, tc.initialProjectsCount) - _ = assertEquals(groups.size, tc.initialGroupsCount) - - _ = assert( - projects.forall { - case p: ProjectDocument => !p.toEntityMembers.contains(tc.userId) - case _ => false - }, - "user is still in project members" - ) - _ = assert( - groups.forall { - case g: GroupDocument => !g.toEntityMembers.contains(tc.userId) - case _ => false - }, - "user is still in group members" - ) - - last <- queueClient.findLastProcessed(queueConfig.userRemoved) - _ = assertEquals(last, Some(msgId)) - yield () - } + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient + + _ <- tc.setup(solrClient) + + msgId <- queueClient.enqueue( + queueConfig.userRemoved, + EventsGenerators.eventMessageGen(Gen.const(tc.userRemovedEvent)).generateOne + ) + + _ <- handler.makeUserRemoved.take(1).compile.drain + + users <- loadPartialOrEntity(solrClient, EntityType.User, tc.userId) + _ = assert(users.isEmpty) + + projects <- solrClient + .queryAll[EntityDocument](QueryData(QueryString(tc.projectsQuery.value))) + .compile + .toList + groups <- solrClient + .queryAll[EntityDocument](QueryData(QueryString(tc.groupsQuery.value))) + .compile + .toList + + _ = assertEquals(projects.size, tc.initialProjectsCount) + _ = assertEquals(groups.size, tc.initialGroupsCount) + + _ = assert( + projects.forall { + case p: ProjectDocument => !p.toEntityMembers.contains(tc.userId) + case _ => false + }, + "user is still in project members" + ) + _ = assert( + groups.forall { + case g: GroupDocument => !g.toEntityMembers.contains(tc.userId) + case _ => false + }, + "user is still in group members" + ) + + last <- queueClient.findLastProcessed(queueConfig.userRemoved) + _ = assertEquals(last, Some(msgId)) + yield () } - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) - object UserRemovedProcessSpec: enum DbState: 
diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala index e68ec578..d737a0e9 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala @@ -18,7 +18,7 @@ package io.renku.search.provision.user -import cats.effect.{IO, Resource} +import cats.effect.IO import cats.syntax.all.* import io.renku.events.EventsGenerators @@ -37,37 +37,40 @@ class UserUpdatedProvisioningSpec extends ProvisioningSuite: case TestCase(name, updateF) => val userAdded = EventsGenerators.userAddedGen(prefix = "user-update").generateOne test(s"can fetch events, decode them, and update in Solr in case of $name"): - withMessageHandlers(queueConfig).use { case (handler, queueClient, solrClient) => - for - collector <- BackgroundCollector[EntityDocument]( - solrClient - .findById[EntityDocument]( - CompoundId.userEntity(userAdded.id) - ) - .map(_.toSet) - ) - _ <- collector.start + for + services <- IO(testServices()) + handler = services.messageHandlers + queueClient = services.queueClient + solrClient = services.searchClient - provisioningFiber <- handler.userUpdated.compile.drain.start + collector <- BackgroundCollector[EntityDocument]( + solrClient + .findById[EntityDocument]( + CompoundId.userEntity(userAdded.id) + ) + .map(_.toSet) + ) + _ <- collector.start - orig = userAdded.toModel(DocVersion.Off) - _ <- solrClient.upsert(Seq(orig.widen)) + provisioningFiber <- handler.userUpdated.compile.drain.start - userUpdated = updateF(userAdded) - _ <- queueClient.enqueue( - queueConfig.userUpdated, - EventsGenerators.eventMessageGen(Gen.const(userUpdated)).generateOne - ) + orig = userAdded.toModel(DocVersion.Off) + _ <- solrClient.upsert(Seq(orig.widen)) - _ <- 
collector.waitUntil(docs => - docs.map(_.setVersion(DocVersion.Off)) contains userUpdated - .toModel(orig) - .setVersion(DocVersion.Off) - ) + userUpdated = updateF(userAdded) + _ <- queueClient.enqueue( + queueConfig.userUpdated, + EventsGenerators.eventMessageGen(Gen.const(userUpdated)).generateOne + ) - _ <- provisioningFiber.cancel - yield () - } + _ <- collector.waitUntil(docs => + docs.map(_.setVersion(DocVersion.Off)) contains userUpdated + .toModel(orig) + .setVersion(DocVersion.Off) + ) + + _ <- provisioningFiber.cancel + yield () } private case class TestCase(name: String, f: UserAdded => UserUpdated) @@ -154,6 +157,3 @@ class UserUpdatedProvisioningSpec extends ProvisioningSuite: ) } ) - - override def munitFixtures: Seq[Fixture[?]] = - List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala index 95c1e656..abfd2ebe 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala +++ b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala @@ -33,81 +33,81 @@ import io.renku.search.solr.documents.EntityOps.* import io.renku.search.solr.schema.EntityDocumentSchema.Fields import io.renku.solr.client.DocVersion import io.renku.solr.client.QueryData +import munit.CatsEffectSuite -class SearchSolrClientSpec extends SearchSolrSuite: +class SearchSolrClientSpec extends CatsEffectSuite with SearchSolrSuite: + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, searchSolrClient) test("be able to insert and fetch a Project document"): - withSearchSolrClient().use { client => - val project = - projectDocumentGen("solr-project", "solr project description").generateOne - for { - _ <- client.upsert(Seq(project.widen)) - qr <- client.queryEntity( - 
SearchRole.Admin, - Query.parse("solr").toOption.get, - 10, - 0 - ) - _ = assert( - qr.responseBody.docs.map( - _.noneScore - .assertVersionNot(DocVersion.NotExists) - .setVersion(DocVersion.NotExists) - ) contains project - ) - gr <- client.findById[EntityDocument](CompoundId.projectEntity(project.id)) - _ = assert( - gr.map( - _.assertVersionNot(DocVersion.NotExists).setVersion(DocVersion.NotExists) - ) contains project - ) - } yield () - } + val project = + projectDocumentGen("solr-project", "solr project description").generateOne + for { + client <- IO(searchSolrClient()) + _ <- client.upsert(Seq(project.widen)) + qr <- client.queryEntity( + SearchRole.Admin, + Query.parse("solr").toOption.get, + 10, + 0 + ) + _ = assert( + qr.responseBody.docs.map( + _.noneScore + .assertVersionNot(DocVersion.NotExists) + .setVersion(DocVersion.NotExists) + ) contains project + ) + gr <- client.findById[EntityDocument](CompoundId.projectEntity(project.id)) + _ = assert( + gr.map( + _.assertVersionNot(DocVersion.NotExists).setVersion(DocVersion.NotExists) + ) contains project + ) + } yield () test("be able to insert and fetch a User document"): - withSearchSolrClient().use { client => - val firstName = users.FirstName("Johnny") - val user = userDocumentGen.generateOne.copy(firstName = firstName.some) - for { - _ <- client.upsert(Seq(user.widen)) - qr <- client.queryEntity( - SearchRole.Admin, - Query.parse(firstName.value).toOption.get, - 10, - 0 - ) - _ = assert( - qr.responseBody.docs.map( - _.noneScore - .assertVersionNot(DocVersion.NotExists) - .setVersion(DocVersion.NotExists) - ) contains user - ) - gr <- client.findById[EntityDocument](CompoundId.userEntity(user.id)) - _ = assert( - gr.map( - _.assertVersionNot(DocVersion.NotExists).setVersion(DocVersion.NotExists) - ) contains user - ) - } yield () - } + val firstName = users.FirstName("Johnny") + val user = userDocumentGen.generateOne.copy(firstName = firstName.some) + for { + client <- IO(searchSolrClient()) + _ <- 
client.upsert(Seq(user.widen)) + qr <- client.queryEntity( + SearchRole.Admin, + Query.parse(firstName.value).toOption.get, + 10, + 0 + ) + _ = assert( + qr.responseBody.docs.map( + _.noneScore + .assertVersionNot(DocVersion.NotExists) + .setVersion(DocVersion.NotExists) + ) contains user + ) + gr <- client.findById[EntityDocument](CompoundId.userEntity(user.id)) + _ = assert( + gr.map( + _.assertVersionNot(DocVersion.NotExists).setVersion(DocVersion.NotExists) + ) contains user + ) + } yield () test("be able to find by the given query"): - withSearchSolrClient().use { client => - val firstName = users.FirstName("Ian") - val user = userDocumentGen.generateOne.copy(firstName = firstName.some) - case class UserId(id: String) - given Decoder[UserId] = deriveDecoder[UserId] - for { - _ <- client.upsert(Seq(user.widen)) - gr <- client.query[UserId]( - QueryData( - s"firstName:$firstName", - filter = Seq.empty, - limit = 100, - offset = 0 - ).withFields(Fields.id) - ) - _ = assert(gr.responseBody.docs.map(_.id) contains user.id.value) - } yield () - } + val firstName = users.FirstName("Ian") + val user = userDocumentGen.generateOne.copy(firstName = firstName.some) + case class UserId(id: String) + given Decoder[UserId] = deriveDecoder[UserId] + for { + client <- IO(searchSolrClient()) + _ <- client.upsert(Seq(user.widen)) + gr <- client.query[UserId]( + QueryData( + s"firstName:$firstName", + filter = Seq.empty, + limit = 100, + offset = 0 + ).withFields(Fields.id) + ) + _ = assert(gr.responseBody.docs.map(_.id) contains user.id.value) + } yield () diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrSuite.scala b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrSuite.scala index 69ec628e..d3b24d36 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrSuite.scala +++ b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrSuite.scala @@ 
-19,57 +19,22 @@ package io.renku.search.solr.client import cats.effect.* -import cats.effect.std.CountDownLatch import io.renku.search.solr.schema.Migrations import io.renku.solr.client.SolrClient import io.renku.solr.client.migration.SchemaMigrator import io.renku.solr.client.util.SolrClientBaseSuite -abstract class SearchSolrSuite extends SolrClientBaseSuite: +trait SearchSolrSuite extends SolrClientBaseSuite: - abstract class SolrFixture - extends Fixture[Resource[IO, SearchSolrClient[IO]]]("search-solr") + val solrClientWithSchemaR: Resource[IO, SolrClient[IO]] = + solrClientR.evalTap(c => SchemaMigrator[IO](c).migrate(Migrations.all)) - val withSearchSolrClient: SolrFixture = new SolrFixture: + val searchSolrR: Resource[IO, SearchSolrClient[IO]] = + solrClientWithSchemaR.map(new SearchSolrClientImpl[IO](_)) - def apply(): Resource[IO, SearchSolrClient[IO]] = - SolrClient[IO](solrConfig.copy(core = server.searchCoreName)) - .evalTap(SearchSolrSuite.setupSchema(server.searchCoreName, _)) - .map(new SearchSolrClientImpl[IO](_)) + val solrClientWithSchema = + ResourceSuiteLocalFixture("solr-client-with-schema", solrClientWithSchemaR) - override def beforeAll(): Unit = - server.start() - - override def afterAll(): Unit = - server.stop() - - override def munitFixtures: Seq[Fixture[?]] = - List(withSearchSolrClient) - -object SearchSolrSuite: - private val logger = scribe.cats.io - private case class MigrateState(tasks: Map[String, IO[Unit]] = Map.empty): - def add(name: String, task: IO[Unit]): MigrateState = copy(tasks.updated(name, task)) - private val currentState: Ref[IO, MigrateState] = - Ref.unsafe(MigrateState()) - - private def setupSchema(coreName: String, client: SolrClient[IO]): IO[Unit] = - CountDownLatch[IO](1).flatMap { latch => - currentState.flatModify { state => - state.tasks.get(coreName) match - case Some(t) => - ( - state, - logger - .info(s"Waiting for migrations to finish for core $coreName") - .flatMap(_ => t) - ) - case None => - val task = 
SchemaMigrator[IO](client) - .migrate(Migrations.all) - .flatTap(_ => latch.release) - val wait = latch.await - (state.add(coreName, wait), task) - } - } + val searchSolrClient = + ResourceSuiteLocalFixture("search-solr-client", searchSolrR) diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/query/LuceneQueryInterpreterSpec.scala b/modules/search-solr-client/src/test/scala/io/renku/search/solr/query/LuceneQueryInterpreterSpec.scala index c827ae84..5cdef386 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/query/LuceneQueryInterpreterSpec.scala +++ b/modules/search-solr-client/src/test/scala/io/renku/search/solr/query/LuceneQueryInterpreterSpec.scala @@ -34,16 +34,14 @@ import io.renku.search.solr.SearchRole import io.renku.search.solr.client.SearchSolrSuite import io.renku.search.solr.documents.DocumentKind import io.renku.search.solr.schema.EntityDocumentSchema.Fields -import io.renku.search.solr.schema.Migrations -import io.renku.solr.client.migration.SchemaMigrator import io.renku.solr.client.{QueryData, QueryString} import munit.ScalaCheckEffectSuite import org.scalacheck.Test.Parameters import org.scalacheck.effect.PropF class LuceneQueryInterpreterSpec extends SearchSolrSuite with ScalaCheckEffectSuite: - - override protected lazy val coreName: String = server.testCoreName2 + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, solrClientWithSchema) override protected def scalaCheckTestParameters: Parameters = super.scalaCheckTestParameters.withMinSuccessfulTests(20) @@ -63,9 +61,6 @@ class LuceneQueryInterpreterSpec extends SearchSolrSuite with ScalaCheckEffectSu val q = LuceneQueryInterpreter[Id].run(ctx, userQuery) QueryData(QueryString(q.query.value, 10, 0)).withSort(q.sort) - def withSolr = - withSolrClient().evalTap(c => SchemaMigrator[IO](c).migrate(Migrations.all).void) - test("amend query with auth data"): assertEquals( query("help", SearchRole.user(model.Id("13"))).query, @@ 
-91,22 +86,23 @@ class LuceneQueryInterpreterSpec extends SearchSolrSuite with ScalaCheckEffectSu ) assertEquals(query("", SearchRole.Admin).query, "(_kind:fullentity)") - test("valid content_all query"): - withSolr.use { client => + test("valid content_all query") { + IO(solrClientWithSchema()).flatMap { client => List("hello world", "bla:test") .map(query(_)) .traverse_(client.query[Unit]) } + } - test("generate valid solr queries"): + test("generate valid solr queries") { PropF.forAllF(QueryGenerators.query) { q => - withSolr - .use { client => - client.query(query(q)).void - } + IO(solrClientWithSchema()).flatMap { client => + client.query(query(q)).void + } } + } - test("sort only"): + test("sort only") { val doc = Map( Fields.id.name -> "one", Fields.name.name -> "John", @@ -115,46 +111,45 @@ class LuceneQueryInterpreterSpec extends SearchSolrSuite with ScalaCheckEffectSu ) PropF.forAllF(QueryGenerators.sortTerm) { order => val q = Query(Query.Segment.Sort(order)) - withSolr.use { client => - for { - _ <- client.upsert(Seq(doc)) - r <- client.query[Map[String, String]]( - query(q).withFields(Fields.id, Fields.name, Fields.entityType).withLimit(2) - ) - _ = assert( - r.responseBody.docs.nonEmpty, - s"Expected at least one result, but got: ${r.responseBody.docs}" - ) - } yield () - } - } - - test("auth scenarios"): - withSearchSolrClient().use { solr => for { - data <- AuthTestData.generate - _ <- solr.upsert(data.all) - query = data.queryAll - - publicEntities <- solr.queryEntity(SearchRole.Anonymous, query, 50, 0) - user1Entities <- solr.queryEntity(SearchRole.User(data.user1.id), query, 50, 0) - user2Entities <- solr.queryEntity(SearchRole.User(data.user2.id), query, 50, 0) - user3Entities <- solr.queryEntity(SearchRole.User(data.user3.id), query, 50, 0) - _ = assertEquals( - publicEntities.responseBody.docs.map(_.id).toSet, - data.publicEntityIds.toSet - ) - _ = assertEquals( - user1Entities.responseBody.docs.map(_.id).toSet, - data.user1EntityIds.toSet - ) 
- _ = assertEquals( - user2Entities.responseBody.docs.map(_.id).toSet, - data.user2EntityIds.toSet + client <- IO(solrClientWithSchema()) + _ <- client.upsert(Seq(doc)) + r <- client.query[Map[String, String]]( + query(q).withFields(Fields.id, Fields.name, Fields.entityType).withLimit(2) ) - _ = assertEquals( - user3Entities.responseBody.docs.map(_.id).toSet, - data.user3EntityIds.toSet + _ = assert( + r.responseBody.docs.nonEmpty, + s"Expected at least one result, but got: ${r.responseBody.docs}" ) } yield () } + } + + test("auth scenarios"): + for { + solr <- IO(searchSolrClient()) + data <- AuthTestData.generate + _ <- solr.upsert(data.all) + query = data.queryAll + + publicEntities <- solr.queryEntity(SearchRole.Anonymous, query, 50, 0) + user1Entities <- solr.queryEntity(SearchRole.User(data.user1.id), query, 50, 0) + user2Entities <- solr.queryEntity(SearchRole.User(data.user2.id), query, 50, 0) + user3Entities <- solr.queryEntity(SearchRole.User(data.user3.id), query, 50, 0) + _ = assertEquals( + publicEntities.responseBody.docs.map(_.id).toSet, + data.publicEntityIds.toSet + ) + _ = assertEquals( + user1Entities.responseBody.docs.map(_.id).toSet, + data.user1EntityIds.toSet + ) + _ = assertEquals( + user2Entities.responseBody.docs.map(_.id).toSet, + data.user2EntityIds.toSet + ) + _ = assertEquals( + user3Entities.responseBody.docs.map(_.id).toSet, + data.user3EntityIds.toSet + ) + } yield () diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/CoreResponse.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/CoreResponse.scala new file mode 100644 index 00000000..fc6f1d64 --- /dev/null +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/CoreResponse.scala @@ -0,0 +1,39 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.solr.client + +import io.bullet.borer.Decoder +import io.bullet.borer.NullOptions.given +import io.bullet.borer.derivation.{MapBasedCodecs, key} + +final case class CoreResponse( + responseHeader: ResponseHeader, + error: Option[CoreResponse.Error] = None, + core: Option[String] = None +): + + def isSuccess: Boolean = error.isEmpty + +object CoreResponse: + + final case class Error(@key("msg") message: String) + object Error: + given Decoder[Error] = MapBasedCodecs.deriveDecoder + + given Decoder[CoreResponse] = MapBasedCodecs.deriveDecoder diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/CreateCoreRequest.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/CreateCoreRequest.scala new file mode 100644 index 00000000..5eeabe48 --- /dev/null +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/CreateCoreRequest.scala @@ -0,0 +1,40 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.solr.client + +import io.bullet.borer.Encoder +import io.bullet.borer.Writer +import io.bullet.borer.derivation.MapBasedCodecs + +final case class CreateCoreRequest( + name: String, + configSet: String +) + +object CreateCoreRequest: + + given Encoder[CreateCoreRequest] = + given inner: Encoder[CreateCoreRequest] = + MapBasedCodecs.deriveEncoder[CreateCoreRequest] + new Encoder[CreateCoreRequest] { + def write(w: Writer, v: CreateCoreRequest): Writer = + w.writeMapOpen(1) + w.writeMapMember("create", v) + w.writeMapClose() + } diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/DeleteCoreRequest.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/DeleteCoreRequest.scala new file mode 100644 index 00000000..52399811 --- /dev/null +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/DeleteCoreRequest.scala @@ -0,0 +1,40 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.solr.client + +import io.bullet.borer.Encoder +import io.bullet.borer.Writer +import io.bullet.borer.derivation.MapBasedCodecs + +final case class DeleteCoreRequest( + deleteInstanceDir: Boolean, + deleteIndex: Boolean +) + +object DeleteCoreRequest: + + given Encoder[DeleteCoreRequest] = + given inner: Encoder[DeleteCoreRequest] = + MapBasedCodecs.deriveEncoder[DeleteCoreRequest] + new Encoder[DeleteCoreRequest] { + def write(w: Writer, v: DeleteCoreRequest): Writer = + w.writeMapOpen(1) + w.writeMapMember("unload", v) + w.writeMapClose() + } diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/SchemaResponse.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/SchemaResponse.scala new file mode 100644 index 00000000..1dfac7ee --- /dev/null +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/SchemaResponse.scala @@ -0,0 +1,32 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.renku.solr.client + +import io.bullet.borer.Decoder +import io.bullet.borer.derivation.MapBasedCodecs +import io.renku.solr.client.schema.* +import io.renku.solr.client.schema.SchemaJsonCodec.given + +final case class SchemaResponse( + responseHeader: ResponseHeader, + schema: CoreSchema +) + +object SchemaResponse: + given Decoder[SchemaResponse] = MapBasedCodecs.deriveDecoder diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClient.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClient.scala index fbd1cca6..ceb9ddd3 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClient.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClient.scala @@ -47,6 +47,12 @@ trait SolrClient[F[_]]: def findById[A: Decoder](id: String, other: String*): F[GetByIdResponse[A]] + def getSchema: F[SchemaResponse] + + def getStatus: F[StatusResponse] + def createCore(name: String, configSet: Option[String] = None): F[Unit] + def deleteCore(name: String): F[Unit] + object SolrClient: def apply[F[_]: Async: Network](config: SolrConfig): Resource[F, SolrClient[F]] = ClientBuilder(EmberClientBuilder.default[F]) diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClientImpl.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClientImpl.scala index 3d2e5620..7ea8fd83 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClientImpl.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/SolrClientImpl.scala @@ -37,7 +37,7 @@ private class SolrClientImpl[F[_]: Async](val config: SolrConfig, underlying: Cl with BorerEntityJsonCodec with SolrEntityCodec: private val logger = scribe.cats.effect[F] - private val solrUrl: Uri = config.baseUrl / config.core + private val solrUrl: Uri = config.baseUrl / "solr" / config.core def modifySchema(cmds: Seq[SchemaCommand], onErrorLog: ResponseLogging): F[Unit] = val req = Method @@ -97,6 
+97,40 @@ private class SolrClientImpl[F[_]: Async](val config: SolrConfig, underlying: Cl ) } + def getSchema: F[SchemaResponse] = + val url = solrUrl / "schema" + val req = Method.GET(url) + underlying.expect[SchemaResponse](req) + + def getStatus: F[StatusResponse] = + val url = config.baseUrl / "api" / "cores" + val req = Method.GET(url) + underlying.expect[StatusResponse](req) + + def createCore(name: String, configSet: Option[String]): F[Unit] = + val url = config.baseUrl / "api" / "cores" + val req = Method + .POST(CreateCoreRequest(name, configSet.getOrElse("_default")), url) + .withBasicAuth(credentials) + underlying.fetchAs[CoreResponse](req).flatMap { resp => + resp.error.map(_.message) match + case Some(msg) => + Async[F].raiseError(new Exception(s"Creating core '$name' failed: $msg")) + case None => ().pure[F] + } + + def deleteCore(name: String): F[Unit] = + val url = config.baseUrl / "api" / "cores" / name + val req = Method + .POST(DeleteCoreRequest(true, true), url) + .withBasicAuth(credentials) + underlying.fetchAs[CoreResponse](req).flatMap { resp => + resp.error.map(_.message) match + case Some(msg) => + Async[F].raiseError(new Exception(s"Deleting core '$name' failed: $msg")) + case None => ().pure[F] + } + private def makeUpdateUrl = (solrUrl / "update") .withQueryParam("overwrite", "true") diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/StatusResponse.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/StatusResponse.scala new file mode 100644 index 00000000..439b6a8e --- /dev/null +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/StatusResponse.scala @@ -0,0 +1,65 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.solr.client + +import java.time.Instant +import java.time.format.DateTimeParseException + +import cats.syntax.all.* + +import io.bullet.borer.Decoder +import io.bullet.borer.derivation.MapBasedCodecs + +final case class StatusResponse( + responseHeader: ResponseHeader, + status: Map[String, StatusResponse.CoreStatus] = Map.empty +) + +object StatusResponse: + private given Decoder[Instant] = + Decoder.forString.mapEither { v => + Either + .catchOnly[DateTimeParseException](Instant.parse(v)) + .leftMap(_.getMessage) + } + + final case class IndexStatus( + numDocs: Long, + maxDoc: Long, + version: Long, + current: Boolean, + segmentCount: Long, + hasDeletions: Boolean, + sizeInBytes: Long + ) + + object IndexStatus: + given Decoder[IndexStatus] = MapBasedCodecs.deriveDecoder + + final case class CoreStatus( + name: String, + uptime: Long, + startTime: Instant, + index: IndexStatus + ) + + object CoreStatus: + given Decoder[CoreStatus] = MapBasedCodecs.deriveDecoder + + given Decoder[StatusResponse] = MapBasedCodecs.deriveDecoder diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Analyzer.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Analyzer.scala index 9e5e1e0e..333d3558 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Analyzer.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Analyzer.scala @@ -33,6 +33,12 @@ object Analyzer: case Query case None + object AnalyzerType: + def fromString(str: String): Either[String, 
AnalyzerType] = + AnalyzerType.values + .find(_.productPrefix.equalsIgnoreCase(str)) + .toRight(s"Invalid analyzer type: $str") + def index(tokenizer: Tokenizer, filters: Filter*): Analyzer = Analyzer(tokenizer, AnalyzerType.Index, filters) diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/CoreSchema.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/CoreSchema.scala new file mode 100644 index 00000000..c85b4e71 --- /dev/null +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/CoreSchema.scala @@ -0,0 +1,29 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.renku.solr.client.schema + +final case class CoreSchema( + name: String, + version: Double, + uniqueKey: FieldName, + fieldTypes: List[FieldType] = Nil, + fields: List[Field] = Nil, + dynamicFields: List[DynamicFieldRule] = Nil, + copyFields: List[CopyFieldRule] = Nil +) diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Field.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Field.scala index 51d8cac0..50de9bbf 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Field.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/Field.scala @@ -18,37 +18,25 @@ package io.renku.solr.client.schema -import io.bullet.borer.Encoder -import io.bullet.borer.derivation.MapBasedCodecs.deriveEncoder +import io.bullet.borer.derivation.MapBasedCodecs import io.bullet.borer.derivation.key +import io.bullet.borer.{Decoder, Encoder} final case class Field( name: FieldName, @key("type") typeName: TypeName, - required: Boolean, - indexed: Boolean, - stored: Boolean, - multiValued: Boolean, - uninvertible: Boolean, - docValues: Boolean + required: Boolean = false, + indexed: Boolean = true, + stored: Boolean = true, + multiValued: Boolean = false, + uninvertible: Boolean = true, + docValues: Boolean = false ): def makeMultiValued: Field = copy(multiValued = true) object Field: - - def apply(name: FieldName, typeName: TypeName): Field = - Field( - name = name, - typeName = typeName, - required = false, - indexed = true, - stored = true, - multiValued = false, - uninvertible = true, - docValues = false - ) - def apply(name: FieldName, fieldType: FieldType): Field = apply(name, fieldType.name) - given Encoder[Field] = deriveEncoder + given Encoder[Field] = MapBasedCodecs.deriveEncoder + given Decoder[Field] = MapBasedCodecs.deriveDecoder diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldName.scala 
b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldName.scala index 26ffc4ee..de42cbb9 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldName.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldName.scala @@ -18,6 +18,7 @@ package io.renku.solr.client.schema +import io.bullet.borer.Decoder import io.bullet.borer.Encoder opaque type FieldName = String @@ -30,3 +31,4 @@ object FieldName: extension (self: FieldName) def name: String = self given Encoder[FieldName] = Encoder.forString + given Decoder[FieldName] = Decoder.forString diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldTypeClass.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldTypeClass.scala index 5598f483..583e4283 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldTypeClass.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/FieldTypeClass.scala @@ -18,6 +18,7 @@ package io.renku.solr.client.schema +import io.bullet.borer.Decoder import io.bullet.borer.Encoder opaque type FieldTypeClass = String @@ -43,3 +44,4 @@ object FieldTypeClass: val nestedPath: FieldTypeClass = "solr.NestPathField" given Encoder[FieldTypeClass] = Encoder.forString + given Decoder[FieldTypeClass] = Decoder.forString diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/SchemaJsonCodec.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/SchemaJsonCodec.scala index 87186821..9c2d8c80 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/SchemaJsonCodec.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/SchemaJsonCodec.scala @@ -19,20 +19,37 @@ package io.renku.solr.client.schema import io.bullet.borer.NullOptions.given -import io.bullet.borer.derivation.MapBasedCodecs.deriveEncoder -import io.bullet.borer.{Encoder, Writer} +import 
io.bullet.borer.derivation.MapBasedCodecs +import io.bullet.borer.{Decoder, Encoder, Writer} import io.renku.solr.client.schema.SchemaCommand.Element trait SchemaJsonCodec { - given Encoder[Tokenizer] = deriveEncoder - given Encoder[Filter] = deriveEncoder + given Encoder[Tokenizer] = MapBasedCodecs.deriveEncoder + given Decoder[Tokenizer] = MapBasedCodecs.deriveDecoder + + given Encoder[Filter] = MapBasedCodecs.deriveEncoder + given Decoder[Filter] = MapBasedCodecs.deriveDecoder + given Encoder[Analyzer.AnalyzerType] = Encoder.forString.contramap(_.productPrefix.toLowerCase) - given Encoder[Analyzer] = deriveEncoder - given Encoder[FieldType] = deriveEncoder - given Encoder[DynamicFieldRule] = deriveEncoder - given Encoder[CopyFieldRule] = deriveEncoder + given Decoder[Analyzer.AnalyzerType] = + Decoder.forString.mapEither(Analyzer.AnalyzerType.fromString) + + given Encoder[Analyzer] = MapBasedCodecs.deriveEncoder + given Decoder[Analyzer] = MapBasedCodecs.deriveDecoder + + given Encoder[FieldType] = MapBasedCodecs.deriveEncoder + given Decoder[FieldType] = MapBasedCodecs.deriveDecoder + + given Encoder[DynamicFieldRule] = MapBasedCodecs.deriveEncoder + given Decoder[DynamicFieldRule] = MapBasedCodecs.deriveDecoder + + given Encoder[CopyFieldRule] = MapBasedCodecs.deriveEncoder + given Decoder[CopyFieldRule] = MapBasedCodecs.deriveDecoder + + given Decoder[CoreSchema] = MapBasedCodecs.deriveDecoder + given Encoder[CoreSchema] = MapBasedCodecs.deriveEncoder given (using e1: Encoder[Field], diff --git a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/TypeName.scala b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/TypeName.scala index 0b51ade0..83f07962 100644 --- a/modules/solr-client/src/main/scala/io/renku/solr/client/schema/TypeName.scala +++ b/modules/solr-client/src/main/scala/io/renku/solr/client/schema/TypeName.scala @@ -18,6 +18,7 @@ package io.renku.solr.client.schema +import io.bullet.borer.Decoder import 
io.bullet.borer.Encoder opaque type TypeName = String @@ -28,3 +29,4 @@ object TypeName: extension (self: TypeName) def name: String = self given Encoder[TypeName] = Encoder.forString + given Decoder[TypeName] = Decoder.forString diff --git a/modules/solr-client/src/test/resources/schema-response.json b/modules/solr-client/src/test/resources/schema-response.json new file mode 100644 index 00000000..facd6513 --- /dev/null +++ b/modules/solr-client/src/test/resources/schema-response.json @@ -0,0 +1,2264 @@ +{ + "responseHeader": { + "status": 0, + "QTime": 36 + }, + "schema": { + "name": "default-config", + "version": 1.6, + "uniqueKey": "id", + "fieldTypes": [ + { + "name": "SearchDateTime", + "class": "DatePointField" + }, + { + "name": "SearchId", + "class": "StrField", + "docValues": true + }, + { + "name": "SearchString", + "class": "StrField", + "docValues": true + }, + { + "name": "SearchText", + "class": "TextField", + "analyzer": { + "tokenizer": { + "name": "uax29UrlEmail" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop" + }, + { + "name": "englishMinimalStem" + }, + { + "name": "asciiFolding" + } + ] + } + }, + { + "name": "SearchTextAll", + "class": "TextField", + "multiValued": true, + "analyzer": { + "tokenizer": { + "name": "uax29UrlEmail" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop" + }, + { + "name": "englishMinimalStem" + }, + { + "name": "asciiFolding" + } + ] + } + }, + { + "name": "_nest_path_", + "class": "solr.NestPathField", + "maxCharsForDocValues": "-1", + "omitNorms": true, + "omitTermFreqAndPositions": true, + "stored": false, + "multiValued": false + }, + { + "name": "ancestor_path", + "class": "solr.TextField", + "indexAnalyzer": { + "tokenizer": { + "name": "keyword" + } + }, + "queryAnalyzer": { + "tokenizer": { + "name": "pathHierarchy", + "delimiter": "/" + } + } + }, + { + "name": "binary", + "class": "solr.BinaryField" + }, + { + "name": "boolean", + "class": "solr.BoolField", + 
"sortMissingLast": true + }, + { + "name": "booleans", + "class": "solr.BoolField", + "sortMissingLast": true, + "multiValued": true + }, + { + "name": "delimited_payloads_float", + "class": "solr.TextField", + "indexed": true, + "stored": false, + "analyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "delimitedPayload", + "encoder": "float" + } + ] + } + }, + { + "name": "delimited_payloads_int", + "class": "solr.TextField", + "indexed": true, + "stored": false, + "analyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "delimitedPayload", + "encoder": "integer" + } + ] + } + }, + { + "name": "delimited_payloads_string", + "class": "solr.TextField", + "indexed": true, + "stored": false, + "analyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "delimitedPayload", + "encoder": "identity" + } + ] + } + }, + { + "name": "descendent_path", + "class": "solr.TextField", + "indexAnalyzer": { + "tokenizer": { + "name": "pathHierarchy", + "delimiter": "/" + } + }, + "queryAnalyzer": { + "tokenizer": { + "name": "keyword" + } + } + }, + { + "name": "ignored", + "class": "solr.StrField", + "indexed": false, + "stored": false, + "multiValued": true + }, + { + "name": "location", + "class": "solr.LatLonPointSpatialField", + "docValues": true + }, + { + "name": "location_rpt", + "class": "solr.SpatialRecursivePrefixTreeFieldType", + "geo": "true", + "maxDistErr": "0.001", + "distErrPct": "0.025", + "distanceUnits": "kilometers" + }, + { + "name": "lowercase", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "keyword" + }, + "filters": [ + { + "name": "lowercase" + } + ] + } + }, + { + "name": "pdate", + "class": "solr.DatePointField", + "docValues": true + }, + { + "name": "pdates", + "class": "solr.DatePointField", + "docValues": true, + "multiValued": true + }, + { + "name": "pdouble", + "class": "solr.DoublePointField", + 
"docValues": true + }, + { + "name": "pdoubles", + "class": "solr.DoublePointField", + "docValues": true, + "multiValued": true + }, + { + "name": "pfloat", + "class": "solr.FloatPointField", + "docValues": true + }, + { + "name": "pfloats", + "class": "solr.FloatPointField", + "docValues": true, + "multiValued": true + }, + { + "name": "phonetic_en", + "class": "solr.TextField", + "indexed": true, + "stored": false, + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "inject": "false", + "name": "doubleMetaphone" + } + ] + } + }, + { + "name": "pint", + "class": "solr.IntPointField", + "docValues": true + }, + { + "name": "pints", + "class": "solr.IntPointField", + "docValues": true, + "multiValued": true + }, + { + "name": "plong", + "class": "solr.LongPointField", + "docValues": true + }, + { + "name": "plongs", + "class": "solr.LongPointField", + "docValues": true, + "multiValued": true + }, + { + "name": "point", + "class": "solr.PointType", + "subFieldSuffix": "_d", + "dimension": "2" + }, + { + "name": "random", + "class": "solr.RandomSortField", + "indexed": true + }, + { + "name": "rank", + "class": "solr.RankField" + }, + { + "name": "string", + "class": "solr.StrField", + "sortMissingLast": true, + "docValues": true + }, + { + "name": "strings", + "class": "solr.StrField", + "sortMissingLast": true, + "docValues": true, + "multiValued": true + }, + { + "name": "text_ar", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_ar.txt" + }, + { + "name": "arabicNormalization" + }, + { + "name": "arabicStem" + } + ] + } + }, + { + "name": "text_bg", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + 
"ignoreCase": "true", + "words": "lang/stopwords_bg.txt" + }, + { + "name": "bulgarianStem" + } + ] + } + }, + { + "name": "text_ca", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "articles": "lang/contractions_ca.txt", + "name": "elision", + "ignoreCase": "true" + }, + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_ca.txt" + }, + { + "name": "snowballPorter", + "language": "Catalan" + } + ] + } + }, + { + "name": "text_cjk", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "CJKWidth" + }, + { + "name": "lowercase" + }, + { + "name": "CJKBigram" + } + ] + } + }, + { + "name": "text_cz", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_cz.txt" + }, + { + "name": "czechStem" + } + ] + } + }, + { + "name": "text_da", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_da.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "snowballPorter", + "language": "Danish" + } + ] + } + }, + { + "name": "text_de", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_de.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "germanNormalization" + }, + { + "name": "germanLightStem" + } + ] + } + }, + { + "name": "text_el", + "class": "solr.TextField", + "positionIncrementGap": "100", + 
"analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "greekLowercase" + }, + { + "name": "stop", + "ignoreCase": "false", + "words": "lang/stopwords_el.txt" + }, + { + "name": "greekStem" + } + ] + } + }, + { + "name": "text_en", + "class": "solr.TextField", + "positionIncrementGap": "100", + "indexAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_en.txt" + }, + { + "name": "lowercase" + }, + { + "name": "englishPossessive" + }, + { + "protected": "protwords.txt", + "name": "keywordMarker" + }, + { + "name": "porterStem" + } + ] + }, + "queryAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "synonymGraph", + "expand": "true", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_en.txt" + }, + { + "name": "lowercase" + }, + { + "name": "englishPossessive" + }, + { + "protected": "protwords.txt", + "name": "keywordMarker" + }, + { + "name": "porterStem" + } + ] + } + }, + { + "name": "text_en_splitting", + "class": "solr.TextField", + "autoGeneratePhraseQueries": "true", + "positionIncrementGap": "100", + "indexAnalyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_en.txt" + }, + { + "catenateAll": "0", + "name": "wordDelimiterGraph", + "splitOnCaseChange": "1", + "catenateNumbers": "1", + "catenateWords": "1", + "generateNumberParts": "1", + "generateWordParts": "1" + }, + { + "name": "lowercase" + }, + { + "protected": "protwords.txt", + "name": "keywordMarker" + }, + { + "name": "porterStem" + }, + { + "name": "flattenGraph" + } + ] + }, + "queryAnalyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "synonymGraph", + "expand": "true", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": 
"stop", + "ignoreCase": "true", + "words": "lang/stopwords_en.txt" + }, + { + "catenateAll": "0", + "name": "wordDelimiterGraph", + "splitOnCaseChange": "1", + "catenateNumbers": "0", + "catenateWords": "0", + "generateNumberParts": "1", + "generateWordParts": "1" + }, + { + "name": "lowercase" + }, + { + "protected": "protwords.txt", + "name": "keywordMarker" + }, + { + "name": "porterStem" + } + ] + } + }, + { + "name": "text_en_splitting_tight", + "class": "solr.TextField", + "autoGeneratePhraseQueries": "true", + "positionIncrementGap": "100", + "indexAnalyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "synonymGraph", + "expand": "false", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_en.txt" + }, + { + "generateNumberParts": "0", + "catenateAll": "0", + "generateWordParts": "0", + "catenateNumbers": "1", + "catenateWords": "1", + "name": "wordDelimiterGraph" + }, + { + "name": "lowercase" + }, + { + "protected": "protwords.txt", + "name": "keywordMarker" + }, + { + "name": "englishMinimalStem" + }, + { + "name": "removeDuplicates" + }, + { + "name": "flattenGraph" + } + ] + }, + "queryAnalyzer": { + "tokenizer": { + "name": "whitespace" + }, + "filters": [ + { + "name": "synonymGraph", + "expand": "false", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_en.txt" + }, + { + "generateNumberParts": "0", + "catenateAll": "0", + "generateWordParts": "0", + "catenateNumbers": "1", + "catenateWords": "1", + "name": "wordDelimiterGraph" + }, + { + "name": "lowercase" + }, + { + "protected": "protwords.txt", + "name": "keywordMarker" + }, + { + "name": "englishMinimalStem" + }, + { + "name": "removeDuplicates" + } + ] + } + }, + { + "name": "text_es", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + 
"filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_es.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "spanishLightStem" + } + ] + } + }, + { + "name": "text_et", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_et.txt" + }, + { + "name": "snowballPorter", + "language": "Estonian" + } + ] + } + }, + { + "name": "text_eu", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_eu.txt" + }, + { + "name": "snowballPorter", + "language": "Basque" + } + ] + } + }, + { + "name": "text_fa", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "charFilters": [ + { + "name": "persian" + } + ], + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "arabicNormalization" + }, + { + "name": "persianNormalization" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_fa.txt" + } + ] + } + }, + { + "name": "text_fi", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_fi.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "snowballPorter", + "language": "Finnish" + } + ] + } + }, + { + "name": "text_fr", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "articles": "lang/contractions_fr.txt", + "name": "elision", + "ignoreCase": "true" + }, + { + "name": "lowercase" + }, + { + 
"words": "lang/stopwords_fr.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "frenchLightStem" + } + ] + } + }, + { + "name": "text_ga", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "articles": "lang/contractions_ga.txt", + "name": "elision", + "ignoreCase": "true" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/hyphenations_ga.txt" + }, + { + "name": "irishLowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_ga.txt" + }, + { + "name": "snowballPorter", + "language": "Irish" + } + ] + } + }, + { + "name": "text_gen_sort", + "class": "solr.SortableTextField", + "positionIncrementGap": "100", + "multiValued": true, + "indexAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "stopwords.txt" + }, + { + "name": "lowercase" + } + ] + }, + "queryAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "stopwords.txt" + }, + { + "name": "synonymGraph", + "expand": "true", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": "lowercase" + } + ] + } + }, + { + "name": "text_general", + "class": "solr.TextField", + "positionIncrementGap": "100", + "multiValued": true, + "indexAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "stopwords.txt" + }, + { + "name": "lowercase" + } + ] + }, + "queryAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "stopwords.txt" + }, + { + "name": "synonymGraph", + "expand": "true", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": "lowercase" + } + ] + } + }, + { + "name": "text_general_rev", + "class": 
"solr.TextField", + "positionIncrementGap": "100", + "indexAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "stop", + "ignoreCase": "true", + "words": "stopwords.txt" + }, + { + "name": "lowercase" + }, + { + "withOriginal": "true", + "maxPosAsterisk": "3", + "maxFractionAsterisk": "0.33", + "name": "reversedWildcard", + "maxPosQuestion": "2" + } + ] + }, + "queryAnalyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "synonymGraph", + "expand": "true", + "ignoreCase": "true", + "synonyms": "synonyms.txt" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "stopwords.txt" + }, + { + "name": "lowercase" + } + ] + } + }, + { + "name": "text_gl", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_gl.txt" + }, + { + "name": "galicianStem" + } + ] + } + }, + { + "name": "text_hi", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "indicNormalization" + }, + { + "name": "hindiNormalization" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_hi.txt" + }, + { + "name": "hindiStem" + } + ] + } + }, + { + "name": "text_hu", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_hu.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "snowballPorter", + "language": "Hungarian" + } + ] + } + }, + { + "name": "text_hy", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { 
+ "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_hy.txt" + }, + { + "name": "snowballPorter", + "language": "Armenian" + } + ] + } + }, + { + "name": "text_id", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_id.txt" + }, + { + "name": "indonesianStem", + "stemDerivational": "true" + } + ] + } + }, + { + "name": "text_it", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "articles": "lang/contractions_it.txt", + "name": "elision", + "ignoreCase": "true" + }, + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_it.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "italianLightStem" + } + ] + } + }, + { + "name": "text_ja", + "class": "solr.TextField", + "autoGeneratePhraseQueries": "false", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "japanese", + "mode": "search" + }, + "filters": [ + { + "name": "japaneseBaseForm" + }, + { + "name": "japanesePartOfSpeechStop", + "tags": "lang/stoptags_ja.txt" + }, + { + "name": "cjkWidth" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_ja.txt" + }, + { + "name": "japaneseKatakanaStem", + "minimumLength": "4" + }, + { + "name": "lowercase" + } + ] + } + }, + { + "name": "text_ko", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "decompoundMode": "discard", + "outputUnknownUnigrams": "false", + "name": "korean" + }, + "filters": [ + { + "name": "koreanPartOfSpeechStop" + }, + { + "name": "koreanReadingForm" + }, + { + "name": "lowercase" + } + ] + } + }, + { + "name": "text_lv", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + 
"name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_lv.txt" + }, + { + "name": "latvianStem" + } + ] + } + }, + { + "name": "text_nl", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_nl.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "dictionary": "lang/stemdict_nl.txt", + "name": "stemmerOverride", + "ignoreCase": "false" + }, + { + "name": "snowballPorter", + "language": "Dutch" + } + ] + } + }, + { + "name": "text_no", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_no.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "snowballPorter", + "language": "Norwegian" + } + ] + } + }, + { + "name": "text_pt", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_pt.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "portugueseLightStem" + } + ] + } + }, + { + "name": "text_ro", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_ro.txt" + }, + { + "name": "snowballPorter", + "language": "Romanian" + } + ] + } + }, + { + "name": "text_ru", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_ru.txt", + 
"name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "snowballPorter", + "language": "Russian" + } + ] + } + }, + { + "name": "text_sv", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "words": "lang/stopwords_sv.txt", + "name": "stop", + "format": "snowball", + "ignoreCase": "true" + }, + { + "name": "snowballPorter", + "language": "Swedish" + } + ] + } + }, + { + "name": "text_th", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "thai" + }, + "filters": [ + { + "name": "lowercase" + }, + { + "name": "stop", + "ignoreCase": "true", + "words": "lang/stopwords_th.txt" + } + ] + } + }, + { + "name": "text_tr", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "standard" + }, + "filters": [ + { + "name": "turkishLowercase" + }, + { + "name": "stop", + "ignoreCase": "false", + "words": "lang/stopwords_tr.txt" + }, + { + "name": "snowballPorter", + "language": "Turkish" + } + ] + } + }, + { + "name": "text_ws", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "name": "whitespace" + } + } + } + ], + "fields": [ + { + "name": "_kind", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "_nest_parent_", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "_nest_path_", + "type": "_nest_path_" + }, + { + "name": "_root_", + "type": "string", + "indexed": true, + "stored": false, + "docValues": false + }, + { + "name": "_text_", + "type": "text_general", + "multiValued": true, + "indexed": true, + "stored": false + }, + { + "name": 
"_type", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "_version_", + "type": "plong", + "indexed": false, + "stored": false + }, + { + "name": "content_all", + "type": "SearchTextAll", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "createdBy", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "creationDate", + "type": "SearchDateTime", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "currentSchemaVersion", + "type": "plong", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "description", + "type": "SearchText", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "editors", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "firstName", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "groupEditors", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "groupOwners", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "groupViewers", + "type": "SearchId", + "uninvertible": true, + 
"docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "id", + "type": "string", + "multiValued": false, + "indexed": true, + "required": true, + "stored": true + }, + { + "name": "keywords", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "lastName", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "members", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "members_all", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "name", + "type": "SearchText", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "namespace", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "owners", + "type": "SearchId", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "repositories", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "score", + "type": "text_general" + }, + { + "name": "slug", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "viewers", + "type": "SearchId", + "uninvertible": true, + 
"docValues": false, + "multiValued": true, + "indexed": true, + "required": false, + "stored": true + }, + { + "name": "visibility", + "type": "SearchString", + "uninvertible": true, + "docValues": false, + "multiValued": false, + "indexed": true, + "required": false, + "stored": true + } + ], + "dynamicFields": [ + { + "name": "*_txt_en_split_tight", + "type": "text_en_splitting_tight", + "indexed": true, + "stored": true + }, + { + "name": "*_descendent_path", + "type": "descendent_path", + "indexed": true, + "stored": true + }, + { + "name": "*_ancestor_path", + "type": "ancestor_path", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_en_split", + "type": "text_en_splitting", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_sort", + "type": "text_gen_sort", + "indexed": true, + "stored": true + }, + { + "name": "ignored_*", + "type": "ignored" + }, + { + "name": "*_txt_rev", + "type": "text_general_rev", + "indexed": true, + "stored": true + }, + { + "name": "*_phon_en", + "type": "phonetic_en", + "indexed": true, + "stored": true + }, + { + "name": "*_s_lower", + "type": "lowercase", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_cjk", + "type": "text_cjk", + "indexed": true, + "stored": true + }, + { + "name": "random_*", + "type": "random" + }, + { + "name": "*_t_sort", + "type": "text_gen_sort", + "multiValued": false, + "indexed": true, + "stored": true + }, + { + "name": "*_txt_en", + "type": "text_en", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ar", + "type": "text_ar", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_bg", + "type": "text_bg", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ca", + "type": "text_ca", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_cz", + "type": "text_cz", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_da", + "type": "text_da", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_de", + "type": 
"text_de", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_el", + "type": "text_el", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_es", + "type": "text_es", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_et", + "type": "text_et", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_eu", + "type": "text_eu", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_fa", + "type": "text_fa", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_fi", + "type": "text_fi", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_fr", + "type": "text_fr", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ga", + "type": "text_ga", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_gl", + "type": "text_gl", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_hi", + "type": "text_hi", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_hu", + "type": "text_hu", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_hy", + "type": "text_hy", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_id", + "type": "text_id", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_it", + "type": "text_it", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ja", + "type": "text_ja", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ko", + "type": "text_ko", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_lv", + "type": "text_lv", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_nl", + "type": "text_nl", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_no", + "type": "text_no", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_pt", + "type": "text_pt", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ro", + "type": "text_ro", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_ru", + "type": "text_ru", + "indexed": true, + "stored": true + }, + { + 
"name": "*_txt_sv", + "type": "text_sv", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_th", + "type": "text_th", + "indexed": true, + "stored": true + }, + { + "name": "*_txt_tr", + "type": "text_tr", + "indexed": true, + "stored": true + }, + { + "name": "*_point", + "type": "point", + "indexed": true, + "stored": true + }, + { + "name": "*_srpt", + "type": "location_rpt", + "indexed": true, + "stored": true + }, + { + "name": "attr_*", + "type": "text_general", + "multiValued": true, + "indexed": true, + "stored": true + }, + { + "name": "*_dts", + "type": "pdates", + "indexed": true, + "stored": true + }, + { + "name": "*_txt", + "type": "text_general", + "indexed": true, + "stored": true + }, + { + "name": "*_str", + "type": "strings", + "docValues": true, + "indexed": false, + "stored": false, + "useDocValuesAsStored": false + }, + { + "name": "*_dpf", + "type": "delimited_payloads_float", + "indexed": true, + "stored": true + }, + { + "name": "*_dpi", + "type": "delimited_payloads_int", + "indexed": true, + "stored": true + }, + { + "name": "*_dps", + "type": "delimited_payloads_string", + "indexed": true, + "stored": true + }, + { + "name": "*_is", + "type": "pints", + "indexed": true, + "stored": true + }, + { + "name": "*_ss", + "type": "strings", + "indexed": true, + "stored": true + }, + { + "name": "*_ls", + "type": "plongs", + "indexed": true, + "stored": true + }, + { + "name": "*_bs", + "type": "booleans", + "indexed": true, + "stored": true + }, + { + "name": "*_fs", + "type": "pfloats", + "indexed": true, + "stored": true + }, + { + "name": "*_ds", + "type": "pdoubles", + "indexed": true, + "stored": true + }, + { + "name": "*_dt", + "type": "pdate", + "indexed": true, + "stored": true + }, + { + "name": "*_ws", + "type": "text_ws", + "indexed": true, + "stored": true + }, + { + "name": "*_i", + "type": "pint", + "indexed": true, + "stored": true + }, + { + "name": "*_s", + "type": "string", + "indexed": true, + "stored": true + }, 
+ { + "name": "*_l", + "type": "plong", + "indexed": true, + "stored": true + }, + { + "name": "*_b", + "type": "boolean", + "indexed": true, + "stored": true + }, + { + "name": "*_f", + "type": "pfloat", + "indexed": true, + "stored": true + }, + { + "name": "*_d", + "type": "pdouble", + "indexed": true, + "stored": true + }, + { + "name": "*_t", + "type": "text_general", + "multiValued": false, + "indexed": true, + "stored": true + }, + { + "name": "*_p", + "type": "location", + "indexed": true, + "stored": true + } + ], + "copyFields": [ + { + "source": "description", + "dest": "content_all" + }, + { + "source": "editors", + "dest": "members_all" + }, + { + "source": "firstName", + "dest": "content_all" + }, + { + "source": "groupEditors", + "dest": "members_all" + }, + { + "source": "groupOwners", + "dest": "members_all" + }, + { + "source": "groupViewers", + "dest": "members_all" + }, + { + "source": "keywords", + "dest": "content_all" + }, + { + "source": "lastName", + "dest": "content_all" + }, + { + "source": "members", + "dest": "members_all" + }, + { + "source": "name", + "dest": "content_all" + }, + { + "source": "namespace", + "dest": "content_all" + }, + { + "source": "owners", + "dest": "members_all" + }, + { + "source": "repositories", + "dest": "content_all" + }, + { + "source": "slug", + "dest": "content_all" + }, + { + "source": "viewers", + "dest": "members_all" + }, + { + "source": "score", + "dest": "score_str", + "maxChars": 256 + } + ] + } +} diff --git a/modules/solr-client/src/test/scala/io/renku/solr/client/SearchCaseInsensitiveSpec.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/SearchCaseInsensitiveSpec.scala index 648846a7..3ef33396 100644 --- a/modules/solr-client/src/test/scala/io/renku/solr/client/SearchCaseInsensitiveSpec.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/SearchCaseInsensitiveSpec.scala @@ -27,10 +27,14 @@ import io.bullet.borer.derivation.key import 
io.renku.solr.client.SearchCaseInsensitiveSpec.TestData import io.renku.solr.client.schema.* import io.renku.solr.client.util.SolrClientBaseSuite +import munit.CatsEffectSuite -class SearchCaseInsensitiveSpec extends SolrClientBaseSuite: +class SearchCaseInsensitiveSpec extends CatsEffectSuite with SolrClientBaseSuite: private val logger = scribe.cats.io - override protected lazy val coreName: String = server.testCoreName2 + + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, solrClient) + private val migrations = Seq( SchemaCommand.Add(FieldType.text(TypeName("my_text_field"), Analyzer.defaultSearch)), SchemaCommand.Add(Field(FieldName("my_name"), TypeName("my_text_field"))) @@ -43,27 +47,27 @@ class SearchCaseInsensitiveSpec extends SolrClientBaseSuite: Seq(TypeName("my_text_field")) ) - test("search case insensitive"): - withSolrClient().use { client => - for { - _ <- truncate(client) - _ <- client.modifySchema(migrations) - _ <- client.upsert(TestData.sample) + test("search case insensitive") { + for { + client <- IO(solrClient()) + _ <- truncate(client) + _ <- client.modifySchema(migrations) + _ <- client.upsert(TestData.sample) - // find pogacar without this Č character - r1 <- client.query[TestData](QueryString("my_name:pogacar")) - _ = assertEquals(r1.responseBody.docs.head, TestData.get(11)) - // find pogi with that Č character - r2 <- client.query[TestData](QueryString("my_name:POGAČAR")) - _ = assertEquals(r2.responseBody.docs.head, TestData.get(11)) - // find with umlaut - r3 <- client.query[TestData](QueryString("my_name:über")) - _ = assertEquals(r3.responseBody.docs.head, TestData.get(31)) - // find without umlaut - r4 <- client.query[TestData](QueryString("my_name:uber")) - _ = assertEquals(r4.responseBody.docs.head, TestData.get(31)) - } yield () - } + // find pogacar without this Č character + r1 <- client.query[TestData](QueryString("my_name:pogacar")) + _ = assertEquals(r1.responseBody.docs.head, TestData.get(11)) + // 
find pogi with that Č character + r2 <- client.query[TestData](QueryString("my_name:POGAČAR")) + _ = assertEquals(r2.responseBody.docs.head, TestData.get(11)) + // find with umlaut + r3 <- client.query[TestData](QueryString("my_name:über")) + _ = assertEquals(r3.responseBody.docs.head, TestData.get(31)) + // find without umlaut + r4 <- client.query[TestData](QueryString("my_name:uber")) + _ = assertEquals(r4.responseBody.docs.head, TestData.get(31)) + } yield () + } object SearchCaseInsensitiveSpec: def idQuery: String = s"id:${getClass.getSimpleName}*" diff --git a/modules/solr-client/src/test/scala/io/renku/solr/client/SolrClientSpec.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/SolrClientSpec.scala index 612e0a7a..691857b9 100644 --- a/modules/solr-client/src/test/scala/io/renku/solr/client/SolrClientSpec.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/SolrClientSpec.scala @@ -33,40 +33,48 @@ import io.renku.solr.client.SolrClientSpec.{Course, Room} import io.renku.solr.client.facet.{Facet, Facets} import io.renku.solr.client.schema.* import io.renku.solr.client.util.SolrClientBaseSuite +import munit.CatsEffectSuite import munit.ScalaCheckEffectSuite import org.scalacheck.Gen import org.scalacheck.effect.PropF -class SolrClientSpec extends SolrClientBaseSuite with ScalaCheckEffectSuite: - test("optimistic locking: fail if exists"): - withSolrClient().use { client => - val c0 = Course("c1", "fp in scala", DocVersion.NotExists) - for { - _ <- client.deleteIds(NonEmptyList.of(c0.id)) - r0 <- client.upsert(Seq(c0)) - _ = assert(r0.isSuccess, clue = "Expected successful insert") - - rs <- client.findById[Course](c0.id) - fetched = rs.responseBody.docs.head - _ = assert( - fetched.version.asLong > 0, - clue = "stored entity version must be > 0" - ) - _ = assert( - fetched.copy(version = c0.version) == c0, - clue = "stored entity not as expected" - ) - - r1 <- client.upsert(Seq(c0)) - _ = assertEquals( - r1, - 
UpsertResponse.VersionConflict, - clue = "Expected VersionConflict" - ) - } yield () - } - - test("use schema for inserting and querying"): +class SolrClientSpec + extends CatsEffectSuite + with SolrClientBaseSuite + with ScalaCheckEffectSuite: + + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, solrClient) + + test("optimistic locking: fail if exists") { + val c0 = Course("c1", "fp in scala", DocVersion.NotExists) + for { + client <- IO(solrClient()) + _ <- client.deleteIds(NonEmptyList.of(c0.id)) + r0 <- client.upsert(Seq(c0)) + _ = assert(r0.isSuccess, clue = "Expected successful insert") + + rs <- client.findById[Course](c0.id) + fetched = rs.responseBody.docs.head + _ = assert( + fetched.version.asLong > 0, + clue = "stored entity version must be > 0" + ) + _ = assert( + fetched.copy(version = c0.version) == c0, + clue = "stored entity not as expected" + ) + + r1 <- client.upsert(Seq(c0)) + _ = assertEquals( + r1, + UpsertResponse.VersionConflict, + clue = "Expected VersionConflict" + ) + } yield () + } + + test("use schema for inserting and querying") { val cmds = Seq( SchemaCommand.Add(FieldType.text(TypeName("roomText"), Analyzer.classic)), SchemaCommand.Add(FieldType.int(TypeName("roomInt"))), @@ -75,68 +83,97 @@ class SolrClientSpec extends SolrClientBaseSuite with ScalaCheckEffectSuite: SchemaCommand.Add(Field(FieldName("roomSeats"), TypeName("roomInt"))) ) - withSolrClient().use { client => - val room = Room(UUID.randomUUID().toString, "meeting room", "room for meetings", 56) - for { - _ <- truncateAll(client)( - List("roomName", "roomDescription", "roomSeats").map(FieldName.apply), - List("roomText", "roomInt").map(TypeName.apply) - ) - _ <- client.modifySchema(cmds) - _ <- client.upsert[Room](Seq(room)) - qr <- client.query[Room](QueryData(QueryString("_type_s:Room"))) - _ = qr.responseBody.docs contains room - ir <- client.findById[Room](room.id) - _ = ir.responseBody.docs contains room - } yield () - } - - test("correct 
facet queries"): + val room = Room(UUID.randomUUID().toString, "meeting room", "room for meetings", 56) + for { + client <- IO(solrClient()) + _ <- truncateAll(client)( + List("roomName", "roomDescription", "roomSeats").map(FieldName.apply), + List("roomText", "roomInt").map(TypeName.apply) + ) + _ <- client.modifySchema(cmds) + _ <- client.upsert[Room](Seq(room)) + qr <- client.query[Room](QueryData(QueryString("_type_s:Room"))) + _ = qr.responseBody.docs contains room + ir <- client.findById[Room](room.id) + _ = ir.responseBody.docs contains room + } yield () + } + + test("correct facet queries") { val decoder: Decoder[Unit] = new Decoder { def read(r: Reader): Unit = r.skipElement() () } + val client = solrClient() PropF.forAllF(SolrClientGenerator.facets) { facets => val q = QueryData(QueryString("*:*")).withFacet(facets) - withSolrClient().use { client => - client.query(q)(using decoder).void - } + client.query(q)(using decoder).void } + } - test("decoding facet response"): + test("decoding facet response") { val rooms = Gen.listOfN(15, Room.gen).sample.get val facets = Facets(Facet.Terms(FieldName("by_name"), FieldName("roomName"), limit = Some(6))) - withSolrClient().use { client => - for { - _ <- client.delete(QueryString("_type_s:Room")) - _ <- client.upsert(rooms) - r <- client.query[Room](QueryData(QueryString("_type_s:Room")).withFacet(facets)) - _ = assert(r.facetResponse.nonEmpty) - _ = assertEquals(r.facetResponse.get.count, 15) - _ = assertEquals( - r.facetResponse.get.buckets(FieldName("by_name")).buckets.size, - 6 - ) - } yield () - } - test("delete by id"): - withSolrClient().use { client => - for { - id <- IO(Gen.uuid.generateOne).map(_.toString) - _ <- client.delete(QueryString("_type_s:Person")) - _ <- client.upsert(Seq(SolrClientSpec.Person(id, "John"))) - r <- client.query[SolrClientSpec.Person](QueryData(QueryString(s"id:$id"))) - p = r.responseBody.docs.head - _ = assertEquals(p.id, id) - _ <- client.deleteIds(NonEmptyList.of(id)) - _ <- 
IO.sleep(10.millis) - r2 <- client.query[SolrClientSpec.Person](QueryData(QueryString(s"id:$id"))) - _ = assert(r2.responseBody.docs.isEmpty) - } yield () - } + for { + client <- IO(solrClient()) + _ <- client.delete(QueryString("_type_s:Room")) + _ <- client.upsert(rooms) + r <- client.query[Room](QueryData(QueryString("_type_s:Room")).withFacet(facets)) + _ = assert(r.facetResponse.nonEmpty) + _ = assertEquals(r.facetResponse.get.count, 15) + _ = assertEquals( + r.facetResponse.get.buckets(FieldName("by_name")).buckets.size, + 6 + ) + } yield () + } + + test("delete by id") { + for { + client <- IO(solrClient()) + id <- IO(Gen.uuid.generateOne).map(_.toString) + _ <- client.delete(QueryString("_type_s:Person")) + _ <- client.upsert(Seq(SolrClientSpec.Person(id, "John"))) + r <- client.query[SolrClientSpec.Person](QueryData(QueryString(s"id:$id"))) + p = r.responseBody.docs.head + _ = assertEquals(p.id, id) + _ <- client.deleteIds(NonEmptyList.of(id)) + _ <- IO.sleep(10.millis) + r2 <- client.query[SolrClientSpec.Person](QueryData(QueryString(s"id:$id"))) + _ = assert(r2.responseBody.docs.isEmpty) + } yield () + } + + test("create and delete core") { + val name = Gen + .choose(5, 12) + .flatMap(n => Gen.listOfN(n, Gen.alphaChar)) + .map(_.mkString) + .generateOne + + for + client <- IO(solrClient()) + _ <- client.createCore(name) + s1 <- client.getStatus + _ = assert(s1.status.keySet.contains(name), s"core $name not available") + id <- IO(Gen.uuid.generateOne).map(_.toString) + _ <- client.upsert(Seq(SolrClientSpec.Person(id, "John"))) + _ <- client.query[SolrClientSpec.Person](QueryData(QueryString(s"id:$id"))) + _ <- client.deleteCore(name) + s2 <- client.getStatus + _ = assert(!s2.status.keySet.contains(name), s"core $name is not deleted") + yield () + } + + test("Obtain schema"): + for + client <- IO(solrClient()) + schema <- client.getSchema + _ = assert(schema.schema.fieldTypes.nonEmpty) + yield () object SolrClientSpec: case class Room(id: String, roomName: 
String, roomDescription: String, roomSeats: Int) diff --git a/modules/solr-client/src/test/scala/io/renku/solr/client/migration/SolrMigratorSpec.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/migration/SolrMigratorSpec.scala index f2c5b037..6cf59d67 100644 --- a/modules/solr-client/src/test/scala/io/renku/solr/client/migration/SolrMigratorSpec.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/migration/SolrMigratorSpec.scala @@ -24,10 +24,14 @@ import io.renku.solr.client.SolrClient import io.renku.solr.client.schema.* import io.renku.solr.client.schema.SchemaCommand.Add import io.renku.solr.client.util.SolrClientBaseSuite +import munit.CatsEffectSuite -class SolrMigratorSpec extends SolrClientBaseSuite: +class SolrMigratorSpec extends CatsEffectSuite with SolrClientBaseSuite: private val logger = scribe.cats.io - override protected lazy val coreName: String = server.testCoreName3 + + override def munitFixtures: Seq[munit.AnyFixture[?]] = + List(solrServer, solrClient) + private val migrations = Seq( SchemaMigration(-5, Add(FieldType.text(TypeName("testText"), Analyzer.classic))), SchemaMigration(-4, Add(FieldType.int(TypeName("testInt")))), @@ -47,29 +51,29 @@ class SolrMigratorSpec extends SolrClientBaseSuite: Seq(TypeName("testText"), TypeName("testInt")) ) - test("run sample migrations"): - withSolrClient().use { client => - val migrator = SchemaMigrator[IO](client) - for { - _ <- truncate(client) - _ <- migrator.migrate(migrations) - c <- migrator.currentVersion - _ = assertEquals(c, Some(-1L)) - } yield () - } + test("run sample migrations") { + for { + client <- IO(solrClient()) + migrator = SchemaMigrator[IO](client) + _ <- truncate(client) + _ <- migrator.migrate(migrations) + c <- migrator.currentVersion + _ = assertEquals(c, Some(-1L)) + } yield () + } test("run migrations"): - withSolrClient().use { client => - val migrator = SchemaMigrator(client) - val first = migrations.take(2) - for { - _ <- truncate(client) - _ <- 
migrator.migrate(first) - v0 <- migrator.currentVersion - _ = assertEquals(v0, Some(-4L)) + for { + client <- IO(solrClient()) + migrator = SchemaMigrator(client) + first = migrations.take(2) + + _ <- truncate(client) + _ <- migrator.migrate(first) + v0 <- migrator.currentVersion + _ = assertEquals(v0, Some(-4L)) - _ <- migrator.migrate(migrations) - v1 <- migrator.currentVersion - _ = assertEquals(v1, Some(-1L)) - } yield () - } + _ <- migrator.migrate(migrations) + v1 <- migrator.currentVersion + _ = assertEquals(v1, Some(-1L)) + } yield () diff --git a/modules/solr-client/src/test/scala/io/renku/solr/client/schema/BorerJsonCodecTest.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/schema/BorerJsonCodecTest.scala index 81038feb..57827367 100644 --- a/modules/solr-client/src/test/scala/io/renku/solr/client/schema/BorerJsonCodecTest.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/schema/BorerJsonCodecTest.scala @@ -18,7 +18,10 @@ package io.renku.solr.client.schema +import scala.io.Source + import io.bullet.borer.Json +import io.renku.solr.client.SchemaResponse import io.renku.solr.client.schema.SchemaCommand.DeleteType import munit.FunSuite @@ -35,17 +38,29 @@ class BorerJsonCodecTest extends FunSuite with SchemaJsonCodec { val v = SchemaCommand.Add(Field(FieldName("description"), TypeName("integer"))) assertEquals( Json.encode(v).toUtf8String, - """{"add-field":{"name":"description","type":"integer","required":false,"indexed":true,"stored":true,"multiValued":false,"uninvertible":true,"docValues":false}}""" + """{"add-field":{"name":"description","type":"integer"}}""" ) test("encode multiple schema commands into a single object"): val vs = Seq( DeleteType(TypeName("integer")), DeleteType(TypeName("float")), - SchemaCommand.Add(Field(FieldName("description"), TypeName("text"))) + SchemaCommand.Add( + Field(FieldName("description"), TypeName("text"), required = true) + ) ) assertEquals( Json.encode(vs).toUtf8String, - 
"""{"delete-field-type":{"name":"integer"},"delete-field-type":{"name":"float"},"add-field":{"name":"description","type":"text","required":false,"indexed":true,"stored":true,"multiValued":false,"uninvertible":true,"docValues":false}}""".stripMargin + """{"delete-field-type":{"name":"integer"},"delete-field-type":{"name":"float"},"add-field":{"name":"description","type":"text","required":true}}""".stripMargin ) + + test("decode schema response"): + val schemaResponseText = Source.fromResource("schema-response.json").mkString + val result = Json.decode(schemaResponseText.getBytes()).to[SchemaResponse].value + assertEquals(result.schema.copyFields.size, 16) + assertEquals(result.schema.dynamicFields.size, 69) + assertEquals(result.schema.fields.size, 30) + assertEquals(result.schema.fieldTypes.size, 73) + assert(result.schema.fields.exists(_.name == FieldName("_kind"))) + assert(result.schema.copyFields.exists(_.source == FieldName("description"))) } diff --git a/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrClientBaseSuite.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrClientBaseSuite.scala index 2178d019..aac95a21 100644 --- a/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrClientBaseSuite.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrClientBaseSuite.scala @@ -18,11 +18,42 @@ package io.renku.solr.client.util +import cats.effect.* + +import io.renku.search.GeneratorSyntax.* import io.renku.search.LoggingConfigure -import munit.CatsEffectSuite +import io.renku.solr.client.* +import munit.CatsEffectFixtures +import org.scalacheck.Gen -abstract class SolrClientBaseSuite - extends CatsEffectSuite +trait SolrClientBaseSuite + extends SolrServerSuite with LoggingConfigure - with SolrServerSuite with SolrTruncate + with CatsEffectFixtures: + + private val coreNameGen: Gen[String] = + Gen + .choose(5, 12) + .flatMap(n => Gen.listOfN(n, Gen.alphaChar)) + .map(_.mkString) + .map(name 
=> s"test-core-$name") + + val solrClientR: Resource[IO, SolrClient[IO]] = + for + serverUri <- Resource.eval(IO(solrServer())) + coreName <- Resource.eval(IO(coreNameGen.generateOne)) + cfg = SolrConfig(serverUri, coreName, None, false) + client <- SolrClient[IO](cfg) + _ <- Resource.make(createSolrCore(client, coreName))(_ => + deleteSolrCore(client, coreName).start.void + ) + yield client + + val solrClient = ResourceSuiteLocalFixture("solr-client", solrClientR) + + def createSolrCore(client: SolrClient[IO], name: String): IO[Unit] = + IO.blocking(solrServerValue.createCore(name).get) + + def deleteSolrCore(client: SolrClient[IO], name: String): IO[Unit] = + IO.blocking(solrServerValue.deleteCore(name).get) diff --git a/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrServerSuite.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrServerSuite.scala index f6a8e3ee..23b543da 100644 --- a/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrServerSuite.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/util/SolrServerSuite.scala @@ -18,35 +18,29 @@ package io.renku.solr.client.util -import cats.effect.* - import io.renku.servers.SolrServer -import io.renku.solr.client.{SolrClient, SolrConfig} +import munit.Fixture import org.http4s.Uri +/** Starts the solr server if not already running. + * + * This is here for running single tests from outside sbt. Within sbt, the solr server is + * started before any test is run and therefore will live for the entire test run. 
+ */ trait SolrServerSuite: - self: munit.Suite => - - protected lazy val server: SolrServer = SolrServer - protected lazy val coreName: String = server.testCoreName1 - protected lazy val solrConfig: SolrConfig = SolrConfig( - Uri.unsafeFromString(server.url) / "solr", - coreName, - maybeUser = None, - logMessageBodies = true - ) - val withSolrClient: Fixture[Resource[IO, SolrClient[IO]]] = - new Fixture[Resource[IO, SolrClient[IO]]]("solr"): + lazy val solrServerValue = SolrServer - def apply(): Resource[IO, SolrClient[IO]] = - SolrClient[IO](solrConfig) + val solrServer: Fixture[Uri] = + new Fixture[Uri]("solr-server"): + private var serverUri: Option[Uri] = None + def apply(): Uri = serverUri match + case Some(u) => u + case None => sys.error(s"Fixture $fixtureName not initialized") override def beforeAll(): Unit = - server.start() + solrServerValue.start() + serverUri = Some(Uri.unsafeFromString(solrServerValue.url)) override def afterAll(): Unit = - server.stop() - - override def munitFixtures: Seq[Fixture[?]] = - List(withSolrClient) + solrServerValue.stop() diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/TestSearchSolrServer.scala b/modules/solr-client/src/test/scala/io/renku/solr/client/util/TestSearchSolrServer.scala similarity index 96% rename from modules/search-solr-client/src/test/scala/io/renku/search/solr/client/TestSearchSolrServer.scala rename to modules/solr-client/src/test/scala/io/renku/solr/client/util/TestSearchSolrServer.scala index 85533396..91e324d5 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/TestSearchSolrServer.scala +++ b/modules/solr-client/src/test/scala/io/renku/solr/client/util/TestSearchSolrServer.scala @@ -16,7 +16,7 @@ * limitations under the License. 
 */
 
-package io.renku.search.solr.client
+package io.renku.solr.client.util
 
 import cats.effect.{ExitCode, IO, IOApp}
 
diff --git a/nix/services.nix b/nix/services.nix
index 5328e459..bcd3c301 100644
--- a/nix/services.nix
+++ b/nix/services.nix
@@ -6,8 +6,8 @@
 }: {
   services.dev-solr = {
     enable = true;
-    cores = ["rsdev-test" "core-test1" "core-test2" "core-test3" "search-core-test"];
-    heap = 512;
+    cores = ["rsdev-test" "search-core-test"];
+    heap = 1024;
   };
 
   networking.hostName = "rsdev";
diff --git a/project/RedisServer.scala b/project/RedisServer.scala
index 898663be..df3804de 100644
--- a/project/RedisServer.scala
+++ b/project/RedisServer.scala
@@ -21,12 +21,9 @@ import java.util.concurrent.atomic.AtomicBoolean
 import scala.sys.process.*
 import scala.util.Try
 
-object RedisServer extends RedisServer("graph", None)
-
-@annotation.nowarn()
-class RedisServer(module: String, val redisPort: Option[Int]) {
-
-  val port = redisPort.orElse(sys.env.get("RS_REDIS_PORT").map(_.toInt)).getOrElse(6379)
+@annotation.nowarn
+object RedisServer {
+  val port = sys.env.get("RS_REDIS_PORT").map(_.toInt).getOrElse(6379)
 
   val host: String = sys.env.get("RS_REDIS_HOST").getOrElse("localhost")
   val url: String = s"redis://$host:$port"
@@ -34,7 +31,7 @@ class RedisServer(module: String, val redisPort: Option[Int]) {
   // to not start a Redis server via docker for the tests
   private val skipServer: Boolean = sys.env.contains("NO_REDIS")
 
-  private val containerName = s"$module-test-redis"
+  private val containerName = "search-test-redis"
   private val image = "redis:7.2.4-alpine"
   private val startCmd = s"""|docker run --rm
                              |--name $containerName
@@ -52,7 +49,7 @@ class RedisServer(module: String, val redisPort: Option[Int]) {
     if (skipServer) println("Not starting Redis via docker")
     else if (checkRunning) ()
     else {
-      println(s"Starting Redis container for '$module' from '$image' image")
+      println(s"Starting Redis container from '$image' image")
       startContainer()
       var rc = 1
       val maxTries = 500
@@ -61,11 +58,11 @@ class RedisServer(module: String, val redisPort: Option[Int]) {
         counter += 1
         Thread.sleep(500)
         rc = Process(isReadyCmd).!
-        if (rc == 0) println(s"Redis container for '$module' started on port $port")
+        if (rc == 0) println(s"Redis container started on port $port")
         else println(s"IsReadyCmd returned $rc")
       }
       if (rc != 0)
-        sys.error(s"Redis container for '$module' could not be started on port $port")
+        sys.error(s"Redis container could not be started on port $port")
     }
   }
 
@@ -86,14 +83,14 @@ class RedisServer(module: String, val redisPort: Option[Int]) {
   def stop(): Unit =
     if (skipServer || !wasStartedHere.get()) ()
     else {
-      println(s"Stopping Redis container for '$module'")
+      println(s"Stopping Redis container '$image'")
       stopCmd.!!
       ()
     }
 
   def forceStop(): Unit =
     if (!skipServer) {
-      println(s"Stopping Redis container for '$module'")
+      println(s"Stopping Redis container '$image'")
       stopCmd.!!
       ()
     }
diff --git a/project/SolrServer.scala b/project/SolrServer.scala
index 9943649f..da308772 100644
--- a/project/SolrServer.scala
+++ b/project/SolrServer.scala
@@ -19,17 +19,17 @@ package io.renku.servers
 
 import java.util.concurrent.atomic.AtomicBoolean
+import java.util.concurrent.atomic.AtomicInteger
+
 import scala.annotation.tailrec
 import scala.sys.process.*
 import scala.util.Try
 
-object SolrServer extends SolrServer("graph", None)
-
-@annotation.nowarn()
-class SolrServer(module: String, solrPort: Option[Int]) {
+@annotation.nowarn
+object SolrServer {
+  private val createCoreCounter: AtomicInteger = new AtomicInteger(0)
 
-  private val port =
-    solrPort.orElse(sys.env.get("RS_SOLR_PORT").map(_.toInt)).getOrElse(8983)
+  private val port = sys.env.get("RS_SOLR_PORT").map(_.toInt).getOrElse(8983)
 
   private val host: String = sys.env.get("RS_SOLR_HOST").getOrElse("localhost")
 
   val url: String = s"http://$host:$port"
@@ -37,13 +37,10 @@ class SolrServer(module: String, solrPort: Option[Int]) {
 
   // to not start a Solr server via docker for the tests
   private 
val skipServer: Boolean = sys.env.contains("NO_SOLR")
 
-  private val containerName = s"$module-test-solr"
+  private val containerName = "search-test-solr"
   private val image = "solr:9.4.1-slim"
 
   val searchCoreName = "search-core-test"
-  val testCoreName1 = "core-test1"
-  val testCoreName2 = "core-test2"
-  val testCoreName3 = "core-test3"
-  private val cores = Set(testCoreName1, testCoreName2, testCoreName3, searchCoreName)
+  private val cores = Set(searchCoreName)
   private val startCmd = s"""|docker run --rm
                              |--name $containerName
                              |-p $port:8983
@@ -51,21 +48,48 @@ class SolrServer(module: String, solrPort: Option[Int]) {
   private val isRunningCmd =
     Seq("docker", "container", "ls", "--filter", s"name=$containerName")
   private val stopCmd = s"docker stop -t5 $containerName"
-  private def readyCmd(core: String) =
-    s"curl http://localhost:8983/solr/$core/select?q=*:* --no-progress-meter --fail 1> /dev/null"
   private def isCoreReadyCmd(core: String) =
-    Seq("docker", "exec", containerName, "sh", "-c", readyCmd(core))
-  private def createCore(core: String) = s"precreate-core $core"
+    Seq(
+      "curl",
+      "--fail",
+      "-s",
+      "-o",
+      "/dev/null",
+      s"$url/solr/$core/select"
+    )
   private def createCoreCmd(core: String) =
-    Seq("docker", "exec", containerName, "sh", "-c", createCore(core))
+    sys.env
+      .get("RS_SOLR_CREATE_CORE_CMD")
+      .map(_.replace("%s", core))
+      .getOrElse(s"docker exec $containerName solr create -c $core")
+  private def deleteCoreCmd(core: String) =
+    sys.env
+      .get("RS_SOLR_DELETE_CORE_CMD")
+      .map(_.replace("%s", core))
+      .getOrElse(s"docker exec $containerName solr delete -c $core")
+
+  // configsets are copied to $SOLR_HOME to allow core api to create cores
+  private val copyConfigSetsCmd =
+    Seq(
+      "docker",
+      "exec",
+      containerName,
+      "cp",
+      "-r",
+      "/opt/solr/server/solr/configsets",
+      "/var/solr/data/"
+    )
+
+  private val wasStartedHere = new AtomicBoolean(false)
 
   def start(): Unit =
     if (skipServer) println("Not starting Solr via docker")
    else if 
(checkRunning) () else { - println(s"Starting Solr container for '$module' from '$image' image") + println(s"Starting Solr container '$image'") startContainer() + createCores(cores) + copyConfigSets() waitForCoresToBeReady() } @@ -77,9 +101,9 @@ class SolrServer(module: String, solrPort: Option[Int]) { counter += 1 Thread.sleep(500) rc = checkCoresReady - if (rc == 0) println(s"Solr cores for '$module' ready on port $port") + if (rc == 0) println(s"Solr cores ready on port $port") } - if (rc != 0) sys.error(s"Solr cores for '$module' could not be started") + if (rc != 0) sys.error(s"Solr cores could not be started") } private def checkCoresReady = @@ -95,12 +119,27 @@ class SolrServer(module: String, solrPort: Option[Int]) { private def startContainer(): Unit = { val retryOnContainerFailedToRun: Throwable => Unit = { case ex if ex.getMessage contains "Nonzero exit value: 125" => - Thread.sleep(500); start() + Thread.sleep(500); startContainer() case ex => throw ex } Try(startCmd.!!).fold(retryOnContainerFailedToRun, _ => wasStartedHere.set(true)) Thread.sleep(500) - createCores(cores) + } + + private def copyConfigSets(): Unit = + copyConfigSetsCmd.!! + + def createCore(name: String): Try[Unit] = { + val cmd = createCoreCmd(name) + val n = createCoreCounter.incrementAndGet() + println(s"Create $n-th core: $cmd") + Try(cmd.!!).map(_ => ()) + } + + def deleteCore(name: String): Try[Unit] = { + val cmd = deleteCoreCmd(name) + println(s"Run delete core: $cmd") + Try(cmd.!!).map(_ => ()) } @tailrec @@ -127,14 +166,14 @@ class SolrServer(module: String, solrPort: Option[Int]) { def stop(): Unit = if (skipServer || !wasStartedHere.get()) () else { - println(s"Stopping Solr container for '$module'") + println(s"Stopping Solr container '$image'") stopCmd.!! () } def forceStop(): Unit = if (!skipServer) { - println(s"Stopping Solr container for '$module'") + println(s"Stopping Solr container '$image'") stopCmd.!! () }