From b1b35b9e7e3fad5caa3feebea5608b88e04848fa Mon Sep 17 00:00:00 2001 From: Jakub Chrobasik Date: Fri, 16 Feb 2024 19:22:13 +0100 Subject: [PATCH] fix: obtaining redis client from Redis Sentinel --- .../io/renku/queue/client/QueueClient.scala | 4 +- .../io/renku/redis/client/ClientCreator.scala | 97 ++++++++++++++++ .../redis/client/ConnectionCreator.scala | 105 ------------------ .../io/renku/redis/client/RedisConfig.scala | 10 +- .../renku/redis/client/RedisQueueClient.scala | 27 +++-- .../renku/redis/client/util/RedisSpec.scala | 2 +- .../scala/io/renku/search/api/SearchApi.scala | 2 +- .../renku/search/provision/Microservice.scala | 3 +- .../search/provision/SearchProvisioner.scala | 68 +++++++----- .../provision/SearchProvisionerSpec.scala | 4 +- .../search/solr/client/SearchSolrClient.scala | 2 +- 11 files changed, 175 insertions(+), 149 deletions(-) create mode 100644 modules/redis-client/src/main/scala/io/renku/redis/client/ClientCreator.scala delete mode 100644 modules/redis-client/src/main/scala/io/renku/redis/client/ConnectionCreator.scala diff --git a/modules/redis-client/src/main/scala/io/renku/queue/client/QueueClient.scala b/modules/redis-client/src/main/scala/io/renku/queue/client/QueueClient.scala index 4d867ae0..39a7a870 100644 --- a/modules/redis-client/src/main/scala/io/renku/queue/client/QueueClient.scala +++ b/modules/redis-client/src/main/scala/io/renku/queue/client/QueueClient.scala @@ -43,5 +43,5 @@ trait QueueClient[F[_]] { } object QueueClient: - def apply[F[_]: Async](redisConfig: RedisConfig): Resource[F, QueueClient[F]] = - RedisQueueClient[F](redisConfig) + def make[F[_]: Async](redisConfig: RedisConfig): Resource[F, QueueClient[F]] = + RedisQueueClient.make[F](redisConfig) diff --git a/modules/redis-client/src/main/scala/io/renku/redis/client/ClientCreator.scala b/modules/redis-client/src/main/scala/io/renku/redis/client/ClientCreator.scala new file mode 100644 index 00000000..c05890f5 --- /dev/null +++ b/modules/redis-client/src/main/scala/io/renku/redis/client/ClientCreator.scala @@ -0,0 +1,97 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.renku.redis.client + +import cats.MonadThrow +import cats.effect.{Async, Resource} +import cats.syntax.all.* +import dev.profunktor.redis4cats.connection.{RedisClient, RedisURI} +import dev.profunktor.redis4cats.effect.Log +import io.lettuce.core.RedisURI as JRedisURI +import io.lettuce.core.sentinel.api.StatefulRedisSentinelConnection + +import scala.collection.mutable +import scala.jdk.CollectionConverters.given + +sealed private trait ClientCreator[F[_]]: + def makeClient: Resource[F, RedisClient] + +private object ClientCreator: + + def apply[F[_]: Async: Log](cfg: RedisConfig): ClientCreator[F] = + if cfg.sentinel then new SentinelClientCreator(cfg) + else new SingleNodeClientCreator(cfg) + +private class SingleNodeClientCreator[F[_]: Async: Log](cfg: RedisConfig) + extends ClientCreator[F]: + + override def makeClient: Resource[F, RedisClient] = + RedisClient[F].fromUri(cfg.asRedisUri) + +private class SentinelClientCreator[F[_]: Async: Log](cfg: RedisConfig) + extends ClientCreator[F]: + + override def makeClient: Resource[F, RedisClient] = + makeSentinelClient >>= makeMasterNodeClient + + private def makeSentinelClient = + RedisClient[F].fromUri(cfg.asRedisUri) + + private def makeMasterNodeClient(sentinelRedisClient: RedisClient) = + Resource + .eval(connectSentinel(sentinelRedisClient) >>= findMasterNodeUri) + .flatMap(RedisClient[F].fromUri(_)) + + private def connectSentinel( + client: RedisClient + ): F[StatefulRedisSentinelConnection[String, String]] = + MonadThrow[F].catchNonFatal( + client.underlying.connectSentinel() + ) + + private def findMasterNodeUri(conn: StatefulRedisSentinelConnection[String, String]) = + findMasterSet >>= (findMasterNodeInfo(conn, _)) >>= findNodeUri + + private def findMasterSet: F[RedisMasterSet] = + MonadThrow[F].fromOption( + cfg.maybeMasterSet, + new Exception("No Redis MasterSet configured") + ) + + private def findMasterNodeInfo( + conn: StatefulRedisSentinelConnection[String, String], + masterSet: RedisMasterSet + ): F[mutable.Map[String, String]] = + MonadThrow[F].catchNonFatal( + conn.sync().master(masterSet.value).asScala + ) + + private def findNodeUri( + nodeInfo: mutable.Map[String, String] + ): F[RedisURI] = + MonadThrow[F].fromOption( + (nodeInfo.get("ip") -> nodeInfo.get("port")) + .mapN { (host, port) => + val builder = JRedisURI.Builder.redis(host, port.toInt) + cfg.maybePassword.map(_.value.toCharArray).fold(builder)(builder.withPassword) + cfg.maybeDB.map(_.value).fold(builder)(builder.withDatabase) + RedisURI.fromUnderlying(builder.build()) + }, + new Exception("No Redis Master node URI found") + ) diff --git a/modules/redis-client/src/main/scala/io/renku/redis/client/ConnectionCreator.scala b/modules/redis-client/src/main/scala/io/renku/redis/client/ConnectionCreator.scala deleted file mode 100644 index 5eab0684..00000000 --- a/modules/redis-client/src/main/scala/io/renku/redis/client/ConnectionCreator.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.redis.client - -import cats.effect.{Async, Resource} -import dev.profunktor.redis4cats.connection.{RedisClient, RedisMasterReplica, RedisURI} -import dev.profunktor.redis4cats.data.RedisCodec -import dev.profunktor.redis4cats.effect.Log -import dev.profunktor.redis4cats.streams.{RedisStream, Streaming} -import dev.profunktor.redis4cats.{Redis, RedisCommands} -import fs2.Stream -import io.lettuce.core.{ReadFrom as JReadFrom, RedisURI as JRedisURI} -import scodec.bits.ByteVector - -import scala.jdk.CollectionConverters.given - -sealed private trait ConnectionCreator[F[_]]: - - def createStreamingConnection - : Stream[F, Streaming[[A] =>> Stream[F, A], String, ByteVector]] - - def createStringCommands: Resource[F, RedisCommands[F, String, String]] - -object ConnectionCreator: - - def create[F[_]: Async: Log](cfg: RedisConfig): Resource[F, ConnectionCreator[F]] = - val uri = redisUri(cfg) - if cfg.sentinel then - RedisClient[F] - .fromUri(RedisURI.fromUnderlying(uri.getSentinels.asScala.toList.head)) - .map(new SingleConnectionCreator(_)) -// Resource.eval[F, ConnectionCreator[F]] { -// ApplicativeThrow[F] -// .catchNonFatal { -// uri.getSentinels.asScala.toList.map(RedisURI.fromUnderlying) -// } -// .map(new SentinelConnectionCreator(_)) -// } - else - RedisClient[F] - .fromUri(RedisURI.fromUnderlying(uri)) - .map(new SingleConnectionCreator(_)) - - private def redisUri(cfg: RedisConfig): JRedisURI = { - - val uriBuilder = JRedisURI.builder - cfg.maybeDB.map(_.value).foreach(uriBuilder.withDatabase) - - if cfg.sentinel then - cfg.maybePassword.fold( - uriBuilder.withSentinel(cfg.host.value, cfg.port.value) - )(pass => uriBuilder.withSentinel(cfg.host.value, cfg.port.value, pass.value)) - cfg.maybeMasterSet.map(_.value).foreach(uriBuilder.withSentinelMasterId) - else - uriBuilder - .withHost(cfg.host.value) - .withPort(cfg.port.value) - cfg.maybePassword.foreach(pass => uriBuilder.withPassword(pass.value.toCharArray)) - - uriBuilder.build() - } - -private class SingleConnectionCreator[F[_]: Async: Log](client: RedisClient) - extends ConnectionCreator[F]: - - override def createStreamingConnection - : Stream[F, Streaming[[A] =>> Stream[F, A], String, ByteVector]] = - RedisStream - .mkStreamingConnection[F, String, ByteVector](client, StringBytesCodec.instance) - - override def createStringCommands: Resource[F, RedisCommands[F, String, String]] = - Redis[F].fromClient(client, RedisCodec.Utf8) - -private class SentinelConnectionCreator[F[_]: Async: Log](uris: List[RedisURI]) - extends ConnectionCreator[F]: - - private val maybeReadFrom: Option[JReadFrom] = None - - override def createStreamingConnection - : Stream[F, Streaming[[A] =>> Stream[F, A], String, ByteVector]] = - RedisStream.mkMasterReplicaConnection[F, String, ByteVector]( - StringBytesCodec.instance, - uris: _* - )(maybeReadFrom) - - override def createStringCommands: Resource[F, RedisCommands[F, String, String]] = - RedisMasterReplica[F] - .make(RedisCodec.Utf8, uris: _*)(maybeReadFrom) - .flatMap(Redis[F].masterReplica) diff --git 
a/modules/redis-client/src/main/scala/io/renku/redis/client/RedisConfig.scala b/modules/redis-client/src/main/scala/io/renku/redis/client/RedisConfig.scala index 1a5030fe..3787e39e 100644 --- a/modules/redis-client/src/main/scala/io/renku/redis/client/RedisConfig.scala +++ b/modules/redis-client/src/main/scala/io/renku/redis/client/RedisConfig.scala @@ -18,6 +18,9 @@ package io.renku.redis.client +import dev.profunktor.redis4cats.connection.RedisURI +import io.lettuce.core.RedisURI as JRedisURI + final case class RedisConfig( host: RedisHost, port: RedisPort, @@ -25,7 +28,12 @@ final case class RedisConfig( maybeDB: Option[RedisDB] = None, maybePassword: Option[RedisPassword] = None, maybeMasterSet: Option[RedisMasterSet] = None -) +): + lazy val asRedisUri: RedisURI = + val builder = JRedisURI.Builder.redis(host.value, port.value) + maybePassword.map(_.value.toCharArray).fold(builder)(builder.withPassword) + maybeDB.map(_.value).fold(builder)(builder.withDatabase) + RedisURI.fromUnderlying(builder.build()) opaque type RedisHost = String object RedisHost { diff --git a/modules/redis-client/src/main/scala/io/renku/redis/client/RedisQueueClient.scala b/modules/redis-client/src/main/scala/io/renku/redis/client/RedisQueueClient.scala index 4be7e279..efd51cac 100644 --- a/modules/redis-client/src/main/scala/io/renku/redis/client/RedisQueueClient.scala +++ b/modules/redis-client/src/main/scala/io/renku/redis/client/RedisQueueClient.scala @@ -20,8 +20,12 @@ package io.renku.redis.client import cats.effect.{Async, Resource} import cats.syntax.all.* +import dev.profunktor.redis4cats.connection.RedisClient +import dev.profunktor.redis4cats.data.RedisCodec import dev.profunktor.redis4cats.effect.Log import dev.profunktor.redis4cats.streams.data.{StreamingOffset, XAddMessage, XReadMessage} +import dev.profunktor.redis4cats.streams.{RedisStream, Streaming} +import dev.profunktor.redis4cats.{Redis, RedisCommands} import fs2.Stream import io.renku.queue.client.* import scodec.bits.ByteVector @@ -29,13 +33,12 @@ import scribe.Scribe object RedisQueueClient: - def apply[F[_]: Async](redisConfig: RedisConfig): Resource[F, QueueClient[F]] = + def make[F[_]: Async](redisConfig: RedisConfig): Resource[F, QueueClient[F]] = given Scribe[F] = scribe.cats[F] given Log[F] = RedisLogger[F] - ConnectionCreator.create[F](redisConfig).map(new RedisQueueClient(_)) + ClientCreator[F](redisConfig).makeClient.map(new RedisQueueClient(_)) -class RedisQueueClient[F[_]: Async: Log](cc: ConnectionCreator[F]) - extends QueueClient[F] { +class RedisQueueClient[F[_]: Async: Log](client: RedisClient) extends QueueClient[F] { private val payloadKey = "payload" private val encodingKey = "encoding" @@ -52,7 +55,7 @@ class RedisQueueClient[F[_]: Async: Log](cc: ConnectionCreator[F]) Map(payloadKey -> message, encodingKey -> encodeEncoding(encoding)) ) ) - cc.createStreamingConnection + createStreamingConnection .flatMap(_.append(m)) .map(id => MessageId(id.value)) .compile @@ -78,7 +81,7 @@ class RedisQueueClient[F[_]: Async: Log](cc: ConnectionCreator[F]) .map(id => StreamingOffset.Custom[String](_, id.value)) .getOrElse(StreamingOffset.All[String]) - cc.createStreamingConnection >>= { + createStreamingConnection >>= { _.read(Set(queueName.name), chunkSize, initialOffset) .map(toMessage) .collect { case Some(m) => m } @@ -95,7 +98,7 @@ class RedisQueueClient[F[_]: Async: Log](cc: ConnectionCreator[F]) queueName: QueueName, messageId: MessageId ): F[Unit] = - cc.createStringCommands.use { + createStringCommands.use { 
_.set(formProcessedKey(clientId, queueName), messageId.value) } @@ -103,10 +106,18 @@ class RedisQueueClient[F[_]: Async: Log](cc: ConnectionCreator[F]) clientId: ClientId, queueName: QueueName ): F[Option[MessageId]] = - cc.createStringCommands.use { + createStringCommands.use { _.get(formProcessedKey(clientId, queueName)).map(_.map(MessageId.apply)) } private def formProcessedKey(clientId: ClientId, queueName: QueueName) = s"$queueName.$clientId" + + private def createStreamingConnection + : Stream[F, Streaming[[A] =>> Stream[F, A], String, ByteVector]] = + RedisStream + .mkStreamingConnection[F, String, ByteVector](client, StringBytesCodec.instance) + + private def createStringCommands: Resource[F, RedisCommands[F, String, String]] = + Redis[F].fromClient(client, RedisCodec.Utf8) } diff --git a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala index c9a54429..88a194f1 100644 --- a/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala +++ b/modules/redis-client/src/test/scala/io/renku/redis/client/util/RedisSpec.scala @@ -55,7 +55,7 @@ trait RedisSpec: apply().flatMap(createRedisCommands) override def asQueueClient(): Resource[IO, QueueClient[IO]] = - RedisQueueClient[IO]( + RedisQueueClient.make[IO]( RedisConfig( RedisHost(server.host), RedisPort(server.port) diff --git a/modules/search-api/src/main/scala/io/renku/search/api/SearchApi.scala b/modules/search-api/src/main/scala/io/renku/search/api/SearchApi.scala index 989a9bc3..569be6f4 100644 --- a/modules/search-api/src/main/scala/io/renku/search/api/SearchApi.scala +++ b/modules/search-api/src/main/scala/io/renku/search/api/SearchApi.scala @@ -30,4 +30,4 @@ object SearchApi: def apply[F[_]: Async: Network]( solrConfig: SolrConfig ): Resource[F, SearchApi[F]] = - SearchSolrClient[F](solrConfig).map(new SearchApiImpl[F](_)) + SearchSolrClient.make[F](solrConfig).map(new SearchApiImpl[F](_)) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala index 5b0626ea..8b82ba84 100644 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala @@ -37,7 +37,8 @@ object Microservice extends IOApp: } yield ExitCode.Success private def startProvisioning(cfg: SearchProvisionConfig): IO[Unit] = - SearchProvisioner[IO](cfg.queueName, cfg.redisConfig, cfg.solrConfig) + SearchProvisioner + .make[IO](cfg.queueName, cfg.redisConfig, cfg.solrConfig) .evalMap(_.provisionSolr.start) .use(_ => IO.never) .handleErrorWith { err => diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/SearchProvisioner.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/SearchProvisioner.scala index b8a8b389..68287fee 100644 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/SearchProvisioner.scala +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/SearchProvisioner.scala @@ -19,7 +19,7 @@ package io.renku.search.provision import cats.MonadThrow -import cats.effect.{Async, Resource} +import cats.effect.{Async, Resource, Temporal} import cats.syntax.all.* import fs2.Chunk import fs2.io.net.Network @@ -43,43 +43,54 @@ object SearchProvisioner: private val clientId: ClientId = ClientId("search-provisioner") - def 
apply[F[_]: Async: Network]( + def make[F[_]: Async: Network]( queueName: QueueName, redisConfig: RedisConfig, solrConfig: SolrConfig ): Resource[F, SearchProvisioner[F]] = - QueueClient[F](redisConfig) - .flatMap(qc => SearchSolrClient[F](solrConfig).tupleLeft(qc)) - .map { case (qc, sc) => new SearchProvisionerImpl[F](clientId, queueName, qc, sc) } + SearchSolrClient.make[F](solrConfig).map { + new SearchProvisionerImpl[F]( + clientId, + queueName, + QueueClient.make[F](redisConfig), + _ + ) + } private class SearchProvisionerImpl[F[_]: Async]( clientId: ClientId, queueName: QueueName, - queueClient: QueueClient[F], + queueClientResource: Resource[F, QueueClient[F]], solrClient: SearchSolrClient[F] ) extends SearchProvisioner[F]: private given Scribe[F] = scribe.cats[F] override def provisionSolr: F[Unit] = - findLastProcessed >>= { maybeLastProcessed => - queueClient - .acquireEventsStream(queueName, chunkSize = 1, maybeLastProcessed) - .evalMap(decodeMessage) - .evalTap { case (m, v) => Scribe[F].info(s"Received messageId: ${m.id} $v") } - .groupWithin(chunkSize = 10, timeout = 500 millis) - .evalMap(pushToSolr) - .compile - .drain - .handleErrorWith(logAndRestart) - } + queueClientResource + .use { queueClient => + findLastProcessed(queueClient) >>= { maybeLastProcessed => + queueClient + .acquireEventsStream(queueName, chunkSize = 1, maybeLastProcessed) + .evalMap(decodeMessage(queueClient)) + .evalTap { case (m, v) => Scribe[F].info(s"Received messageId: ${m.id} $v") } + .groupWithin(chunkSize = 10, timeout = 500 millis) + .evalMap(pushToSolr(queueClient)) + .compile + .drain + .handleErrorWith(logAndRestart) + } + } + .handleErrorWith(logAndRestart) - private def findLastProcessed = + private def findLastProcessed(queueClient: QueueClient[F]) = queueClient.findLastProcessed(clientId, queueName) private val avro = AvroReader(ProjectCreated.SCHEMA$) - private def decodeMessage(message: Message): F[(Message, Seq[ProjectCreated])] = + private def decodeMessage(queueClient: QueueClient[F])( + message: Message + ): F[(Message, Seq[ProjectCreated])] = MonadThrow[F] .catchNonFatal { message.encoding match { @@ -88,9 +99,11 @@ private class SearchProvisionerImpl[F[_]: Async]( } } .map(message -> _) - .onError(markProcessedOnFailure(message)) + .onError(markProcessedOnFailure(message, queueClient)) - private def pushToSolr(chunk: Chunk[(Message, Seq[ProjectCreated])]): F[Unit] = + private def pushToSolr( + queueClient: QueueClient[F] + )(chunk: Chunk[(Message, Seq[ProjectCreated])]): F[Unit] = chunk.toList match { case Nil => ().pure[F] case tuples => @@ -98,8 +111,8 @@ private class SearchProvisionerImpl[F[_]: Async]( val (lastMessage, _) = tuples.last solrClient .insertProjects(allSolrDocs) - .flatMap(_ => markProcessed(lastMessage)) - .onError(markProcessedOnFailure(lastMessage)) + .flatMap(_ => markProcessed(lastMessage, queueClient)) + .onError(markProcessedOnFailure(lastMessage, queueClient)) } private lazy val toSolrDocuments: Seq[ProjectCreated] => Seq[Project] = @@ -121,14 +134,15 @@ private class SearchProvisionerImpl[F[_]: Async]( } private def markProcessedOnFailure( - message: Message + message: Message, + queueClient: QueueClient[F] ): PartialFunction[Throwable, F[Unit]] = err => - markProcessed(message) >> + markProcessed(message, queueClient) >> Scribe[F].error(s"Processing messageId: ${message.id} failed", err) - private def markProcessed(message: Message): F[Unit] = + private def markProcessed(message: Message, queueClient: QueueClient[F]): F[Unit] = 
queueClient.markProcessed(clientId, queueName, message.id) private def logAndRestart: Throwable => F[Unit] = err => Scribe[F].error("Failure in the provisioning process", err) >> - provisionSolr + Temporal[F].delayBy(provisionSolr, 30 seconds) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/SearchProvisionerSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/SearchProvisionerSpec.scala index 68944db8..35a16018 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/SearchProvisionerSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/SearchProvisionerSpec.scala @@ -47,7 +47,7 @@ class SearchProvisionerSpec extends CatsEffectSuite with RedisSpec with SearchSo redisAndSolrClients.use { case (queueClient, solrClient) => val provisioner = - new SearchProvisionerImpl(clientId, queue, queueClient, solrClient) + new SearchProvisionerImpl(clientId, queue, Resource.pure(queueClient), solrClient) for solrDocs <- SignallingRef.of[IO, Set[Project]](Set.empty) @@ -83,7 +83,7 @@ class SearchProvisionerSpec extends CatsEffectSuite with RedisSpec with SearchSo redisAndSolrClients.use { case (queueClient, solrClient) => val provisioner = - new SearchProvisionerImpl(clientId, queue, queueClient, solrClient) + new SearchProvisionerImpl(clientId, queue, Resource.pure(queueClient), solrClient) for solrDocs <- SignallingRef.of[IO, Set[Project]](Set.empty) diff --git a/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala b/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala index 40e5301c..d5e7b632 100644 --- a/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala +++ b/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala @@ -30,7 +30,7 @@ trait SearchSolrClient[F[_]]: def findProjects(phrase: String): F[List[Project]] object SearchSolrClient: - def apply[F[_]: Async: Network]( + def make[F[_]: Async: Network]( solrConfig: SolrConfig ): Resource[F, SearchSolrClient[F]] = SolrClient[F](solrConfig).map(new SearchSolrClientImpl[F](_))
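
Note (not part of the patch): a minimal usage sketch of the new Sentinel-aware wiring introduced above. It assumes `sentinel` is a Boolean constructor field of `RedisConfig` with a `false` default (the patch only shows it being read as `cfg.sentinel`), and that `RedisMasterSet` has a companion `apply` following the `RedisHost`/`RedisPort` pattern used in `RedisSpec`; the host, port and master-set name are placeholders.

    import cats.effect.{IO, IOApp}
    import io.renku.queue.client.QueueClient
    import io.renku.redis.client.*

    object SentinelWiringSketch extends IOApp.Simple:

      // Point the config at a Sentinel endpoint, not at the master itself;
      // ClientCreator resolves the master node via `sentinel master <masterSet>`.
      private val redisConfig = RedisConfig(
        host = RedisHost("redis-sentinel.local"),          // placeholder Sentinel host
        port = RedisPort(26379),                           // conventional Sentinel port
        sentinel = true,                                   // assumed Boolean flag read as cfg.sentinel
        maybeMasterSet = Some(RedisMasterSet("mymaster"))  // assumed companion apply
      )

      def run: IO[Unit] =
        QueueClient.make[IO](redisConfig).use { queueClient =>
          // queueClient now talks to the master node discovered through Sentinel
          IO.unit
        }

With a non-Sentinel deployment the same call path goes through `SingleNodeClientCreator`, which simply builds the client from `RedisConfig.asRedisUri`.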