diff --git a/.envrc b/.envrc deleted file mode 100644 index 3550a30f..00000000 --- a/.envrc +++ /dev/null @@ -1 +0,0 @@ -use flake diff --git a/.envrc-template b/.envrc-template new file mode 100644 index 00000000..debc1a08 --- /dev/null +++ b/.envrc-template @@ -0,0 +1 @@ +use flake .#vm \ No newline at end of file diff --git a/.gitignore b/.gitignore index 33df8e62..cabf6666 100644 --- a/.gitignore +++ b/.gitignore @@ -117,5 +117,6 @@ helm-chart/renku-graph/charts/*tgz .DS_Store .direnv/ +.envrc *.qcow2 .tmp \ No newline at end of file diff --git a/flake.nix b/flake.nix index 74cb7e3b..00c1738c 100644 --- a/flake.nix +++ b/flake.nix @@ -39,6 +39,7 @@ [ redis jq + coreutils scala-cli devshellToolsPkgs.sbt17 devshellToolsPkgs.openapi-docs @@ -49,11 +50,12 @@ "projectCreated" "projectUpdated" "projectRemoved" - "projectAuthorizationAdded" - "projectAuthorizationUpdated" - "projectAuthorizationRemoved" + "projectAuthAdded" + "projectAuthUpdated" + "projectAuthRemoved" "userAdded" "userUpdated" + "userRemoved" ]; queueNameConfig = builtins.listToAttrs (builtins.map (qn: { name = "RS_REDIS_QUEUE_${qn}"; @@ -86,6 +88,8 @@ }); vm = pkgs.mkShellNoCC (queueNameConfig // { + RS_SOLR_HOST = "localhost"; + RS_SOLR_PORT = "18983"; RS_SOLR_URL = "http://localhost:18983/solr"; RS_SOLR_CORE = "rsdev-test"; RS_REDIS_HOST = "localhost"; diff --git a/modules/commons/src/main/scala/io/renku/search/model/EntityType.scala b/modules/commons/src/main/scala/io/renku/search/model/EntityType.scala index 3bc87e3c..667bcac7 100644 --- a/modules/commons/src/main/scala/io/renku/search/model/EntityType.scala +++ b/modules/commons/src/main/scala/io/renku/search/model/EntityType.scala @@ -25,7 +25,7 @@ enum EntityType: case Project case User - def name: String = productPrefix.toLowerCase + def name: String = productPrefix object EntityType: def fromString(str: String): Either[String, EntityType] = diff --git a/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala 
b/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala index bae43611..59ebd843 100644 --- a/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala +++ b/modules/config-values/src/main/scala/io/renku/search/config/ConfigValues.scala @@ -48,10 +48,10 @@ object ConfigValues extends ConfigDecoders: } def eventQueue(eventType: String): ConfigValue[Effect, QueueName] = - renv(s"REDIS_QUEUE_$eventType").default("events").as[QueueName] + renv(s"REDIS_QUEUE_$eventType").as[QueueName] val retryOnErrorDelay: ConfigValue[Effect, FiniteDuration] = - renv("RETRY_ON_ERROR_DELAY").default("2 seconds").as[FiniteDuration] + renv("RETRY_ON_ERROR_DELAY").default("10 seconds").as[FiniteDuration] val solrConfig: ConfigValue[Effect, SolrConfig] = { val url = renv("SOLR_URL").default("http://localhost:8983/solr").as[Uri] diff --git a/modules/renku-redis-client/src/main/scala/io/renku/queue/client/MessageHeader.scala b/modules/renku-redis-client/src/main/scala/io/renku/queue/client/MessageHeader.scala index e001e215..9d344bb6 100644 --- a/modules/renku-redis-client/src/main/scala/io/renku/queue/client/MessageHeader.scala +++ b/modules/renku-redis-client/src/main/scala/io/renku/queue/client/MessageHeader.scala @@ -18,11 +18,14 @@ package io.renku.queue.client +import cats.syntax.all.* import io.renku.events.v1.Header import org.apache.avro.Schema import java.time.Instant import java.time.temporal.ChronoUnit +import cats.effect.Clock +import cats.Functor final case class MessageHeader( source: MessageSource, @@ -66,6 +69,7 @@ object MessageSource: opaque type SchemaVersion = String object SchemaVersion: + val V1: SchemaVersion = "V1" def apply(v: String): SchemaVersion = v extension (self: SchemaVersion) def value: String = self @@ -73,6 +77,9 @@ opaque type CreationTime = Instant object CreationTime: def apply(v: Instant): CreationTime = v def now: CreationTime = Instant.now().truncatedTo(ChronoUnit.MILLIS) + def nowF[F[_]: Clock: Functor]: 
F[CreationTime] = + Clock[F].realTimeInstant.map(_.truncatedTo(ChronoUnit.MILLIS)) + extension (self: CreationTime) def value: Instant = self opaque type RequestId = String diff --git a/modules/search-api/src/main/scala/io/renku/search/api/SearchApiImpl.scala b/modules/search-api/src/main/scala/io/renku/search/api/SearchApiImpl.scala index 115ed418..da9abee7 100644 --- a/modules/search-api/src/main/scala/io/renku/search/api/SearchApiImpl.scala +++ b/modules/search-api/src/main/scala/io/renku/search/api/SearchApiImpl.scala @@ -26,7 +26,7 @@ import io.renku.search.api.data.* import io.renku.search.model.EntityType import io.renku.search.model.Id import io.renku.search.solr.client.SearchSolrClient -import io.renku.search.solr.documents.Entity as SolrEntity +import io.renku.search.solr.documents.EntityDocument import io.renku.search.solr.schema.EntityDocumentSchema.Fields import io.renku.solr.client.QueryResponse import io.renku.solr.client.facet.FacetResponse @@ -58,7 +58,7 @@ private class SearchApiImpl[F[_]: Async](solrClient: SearchSolrClient[F]) .map(_.asLeft[SearchResult]) private def toApiResult(currentPage: PageDef)( - solrResult: QueryResponse[SolrEntity] + solrResult: QueryResponse[EntityDocument] ): SearchResult = val hasMore = solrResult.responseBody.docs.size > currentPage.limit val pageInfo = PageWithTotals(currentPage, solrResult.responseBody.numFound, hasMore) @@ -76,6 +76,6 @@ private class SearchApiImpl[F[_]: Async](solrClient: SearchSolrClient[F]) if (hasMore) SearchResult(items.init, facets, pageInfo) else SearchResult(items, facets, pageInfo) - private lazy val toApiEntity: SolrEntity => SearchEntity = + private lazy val toApiEntity: EntityDocument => SearchEntity = given Transformer[Id, UserId] = (id: Id) => UserId(id) _.to[SearchEntity] diff --git a/modules/search-api/src/main/scala/io/renku/search/api/data/SearchEntity.scala b/modules/search-api/src/main/scala/io/renku/search/api/data/SearchEntity.scala index 2a9ad86b..ee30bea8 100644 --- 
a/modules/search-api/src/main/scala/io/renku/search/api/data/SearchEntity.scala +++ b/modules/search-api/src/main/scala/io/renku/search/api/data/SearchEntity.scala @@ -82,7 +82,6 @@ final case class User( id: Id, firstName: Option[users.FirstName] = None, lastName: Option[users.LastName] = None, - email: Option[users.Email] = None, score: Option[Double] = None ) extends SearchEntity @@ -98,7 +97,6 @@ object User: Id("1CAF4C73F50D4514A041C9EDDB025A36"), Some(users.FirstName("Albert")), Some(users.LastName("Einstein")), - Some(users.Email("albert.einstein@mail.com")), Some(2.1) ): SearchEntity ) diff --git a/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala b/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala index 0f8a0469..449d73f6 100644 --- a/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala +++ b/modules/search-api/src/test/scala/io/renku/search/api/SearchApiSpec.scala @@ -29,7 +29,7 @@ import io.renku.search.model.users.FirstName import io.renku.search.query.Query import io.renku.search.solr.client.SearchSolrSpec import io.renku.search.solr.client.SolrDocumentGenerators.* -import io.renku.search.solr.documents.{Entity as SolrEntity, User as SolrUser} +import io.renku.search.solr.documents.{EntityDocument, User as SolrUser} import munit.CatsEffectSuite import scribe.Scribe @@ -74,8 +74,8 @@ class SearchApiSpec extends CatsEffectSuite with SearchSolrSpec: private def mkQuery(phrase: String): QueryInput = QueryInput.pageOne(Query.parse(s"Fields $phrase").fold(sys.error, identity)) - private def toApiEntities(e: SolrEntity*) = e.map(toApiEntity) + private def toApiEntities(e: EntityDocument*) = e.map(toApiEntity) - private def toApiEntity(e: SolrEntity) = + private def toApiEntity(e: EntityDocument) = given Transformer[Id, UserId] = (id: Id) => UserId(id) e.to[SearchEntity] diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/BackgroundProcessManage.scala 
b/modules/search-provision/src/main/scala/io/renku/search/provision/BackgroundProcessManage.scala new file mode 100644 index 00000000..18c0203c --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/BackgroundProcessManage.scala @@ -0,0 +1,89 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision + +import cats.syntax.all.* +import cats.effect.* +import cats.effect.kernel.Fiber +import cats.effect.kernel.Ref +import scala.concurrent.duration.FiniteDuration + +trait BackgroundProcessManage[F[_]]: + def register(name: String, process: F[Unit]): F[Unit] + + /** Starts all registered tasks in the background, represented by `F[Unit]`. 
*/ + def background: Resource[F, F[Unit]] + + /** Same as `.background.useForever` */ + def startAll: F[Nothing] + +object BackgroundProcessManage: + type Process[F[_]] = Fiber[F, Throwable, Unit] + + private case class State[F[_]](tasks: Map[String, F[Unit]]): + def put(name: String, p: F[Unit]): State[F] = + State(tasks.updated(name, p)) + + def getTasks: List[F[Unit]] = tasks.values.toList + + private object State: + def empty[F[_]]: State[F] = State[F](Map.empty) + + def apply[F[_]: Async]( + retryDelay: FiniteDuration, + maxRetries: Option[Int] = None + ): F[BackgroundProcessManage[F]] = + val logger = scribe.cats.effect[F] + Ref.of[F, State[F]](State.empty[F]).map { state => + new BackgroundProcessManage[F] { + def register(name: String, task: F[Unit]): F[Unit] = + state.update(_.put(name, wrapTask(name, task))) + + def startAll: F[Nothing] = + state.get + .flatMap(s => logger.info(s"Starting ${s.tasks.size} background tasks")) >> + background.useForever + + def background: Resource[F, F[Unit]] = + for { + ts <- Resource.eval(state.get.map(_.getTasks)) + x <- ts.traverse(t => Async[F].background(t)) + y = x.traverse_(_.map(_.embed(logger.info(s"Got cancelled")))) + } yield y + + def wrapTask(name: String, task: F[Unit]): F[Unit] = + def run(c: Ref[F, Long]): F[Unit] = + logger.info(s"Starting process for: ${name}") >> + task.handleErrorWith { err => + c.updateAndGet(_ + 1).flatMap { + case n if maxRetries.exists(_ <= n) => + logger.error( + s"Max retries ($maxRetries) for process ${name} exceeded" + ) >> Async[F].raiseError(err) + case n => + val maxRetriesLabel = maxRetries.map(m => s"/$m").getOrElse("") + logger.error( + s"Starting process for '${name}' failed ($n$maxRetriesLabel), retrying", + err + ) >> Async[F].delayBy(run(c), retryDelay) + } + } + Ref.of[F, Long](0).flatMap(run) + } + } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/MessageHandlers.scala 
b/modules/search-provision/src/main/scala/io/renku/search/provision/MessageHandlers.scala new file mode 100644 index 00000000..b00e8258 --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/MessageHandlers.scala @@ -0,0 +1,142 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision + +import cats.Show +import cats.effect.* +import fs2.Stream + +import io.renku.events.v1.* +import io.renku.redis.client.QueueName +import io.renku.search.provision.handler.* +import io.renku.search.solr.documents.EntityDocument + +/** The entry point for defining all message handlers. + * + * They are defined as vals to have them automatically added to a collection, to be + * easier accessed from the main method. 
+ */ +final class MessageHandlers[F[_]: Async]( + steps: QueueName => PipelineSteps[F], + cfg: QueuesConfig +) extends ShowInstances: + private[this] var tasks: Map[String, F[Unit]] = Map.empty + private[this] def add(queue: QueueName, task: Stream[F, Unit]): Stream[F, Unit] = + tasks = tasks.updated(queue.name, task.compile.drain) + task + + def getAll: Map[String, F[Unit]] = tasks + + val projectCreated: Stream[F, Unit] = + add(cfg.projectCreated, makeCreated[ProjectCreated](cfg.projectCreated)) + + val projectUpdated = + add( + cfg.projectUpdated, + makeUpdated[ProjectUpdated](cfg.projectUpdated, DocumentUpdates.project) + ) + + val projectRemoved: Stream[F, Unit] = + add(cfg.projectRemoved, makeRemovedSimple[ProjectRemoved](cfg.projectRemoved)) + + val projectAuthAdded: Stream[F, Unit] = + add( + cfg.projectAuthorizationAdded, + makeUpdated[ProjectAuthorizationAdded]( + cfg.projectAuthorizationAdded, + DocumentUpdates.projectAuthAdded + ) + ) + + val projectAuthUpdated: Stream[F, Unit] = + add( + cfg.projectAuthorizationUpdated, + makeUpdated[ProjectAuthorizationUpdated]( + cfg.projectAuthorizationUpdated, + DocumentUpdates.projectAuthUpdated + ) + ) + + val projectAuthRemoved: Stream[F, Unit] = add( + cfg.projectAuthorizationRemoved, + makeUpdated[ProjectAuthorizationRemoved]( + cfg.projectAuthorizationRemoved, + DocumentUpdates.projectAuthRemoved + ) + ) + + val userAdded: Stream[F, Unit] = + add(cfg.userAdded, makeCreated[UserAdded](cfg.userAdded)) + + val userUpdated = + add(cfg.userUpdated, makeUpdated[UserUpdated](cfg.userUpdated, DocumentUpdates.user)) + + val userRemoved = + val ps = steps(cfg.userRemoved) + add( + cfg.userRemoved, + ps.reader + .read[UserRemoved] + .through(ps.deleteFromSolr.tryDeleteAll) + .through(ps.deleteFromSolr.whenSuccess { msg => + Stream + .emit(msg.map(IdExtractor[UserRemoved].getId)) + .through(ps.userUtils.removeFromProjects) + .compile + .drain + }) + ) + + private def makeCreated[A](queue: QueueName)(using + 
QueueMessageDecoder[F, A], + DocumentConverter[A], + Show[A] + ): Stream[F, Unit] = + val ps = steps(queue) + ps.reader + .read[A] + .chunks + .through(ps.converter.convertChunk) + .through(ps.pushToSolr.pushChunk) + + private def makeUpdated[A]( + queue: QueueName, + docUpdate: (A, EntityDocument) => Option[EntityDocument] + )(using + QueueMessageDecoder[F, A], + Show[A], + IdExtractor[A] + ): Stream[F, Unit] = + val ps = steps(queue) + ps.reader + .read[A] + .through(ps.fetchFromSolr.fetch1) + .map(_.update(docUpdate)) + .through(ps.pushToSolr.push) + + private def makeRemovedSimple[A](queue: QueueName)(using + QueueMessageDecoder[F, A], + Show[A], + IdExtractor[A] + ): Stream[F, Unit] = + val ps = steps(queue) + ps.reader + .read[A] + .chunks + .through(ps.deleteFromSolr.deleteAll) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala index 86ff6f26..79bb3d5c 100644 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/Microservice.scala @@ -20,133 +20,30 @@ package io.renku.search.provision import cats.effect.* import cats.syntax.all.* + import io.renku.logging.LoggingSetup -import io.renku.redis.client.QueueName -import io.renku.search.provision.project.* -import io.renku.search.provision.user.* import io.renku.search.solr.schema.Migrations import io.renku.solr.client.migration.SchemaMigrator -import scribe.Scribe -import scribe.cats.* object Microservice extends IOApp: - - private val loadConfig: IO[SearchProvisionConfig] = - SearchProvisionConfig.config.load[IO] + private val logger = scribe.cats.io override def run(args: List[String]): IO[ExitCode] = - for { - config <- loadConfig - _ <- IO(LoggingSetup.doConfigure(config.verbosity)) - _ <- runSolrMigrations(config) - _ <- startProvisioners(config) - } yield 
ExitCode.Success - - private def startProvisioners(cfg: SearchProvisionConfig): IO[Unit] = - List( - ( - "ProjectCreated", - cfg.queuesConfig.projectCreated, - ProjectCreatedProvisioning - .make[IO](cfg.queuesConfig.projectCreated, cfg.redisConfig, cfg.solrConfig) - .map(_.provisioningProcess.start) - ), - ( - "ProjectUpdated", - cfg.queuesConfig.projectUpdated, - ProjectUpdatedProvisioning - .make[IO](cfg.queuesConfig.projectUpdated, cfg.redisConfig, cfg.solrConfig) - .map(_.provisioningProcess.start) - ), - ( - "ProjectRemoved", - cfg.queuesConfig.projectRemoved, - ProjectRemovedProcess - .make[IO](cfg.queuesConfig.projectRemoved, cfg.redisConfig, cfg.solrConfig) - .map(_.removalProcess.start) - ), - ( - "ProjectAuthorizationAdded", - cfg.queuesConfig.projectAuthorizationAdded, - AuthorizationAddedProvisioning - .make[IO]( - cfg.queuesConfig.projectAuthorizationAdded, - cfg.redisConfig, - cfg.solrConfig - ) - .map(_.provisioningProcess.start) - ), - ( - "ProjectAuthorizationUpdated", - cfg.queuesConfig.projectAuthorizationUpdated, - AuthorizationUpdatedProvisioning - .make[IO]( - cfg.queuesConfig.projectAuthorizationUpdated, - cfg.redisConfig, - cfg.solrConfig - ) - .map(_.provisioningProcess.start) - ), - ( - "ProjectAuthorizationRemoved", - cfg.queuesConfig.projectAuthorizationRemoved, - AuthorizationRemovedProvisioning - .make[IO]( - cfg.queuesConfig.projectAuthorizationRemoved, - cfg.redisConfig, - cfg.solrConfig - ) - .map(_.provisioningProcess.start) - ), - ( - "UserAdded", - cfg.queuesConfig.userAdded, - UserAddedProvisioning - .make[IO](cfg.queuesConfig.userAdded, cfg.redisConfig, cfg.solrConfig) - .map(_.provisioningProcess.start) - ), - ( - "UserUpdated", - cfg.queuesConfig.userUpdated, - UserUpdatedProvisioning - .make[IO](cfg.queuesConfig.userUpdated, cfg.redisConfig, cfg.solrConfig) - .map(_.provisioningProcess.start) - ), - ( - "UserRemoved", - cfg.queuesConfig.userRemoved, - UserRemovedProcess - .make[IO]( - cfg.queuesConfig.userRemoved, - 
cfg.queuesConfig.projectAuthorizationRemoved, - cfg.redisConfig, - cfg.solrConfig - ) - .map(_.removalProcess.start) - ) - ).parTraverse_(startProcess(cfg)) - .flatMap(_ => IO.never) - - private def startProcess( - cfg: SearchProvisionConfig - ): ((String, QueueName, Resource[IO, IO[FiberIO[Unit]]])) => IO[Unit] = { - case t @ (name, queue, resource) => - resource - .use(_ => - Scribe[IO].info(s"'$name' provisioning process started on '$queue' queue") - ) - .handleErrorWith { err => - Scribe[IO].error( - s"Starting provisioning process for '$name' failed, retrying", - err - ) >> Temporal[IO].delayBy(startProcess(cfg)(t), cfg.retryOnErrorDelay) - } - } - - private def runSolrMigrations(cfg: SearchProvisionConfig): IO[Unit] = + Services.make[IO].use { services => + for { + _ <- IO(LoggingSetup.doConfigure(services.config.verbosity)) + _ <- runSolrMigrations(services.config) + pm <- BackgroundProcessManage[IO](services.config.retryOnErrorDelay) + tasks = services.messageHandlers.getAll.toList + _ <- tasks.traverse_(pm.register.tupled) + _ <- pm.startAll + } yield ExitCode.Success + } + + def runSolrMigrations(cfg: SearchProvisionConfig): IO[Unit] = SchemaMigrator[IO](cfg.solrConfig) .use(_.migrate(Migrations.all)) .handleErrorWith { err => - Scribe[IO].error("Running solr migrations failure, retrying", err) >> + logger.error("Running solr migrations failure, retrying", err) >> Temporal[IO].delayBy(runSolrMigrations(cfg), cfg.retryOnErrorDelay) } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/OnSolrPersist.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/OnSolrPersist.scala deleted file mode 100644 index f5c127ed..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/OnSolrPersist.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische 
Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision - -import io.renku.queue.client.{QueueClient, RequestId} -import io.renku.search.solr.client.SearchSolrClient - -private trait OnSolrPersist[F[_], In]: - def execute(in: In, requestId: RequestId)( - queueClient: QueueClient[F], - solrClient: SearchSolrClient[F] - ): F[Unit] diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/ProvisioningProcess.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/ProvisioningProcess.scala deleted file mode 100644 index 64b24b50..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/ProvisioningProcess.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision - -import io.renku.redis.client.ClientId - -trait ProvisioningProcess[F[_]]: - def provisioningProcess: F[Unit] - -object ProvisioningProcess: - val clientId: ClientId = ClientId("search-provisioner") diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/QueueMessageDecoder.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/QueueMessageDecoder.scala deleted file mode 100644 index 692d7718..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/QueueMessageDecoder.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision - -import cats.MonadThrow -import cats.syntax.all.* -import io.renku.avro.codec.{AvroDecoder, AvroReader} -import io.renku.queue.client.{DataContentType, QueueMessage} -import org.apache.avro.Schema - -private class QueueMessageDecoder[F[_]: MonadThrow, A](schema: Schema)(using - AvroDecoder[A] -): - private val avro = AvroReader(schema) - - def decodeMessage(message: QueueMessage): F[Seq[A]] = - findContentType.andThenF(decodePayload(message))(message) - - private def findContentType(message: QueueMessage): F[DataContentType] = - MonadThrow[F] - .fromEither(DataContentType.from(message.header.dataContentType)) - - private def decodePayload(message: QueueMessage): DataContentType => F[Seq[A]] = { - case DataContentType.Binary => catchNonFatal(avro.read[A](message.payload)) - case DataContentType.Json => catchNonFatal(avro.readJson[A](message.payload)) - } - - private def catchNonFatal(f: => Seq[A]): F[Seq[A]] = - MonadThrow[F].catchNonFatal(f) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/Services.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/Services.scala new file mode 100644 index 00000000..e94e593b --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/Services.scala @@ -0,0 +1,50 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision + +import cats.effect.kernel.Async +import cats.effect.kernel.Resource +import fs2.io.net.Network + +import io.renku.queue.client.QueueClient +import io.renku.redis.client.ClientId +import io.renku.search.provision.handler.PipelineSteps +import io.renku.search.solr.client.SearchSolrClient + +final case class Services[F[_]]( + config: SearchProvisionConfig, + solrClient: SearchSolrClient[F], + queueClient: Resource[F, QueueClient[F]], + messageHandlers: MessageHandlers[F] +) + +object Services: + val clientId: ClientId = ClientId("search-provisioner") + def make[F[_]: Async: Network]: Resource[F, Services[F]] = + for { + cfg <- Resource.eval(SearchProvisionConfig.config.load[F]) + solr <- SearchSolrClient.make[F](cfg.solrConfig) + + // The redis client must be initialized on each operation to + // be able to connect to the cluster + redis = QueueClient.make[F](cfg.redisConfig) + + steps = PipelineSteps[F](solr, redis, cfg.queuesConfig, 1, clientId) + handlers = MessageHandlers[F](steps, cfg.queuesConfig) + } yield Services(cfg, solr, redis, handlers) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/SolrRemovalProcess.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/SolrRemovalProcess.scala deleted file mode 100644 index 33559d72..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/SolrRemovalProcess.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision - -import cats.Show -import cats.data.NonEmptyList -import cats.effect.{Async, Resource, Temporal} -import cats.syntax.all.* -import fs2.io.net.Network -import io.github.arainko.ducktape.* -import io.renku.avro.codec.AvroDecoder -import io.renku.queue.client.{QueueClient, QueueMessage, RequestId} -import io.renku.redis.client.{ClientId, QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrClient -import io.renku.solr.client.SolrConfig -import org.apache.avro.Schema -import scribe.Scribe - -import scala.concurrent.duration.* - -trait SolrRemovalProcess[F[_]]: - def removalProcess: F[Unit] - -object SolrRemovalProcess: - - def make[F[_]: Async: Network: Scribe, In]( - queueName: QueueName, - inSchema: Schema, - redisConfig: RedisConfig, - solrConfig: SolrConfig, - onSolrPersist: Option[OnSolrPersist[F, In]] - )(using - Show[In], - Transformer[In, Id], - AvroDecoder[In] - ): Resource[F, SolrRemovalProcess[F]] = - SearchSolrClient.make[F](solrConfig).map { - new SolrRemovalProcessImpl[F, In]( - queueName, - ProvisioningProcess.clientId, - QueueClient.make[F](redisConfig), - _, - QueueMessageDecoder[F, In](inSchema), - onSolrPersist - ) - } - -private class SolrRemovalProcessImpl[F[_]: Async: Scribe, In]( - queueName: QueueName, - clientId: ClientId, - queueClientResource: Resource[F, QueueClient[F]], - solrClient: SearchSolrClient[F], - messageDecoder: QueueMessageDecoder[F, In], - onSolrPersist: Option[OnSolrPersist[F, In]] -)(using Show[In], Transformer[In, 
Id], AvroDecoder[In]) - extends SolrRemovalProcess[F]: - override def removalProcess: F[Unit] = - queueClientResource - .use { queueClient => - findLastProcessed(queueClient) >>= { maybeLastProcessed => - queueClient - .acquireEventsStream(queueName, chunkSize = 1, maybeLastProcessed) - .evalMap(decodeMessage(queueClient)) - .evalTap(logInfo) - .evalMap(deleteFromSolr(queueClient)) - .compile - .drain - .handleErrorWith(logAndRestart) - } - } - .handleErrorWith(logAndRestart) - - private def findLastProcessed(queueClient: QueueClient[F]) = - queueClient.findLastProcessed(clientId, queueName) - - private lazy val logInfo: ((QueueMessage, Seq[In])) => F[Unit] = { case (m, v) => - Scribe[F].info( - s"Received message queue: $queueName, " + - s"id: ${m.id}, " + - s"source: ${m.header.source}, " + - s"type: ${m.header.`type`} " + - s"for: ${v.mkString_(", ")}" - ) - } - - private def decodeMessage(queueClient: QueueClient[F])( - message: QueueMessage - ): F[(QueueMessage, Seq[In])] = - messageDecoder - .decodeMessage(message) - .tupleLeft(message) - .onError(markProcessedOnFailure(message, queueClient)) - - private def deleteFromSolr( - queueClient: QueueClient[F] - ): ((QueueMessage, Seq[In])) => F[Unit] = { case (message, ins) => - toDocumentIds(ins).fold(().pure[F]) { ids => - (solrClient.deleteIds(ids) >> onPersist(queueClient, message, ins)) - .flatMap(_ => markProcessed(message, queueClient)) - .onError(markProcessedOnFailure(message, queueClient)) - } - } - - private def onPersist( - queueClient: QueueClient[F], - message: QueueMessage, - ins: Seq[In] - ) = - onSolrPersist.fold(().pure[F]) { p => - ins.toList.traverse_( - p.execute(_, RequestId(message.header.requestId))(queueClient, solrClient) - ) - } - - private lazy val toDocumentIds: Seq[In] => Option[NonEmptyList[Id]] = - _.map(_.to[Id]).toList.toNel - - private def markProcessedOnFailure( - message: QueueMessage, - queueClient: QueueClient[F] - ): PartialFunction[Throwable, F[Unit]] = err => - 
markProcessed(message, queueClient) >> - Scribe[F].error(s"Processing messageId: ${message.id} failed", err) - - private def markProcessed(message: QueueMessage, queueClient: QueueClient[F]): F[Unit] = - queueClient.markProcessed(clientId, queueName, message.id) - - private def logAndRestart: Throwable => F[Unit] = err => - Scribe[F].error(s"Failure in the provisioning process for '$queueName'", err) >> - Temporal[F].delayBy(removalProcess, 30 seconds) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/UpdateProvisioningProcess.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/UpdateProvisioningProcess.scala deleted file mode 100644 index 2bc35d24..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/UpdateProvisioningProcess.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision - -import cats.Show -import cats.effect.{Async, Resource, Temporal} -import cats.syntax.all.* -import fs2.io.net.Network -import io.bullet.borer.Codec -import io.renku.avro.codec.AvroDecoder -import io.renku.queue.client.{QueueClient, QueueMessage} -import io.renku.redis.client.{ClientId, QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrClient -import io.renku.search.solr.documents.Entity -import io.renku.solr.client.SolrConfig -import org.apache.avro.Schema -import scribe.Scribe - -import scala.concurrent.duration.* -import scala.reflect.ClassTag - -trait UpdateProvisioningProcess[F[_]] extends ProvisioningProcess[F] - -object UpdateProvisioningProcess: - - def make[F[_]: Async: Network: Scribe, In, Out <: Entity]( - queueName: QueueName, - inSchema: Schema, - idExtractor: In => Id, - docUpdate: ((In, Out)) => Out, - redisConfig: RedisConfig, - solrConfig: SolrConfig - )(using - Show[In], - AvroDecoder[In], - Codec[Entity], - ClassTag[Out] - ): Resource[F, UpdateProvisioningProcess[F]] = - SearchSolrClient.make[F](solrConfig).map { - new UpdateProvisioningProcessImpl[F, In, Out]( - queueName, - ProvisioningProcess.clientId, - idExtractor, - docUpdate, - QueueClient.make[F](redisConfig), - _, - QueueMessageDecoder[F, In](inSchema) - ) - } - -private class UpdateProvisioningProcessImpl[F[_]: Async: Scribe, In, Out <: Entity]( - queueName: QueueName, - clientId: ClientId, - idExtractor: In => Id, - docUpdate: ((In, Out)) => Out, - queueClientResource: Resource[F, QueueClient[F]], - solrClient: SearchSolrClient[F], - messageDecoder: QueueMessageDecoder[F, In] -)(using Show[In], Codec[Entity], ClassTag[Out]) - extends UpdateProvisioningProcess[F]: - - override def provisioningProcess: F[Unit] = - queueClientResource - .use { queueClient => - findLastProcessed(queueClient) >>= { maybeLastProcessed => - queueClient - .acquireEventsStream(queueName, chunkSize = 1, 
maybeLastProcessed) - .evalMap(decodeMessage(queueClient)) - .evalTap(logInfo) - .evalMap(fetchDocuments) - .evalMap(pushToSolr(queueClient)) - .compile - .drain - .handleErrorWith(logAndRestart) - } - } - .handleErrorWith(logAndRestart) - - private def findLastProcessed(queueClient: QueueClient[F]) = - queueClient.findLastProcessed(clientId, queueName) - - private lazy val logInfo: ((QueueMessage, Seq[In])) => F[Unit] = { case (m, v) => - Scribe[F].info( - "Received message " + - s"queue: $queueName, " + - s"id: ${m.id}, " + - s"source: ${m.header.source}, " + - s"type: ${m.header.`type`} " + - s"for: ${v.mkString_(", ")}" - ) - } - - private def decodeMessage(queueClient: QueueClient[F])( - message: QueueMessage - ): F[(QueueMessage, Seq[In])] = - messageDecoder - .decodeMessage(message) - .tupleLeft(message) - .onError(markProcessedOnFailure(message, queueClient)) - - private lazy val fetchDocuments - : ((QueueMessage, Seq[In])) => F[(QueueMessage, Seq[(In, Out)])] = - case (m, ins) => - ins - .map { in => - val docId = idExtractor(in) - solrClient.findById[Out](docId) >>= { - case Some(out) => (in, out).some.pure[F] - case None => - Scribe[F] - .warn(s"Document id: '$docId' for update doesn't exist in Solr; skipping") - .as(Option.empty[Nothing]) - } - } - .sequence - .map(_.flatten) - .map((m, _)) - - private def pushToSolr( - queueClient: QueueClient[F] - ): ((QueueMessage, Seq[(In, Out)])) => F[Unit] = { case (m, inOuts) => - inOuts match { - case l if l.isEmpty => ().pure[F] - case inOuts => - val updatedDocs = inOuts.map(docUpdate).map(_.widen) - solrClient - .insert(updatedDocs) - .flatMap(_ => markProcessed(m, queueClient)) - .onError(markProcessedOnFailure(m, queueClient)) - } - } - - private def markProcessedOnFailure( - message: QueueMessage, - queueClient: QueueClient[F] - ): PartialFunction[Throwable, F[Unit]] = err => - markProcessed(message, queueClient) >> - Scribe[F].error(s"Processing messageId: ${message.id} for '$queueName' failed", err) - - 
private def markProcessed(message: QueueMessage, queueClient: QueueClient[F]): F[Unit] = - queueClient.markProcessed(clientId, queueName, message.id) - - private def logAndRestart: Throwable => F[Unit] = err => - Scribe[F].error(s"Failure in the provisioning process for '$queueName'", err) >> - Temporal[F].delayBy(provisioningProcess, 30 seconds) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/UpsertProvisioningProcess.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/UpsertProvisioningProcess.scala deleted file mode 100644 index ccf2f347..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/UpsertProvisioningProcess.scala +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision - -import cats.Show -import cats.effect.{Async, Resource, Temporal} -import cats.syntax.all.* -import fs2.Chunk -import fs2.io.net.Network -import io.bullet.borer.Encoder -import io.github.arainko.ducktape.* -import io.renku.avro.codec.AvroDecoder -import io.renku.queue.client.{QueueClient, QueueMessage} -import io.renku.redis.client.{ClientId, QueueName, RedisConfig} -import io.renku.search.solr.client.SearchSolrClient -import io.renku.search.solr.documents.Entity -import io.renku.solr.client.SolrConfig -import org.apache.avro.Schema -import scribe.Scribe - -import scala.concurrent.duration.* - -trait UpsertProvisioningProcess[F[_]] extends ProvisioningProcess[F] - -object UpsertProvisioningProcess: - - def make[F[_]: Async: Network: Scribe, In, Out <: Entity]( - queueName: QueueName, - inSchema: Schema, - redisConfig: RedisConfig, - solrConfig: SolrConfig - )(using - Show[In], - Transformer[In, Out], - AvroDecoder[In], - Encoder[Entity] - ): Resource[F, UpsertProvisioningProcess[F]] = - SearchSolrClient.make[F](solrConfig).map { - new UpsertProvisioningProcessImpl[F, In, Out]( - queueName, - ProvisioningProcess.clientId, - QueueClient.make[F](redisConfig), - _, - QueueMessageDecoder[F, In](inSchema) - ) - } - -private class UpsertProvisioningProcessImpl[F[_]: Async: Scribe, In, Out <: Entity]( - queueName: QueueName, - clientId: ClientId, - queueClientResource: Resource[F, QueueClient[F]], - solrClient: SearchSolrClient[F], - messageDecoder: QueueMessageDecoder[F, In] -)(using Show[In], Transformer[In, Out], AvroDecoder[In], Encoder[Entity]) - extends UpsertProvisioningProcess[F]: - - override def provisioningProcess: F[Unit] = - queueClientResource - .use { queueClient => - findLastProcessed(queueClient) >>= { maybeLastProcessed => - queueClient - .acquireEventsStream(queueName, chunkSize = 1, maybeLastProcessed) - .evalMap(decodeMessage(queueClient)) - .evalTap(logInfo) - .groupWithin(chunkSize = 10, timeout = 500 
millis) - .evalMap(pushToSolr(queueClient)) - .compile - .drain - .handleErrorWith(logAndRestart) - } - } - .handleErrorWith(logAndRestart) - - private def findLastProcessed(queueClient: QueueClient[F]) = - queueClient.findLastProcessed(clientId, queueName) - - private lazy val logInfo: ((QueueMessage, Seq[In])) => F[Unit] = { case (m, v) => - Scribe[F].info( - "Received message " + - s"queue: $queueName, " + - s"id: ${m.id}, " + - s"source: ${m.header.source}, " + - s"type: ${m.header.`type`} " + - s"for: ${v.mkString_(", ")}" - ) - } - - private def decodeMessage(queueClient: QueueClient[F])( - message: QueueMessage - ): F[(QueueMessage, Seq[In])] = - messageDecoder - .decodeMessage(message) - .tupleLeft(message) - .onError(markProcessedOnFailure(message, queueClient)) - - private def pushToSolr( - queueClient: QueueClient[F] - )(chunk: Chunk[(QueueMessage, Seq[In])]): F[Unit] = - chunk.toList match { - case Nil => ().pure[F] - case tuples => - val docs = toSolrDocuments(tuples.flatMap(_._2)) - val (lastMessage, _) = tuples.last - solrClient - .insert(docs.map(_.widen)) - .flatMap(_ => markProcessed(lastMessage, queueClient)) - .onError(markProcessedOnFailure(lastMessage, queueClient)) - } - - private lazy val toSolrDocuments: Seq[In] => Seq[Out] = - _.map(_.to[Out]) - - private def markProcessedOnFailure( - message: QueueMessage, - queueClient: QueueClient[F] - ): PartialFunction[Throwable, F[Unit]] = err => - markProcessed(message, queueClient) >> - Scribe[F].error(s"Processing messageId: ${message.id} for '$queueName' failed", err) - - private def markProcessed(message: QueueMessage, queueClient: QueueClient[F]): F[Unit] = - queueClient.markProcessed(clientId, queueName, message.id) - - private def logAndRestart: Throwable => F[Unit] = err => - Scribe[F].error(s"Failure in the provisioning process for '$queueName'", err) >> - Temporal[F].delayBy(provisioningProcess, 30 seconds) diff --git 
a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/ConvertDocument.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/ConvertDocument.scala new file mode 100644 index 00000000..596df53c --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/ConvertDocument.scala @@ -0,0 +1,48 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision.handler + +import fs2.Pipe + +import io.renku.search.solr.documents.EntityDocument +import fs2.Chunk +import MessageReader.Message + +trait ConvertDocument[F[_]]: + def convert[A](using + c: DocumentConverter[A] + ): Pipe[F, Message[A], Message[EntityDocument]] + + def convertChunk[A](using + c: DocumentConverter[A] + ): Pipe[F, Chunk[Message[A]], Chunk[Message[EntityDocument]]] + +object ConvertDocument: + /** Converts input messages into solr document values. 
*/ + def apply[F[_]]: ConvertDocument[F] = + new ConvertDocument[F]: + def convert[A](using + c: DocumentConverter[A] + ): Pipe[F, Message[A], Message[EntityDocument]] = + _.map(m => m.map(c.convert)) + + def convertChunk[A](using + c: DocumentConverter[A] + ): Pipe[F, Chunk[Message[A]], Chunk[Message[EntityDocument]]] = + _.map(_.map(_.map(c.convert))) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DeleteFromSolr.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DeleteFromSolr.scala new file mode 100644 index 00000000..40c7ce8c --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DeleteFromSolr.scala @@ -0,0 +1,97 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.renku.search.provision.handler + +import cats.data.NonEmptyList +import cats.effect.Sync +import cats.syntax.all.* +import fs2.{Chunk, Pipe, Stream} + +import io.renku.search.provision.handler.DeleteFromSolr.DeleteResult +import io.renku.search.provision.handler.MessageReader.Message +import io.renku.search.solr.client.SearchSolrClient + +trait DeleteFromSolr[F[_]]: + def tryDeleteAll[A](using IdExtractor[A]): Pipe[F, Message[A], DeleteResult[A]] + def deleteAll[A](using IdExtractor[A]): Pipe[F, Chunk[Message[A]], Unit] + def whenSuccess[A](fb: Message[A] => F[Unit]): Pipe[F, DeleteResult[A], Unit] + +object DeleteFromSolr: + enum DeleteResult[A](val message: Message[A]): + case Success(override val message: Message[A]) extends DeleteResult(message) + case Failed(override val message: Message[A], error: Throwable) + extends DeleteResult(message) + case NoIds(override val message: Message[A]) extends DeleteResult(message) + + object DeleteResult: + def from[A](msg: Message[A])(eab: Either[Throwable, Unit]): DeleteResult[A] = + eab.fold(err => Failed(msg, err), _ => Success(msg)) + + def apply[F[_]: Sync]( + solrClient: SearchSolrClient[F], + reader: MessageReader[F] + ): DeleteFromSolr[F] = + new DeleteFromSolr[F] { + val logger = scribe.cats.effect[F] + def tryDeleteAll[A](using + IdExtractor[A] + ): Pipe[F, Message[A], DeleteResult[A]] = + _.evalMap { msg => + NonEmptyList.fromList(msg.decoded.map(IdExtractor[A].getId).toList) match + case Some(nel) => + logger.debug(s"Deleting documents with ids: $nel") >> + solrClient + .deleteIds(nel) + .attempt + .map(DeleteResult.from(msg)) + + case None => + Sync[F].pure(DeleteResult.NoIds(msg)) + } + + def whenSuccess[A](fb: Message[A] => F[Unit]): Pipe[F, DeleteResult[A], Unit] = + _.evalMap { + case DeleteResult.Success(m) => + logger.debug( + s"Deletion ${m.id} successful, running post processing action" + ) >> + fb(m).attempt.map(DeleteResult.from(m)) + case result => result.pure[F] + } + 
.through(markProcessed) + + def deleteAll[A](using IdExtractor[A]): Pipe[F, Chunk[Message[A]], Unit] = + _.flatMap(Stream.chunk) + .through(tryDeleteAll) + .through(markProcessed) + + private def markProcessed[A]: Pipe[F, DeleteResult[A], Unit] = + _.evalTap(result => reader.markProcessed(result.message.id)) + .evalMap { + case DeleteResult.Success(_) => Sync[F].unit + + case DeleteResult.Failed(msg, err) => + logger.error(s"Processing messageId: ${msg.id} failed", err) + + case DeleteResult.NoIds(msg) => + logger.info( + s"Not deleting from solr, since msg '${msg.id}' doesn't have document ids" + ) + } + } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DocumentConverter.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DocumentConverter.scala new file mode 100644 index 00000000..23c873d9 --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DocumentConverter.scala @@ -0,0 +1,60 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.renku.search.provision.handler + +import io.renku.search.solr.documents.EntityDocument +import io.renku.search.solr.documents.Project as ProjectDocument +import io.renku.search.solr.documents.User as UserDocument +import io.github.arainko.ducktape.* +import io.renku.events.v1.* +import io.renku.search.model.Id +import io.renku.search.provision.TypeTransformers.given + +trait DocumentConverter[A]: + def convert(a: A): EntityDocument + +object DocumentConverter: + def create[A](f: A => EntityDocument): DocumentConverter[A] = + (a: A) => f(a) + + def fromTransformer[A](t: Transformer[A, EntityDocument]): DocumentConverter[A] = + create(t.transform) + + def apply[A](using d: DocumentConverter[A]): DocumentConverter[A] = d + + given DocumentConverter[ProjectCreated] = + fromTransformer( + _.into[ProjectDocument].transform( + Field.computed(_.owners, pc => List(Id(pc.createdBy))), + Field.default(_.members), + Field.default(_.score) + ) + ) + + given DocumentConverter[UserAdded] = + fromTransformer( + _.into[UserDocument].transform( + Field.default(_.score), + Field.computed(_.name, u => UserDocument.nameFrom(u.firstName, u.lastName)), + Field.default(_.visibility) + ) + ) + +extension [A: DocumentConverter](self: A) + def toDocument = DocumentConverter[A].convert(self) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DocumentUpdates.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DocumentUpdates.scala new file mode 100644 index 00000000..0b8fe4ad --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/DocumentUpdates.scala @@ -0,0 +1,91 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision.handler + +import cats.syntax.all.* +import io.renku.search.provision.TypeTransformers.given +import io.renku.search.solr.documents.EntityDocument +import io.renku.search.solr.documents.Project as ProjectDocument +import io.renku.search.solr.documents.User as UserDocument +import io.github.arainko.ducktape.* +import io.renku.events.v1.* +import io.renku.search.model.Id + +object DocumentUpdates: + + def project(update: ProjectUpdated, orig: EntityDocument): Option[EntityDocument] = + orig match + case orig: ProjectDocument => + update + .into[ProjectDocument] + .transform( + Field.const(_.createdBy, orig.createdBy), + Field.const(_.creationDate, orig.creationDate), + Field.const(_.owners, orig.owners), + Field.const(_.members, orig.members), + Field.default(_.score) + ) + .some + case _ => None // todo really throw here? 
+ + def user(update: UserUpdated, orig: EntityDocument): Option[EntityDocument] = orig match + case _: UserDocument => + update + .into[UserDocument] + .transform( + Field.default(_.score), + Field.computed(_.name, u => UserDocument.nameFrom(u.firstName, u.lastName)), + Field.default(_.visibility) + ) + .some + case _ => None + + def projectAuthAdded( + update: ProjectAuthorizationAdded, + orig: EntityDocument + ): Option[EntityDocument] = + orig match + case pd: ProjectDocument => + pd.addMember( + Id(update.userId), + memberRoleTransformer.transform(update.role) + ).some + case _ => None + + def projectAuthUpdated( + update: ProjectAuthorizationUpdated, + orig: EntityDocument + ): Option[EntityDocument] = + orig match + case pd: ProjectDocument => + pd.addMember( + Id(update.userId), + memberRoleTransformer.transform(update.role) + ).some + + case _ => None + + def projectAuthRemoved( + update: ProjectAuthorizationRemoved, + orig: EntityDocument + ): Option[EntityDocument] = + orig match + case pd: ProjectDocument => + pd.removeMember(Id(update.userId)).some + case _ => None diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/FetchFromSolr.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/FetchFromSolr.scala new file mode 100644 index 00000000..a603ec0a --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/FetchFromSolr.scala @@ -0,0 +1,116 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision.handler + +import cats.effect.Sync +import cats.syntax.all.* +import fs2.{Pipe, Stream} + +import io.renku.search.model.Id +import io.renku.search.provision.handler.FetchFromSolr.MessageDocument +import io.renku.search.provision.handler.MessageReader.Message +import io.renku.search.solr.client.SearchSolrClient +import io.renku.search.solr.documents.EntityDocument +import io.renku.search.query.Query +import io.bullet.borer.derivation.MapBasedCodecs +import io.bullet.borer.Decoder +import io.renku.search.solr.schema.EntityDocumentSchema.Fields +import io.renku.solr.client.QueryString +import io.renku.solr.client.QueryData +import io.renku.search.solr.query.SolrToken +import io.renku.search.model.EntityType + +trait FetchFromSolr[F[_]]: + def fetch1[A](using IdExtractor[A]): Pipe[F, Message[A], MessageDocument[A]] + def fetchProjectForUser(userId: Id): Stream[F, FetchFromSolr.ProjectId] + +object FetchFromSolr: + final case class ProjectId(id: Id) + object ProjectId: + given Decoder[ProjectId] = MapBasedCodecs.deriveDecoder + + final case class MessageDocument[A: IdExtractor]( + message: MessageReader.Message[A], + documents: Map[Id, EntityDocument] + ): + def update( + f: (A, EntityDocument) => Option[EntityDocument] + ): Message[EntityDocument] = + Message( + message.raw, + message.decoded + .map(a => + documents + .get(IdExtractor[A].getId(a)) + .flatMap(doc => f(a, doc)) + ) + .flatten + ) + + def apply[F[_]: Sync]( + solrClient: SearchSolrClient[F] + ): FetchFromSolr[F] = + new FetchFromSolr[F] { + val 
logger = scribe.cats.effect[F] + + private def idQuery(id: Id): Query = + // TODO this must be renamed to "idIs" since we have only one id type + Query(Query.Segment.projectIdIs(id.value)) + + def fetchProjectForUser(userId: Id): Stream[F, FetchFromSolr.ProjectId] = + val query = QueryString( + List( + SolrToken.fieldIs( + Fields.entityType, + SolrToken.fromEntityType(EntityType.Project) + ), + List( + SolrToken.fieldIs(Fields.owners, SolrToken.fromId(userId)), + SolrToken.fieldIs(Fields.members, SolrToken.fromId(userId)) + ).foldOr + ).foldAnd.value + ) + solrClient.queryAll[ProjectId](QueryData(query)) + + def fetch1[A](using IdExtractor[A]): Pipe[F, Message[A], MessageDocument[A]] = + _.evalMap { msg => + val ids = msg.decoded.map(IdExtractor[A].getId) + val loaded = ids + .traverse(id => + solrClient + .queryEntity(idQuery(id), 1, 0) + .map(_.responseBody.docs.headOption) + .map(doc => id -> doc) + ) + .flatTap { results => + val notFound = results.filter(_._2.isEmpty).map(_._1.value).mkString(", ") + if (notFound.isEmpty) Sync[F].unit + else + logger.warn( + s"Document ids: '$notFound' for update doesn't exist in Solr; skipping" + ) + } + .map(_.foldLeft(Map.empty[Id, EntityDocument]) { + case (result, (id, Some(doc))) => result.updated(id, doc) + case (result, (id, None)) => result + }) + + loaded.map(m => MessageDocument(msg, m)) + } + } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/IdExtractor.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/IdExtractor.scala new file mode 100644 index 00000000..e088df4b --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/IdExtractor.scala @@ -0,0 +1,42 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision.handler + +import io.renku.search.model.Id +import io.renku.events.v1.* + +trait IdExtractor[A]: + def getId(a: A): Id + +object IdExtractor: + def apply[A](using e: IdExtractor[A]): IdExtractor[A] = e + + def create[A](f: A => Id): IdExtractor[A] = + (a: A) => f(a) + + def createStr[A](f: A => String): IdExtractor[A] = + (a: A) => Id(f(a)) + + given IdExtractor[ProjectUpdated] = createStr(_.id) + given IdExtractor[ProjectRemoved] = createStr(_.id) + given IdExtractor[UserUpdated] = createStr(_.id) + given IdExtractor[UserRemoved] = createStr(_.id) + given IdExtractor[ProjectAuthorizationAdded] = createStr(_.projectId) + given IdExtractor[ProjectAuthorizationUpdated] = createStr(_.projectId) + given IdExtractor[ProjectAuthorizationRemoved] = createStr(_.projectId) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/MessageReader.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/MessageReader.scala new file mode 100644 index 00000000..0b9175f9 --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/MessageReader.scala @@ -0,0 +1,107 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.renku.search.provision.handler + +import cats.effect.Sync +import cats.syntax.all.* +import fs2.Stream + +import io.renku.queue.client.{QueueClient, QueueMessage} +import io.renku.redis.client.{ClientId, QueueName} +import io.renku.search.provision.QueueMessageDecoder +import scala.concurrent.duration.FiniteDuration +import fs2.Chunk +import cats.effect.{Async, Resource} +import cats.Show +import io.renku.queue.client.RequestId +import io.renku.redis.client.MessageId +import scribe.Scribe + +trait MessageReader[F[_]]: + def read[A](using + QueueMessageDecoder[F, A], + Show[A] + ): Stream[F, MessageReader.Message[A]] + + def readGrouped[A](chunkSize: Int, timeout: FiniteDuration)(using + QueueMessageDecoder[F, A], + Show[A], + Async[F] + ): Stream[F, Chunk[MessageReader.Message[A]]] = + read[A].groupWithin(chunkSize, timeout) + + def markProcessed(id: MessageId): F[Unit] + def markProcessedError(err: Throwable, id: MessageId)(using logger: Scribe[F]): F[Unit] + +object MessageReader: + final case class Message[A](raw: QueueMessage, decoded: Seq[A]): + val id = raw.id + val requestId: RequestId = RequestId(raw.header.requestId) + def map[B](f: A => B): Message[B] = Message(raw, decoded.map(f)) + def stream[F[_]]: Stream[F, A] = Stream.emits(decoded).covary[F] + + /** MessageReader that dequeues messages attempt to decode it. 
If decoding fails, the + * message is marked as processed and the next message is read. + */ + def apply[F[_]: Sync]( + queueClient: Resource[F, QueueClient[F]], + queue: QueueName, + clientId: ClientId, + chunkSize: Int + ): MessageReader[F] = + new MessageReader[F]: + val logger = scribe.cats.effect[F] + + def read[A](using QueueMessageDecoder[F, A], Show[A]): Stream[F, Message[A]] = + for { + client <- Stream.resource(queueClient) + last <- Stream.eval(client.findLastProcessed(clientId, queue)) + qmsg <- client.acquireEventsStream(queue, chunkSize, last) + dec <- Stream + .eval(QueueMessageDecoder[F, A].decodeMessage(qmsg).attempt) + .flatMap { + case Right(dms) => Stream.emit(Message(qmsg, dms)) + case Left(err) => + for { + _ <- Stream.eval( + logger.error( + s"Decoding messageId: ${qmsg.id} for '${queue.name}' failed", + err + ) + ) + _ <- Stream.eval(client.markProcessed(clientId, queue, qmsg.id)) + } yield Message(qmsg, Seq.empty) + } + _ <- Stream.eval(logInfo(dec)) + } yield dec + + def markProcessed(id: MessageId): F[Unit] = + queueClient.use(_.markProcessed(clientId, queue, id)) + + def markProcessedError(err: Throwable, id: MessageId)(using + logger: Scribe[F] + ): F[Unit] = + markProcessed(id) >> + logger.error(s"Processing messageId: ${id} for '${queue}' failed", err) + + private def logInfo[A: Show](m: Message[A]): F[Unit] = + lazy val values = m.decoded.mkString_(", ") + logger.info( + s"""Received message queue: ${queue.name}, id: ${m.id}, source: ${m.raw.header.source}, type: ${m.raw.header.`type`} for: ${values}""" + ) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/handler/PipelineSteps.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/PipelineSteps.scala new file mode 100644 index 00000000..6ad4cda6 --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/PipelineSteps.scala @@ -0,0 +1,63 @@ +/* + * Copyright 2024 Swiss Data Science Center 
package io.renku.search.provision.handler

import cats.effect.{Resource, Sync}
import io.renku.search.solr.client.SearchSolrClient
import io.renku.queue.client.QueueClient
import io.renku.redis.client.ClientId
import io.renku.redis.client.QueueName
import io.renku.search.provision.QueuesConfig

/** Bundle of all processing stages available to a provisioning pipeline. */
trait PipelineSteps[F[_]]:
  def reader: MessageReader[F]
  def converter: ConvertDocument[F]
  def pushToSolr: PushToSolr[F]
  def fetchFromSolr: FetchFromSolr[F]
  def deleteFromSolr: DeleteFromSolr[F]
  def pushToRedis: PushToRedis[F]
  def userUtils: UserUtils[F]

object PipelineSteps:
  /** Wires up all stages for the given queue. The single [[MessageReader]]
    * instance is shared, so solr push/delete stages acknowledge messages on
    * the same reader they were consumed from.
    */
  def apply[F[_]: Sync](
      solrClient: SearchSolrClient[F],
      queueClient: Resource[F, QueueClient[F]],
      queueConfig: QueuesConfig,
      inChunkSize: Int,
      clientId: ClientId
  )(
      queue: QueueName
  ): PipelineSteps[F] =
    new PipelineSteps[F]:
      val reader: MessageReader[F] =
        MessageReader[F](queueClient, queue, clientId, inChunkSize)
      val converter: ConvertDocument[F] = ConvertDocument[F]
      val pushToSolr: PushToSolr[F] = PushToSolr[F](solrClient, reader)
      val fetchFromSolr: FetchFromSolr[F] = FetchFromSolr[F](solrClient)
      val deleteFromSolr: DeleteFromSolr[F] = DeleteFromSolr[F](solrClient, reader)
      val pushToRedis: PushToRedis[F] =
        PushToRedis[F](queueClient, clientId, queueConfig)
      val userUtils: UserUtils[F] = UserUtils[F](fetchFromSolr, pushToRedis)
package io.renku.search.provision.handler

import cats.Show
import cats.effect.{Resource, Sync}
import cats.syntax.all.*
import fs2.Pipe

import io.renku.avro.codec.encoders.all.given
import io.renku.events.v1.ProjectAuthorizationRemoved
import io.renku.queue.client.*
import io.renku.redis.client.ClientId
import io.renku.redis.client.MessageId
import io.renku.search.provision.QueuesConfig

/** Publishes derived events back onto redis queues. */
trait PushToRedis[F[_]]:
  /** Enqueues every incoming ProjectAuthorizationRemoved payload on the
    * configured queue, emitting the redis message id of each enqueued event.
    */
  def pushAuthorizationRemoved(
      requestId: RequestId
  )(using
      Show[ProjectAuthorizationRemoved]
  ): Pipe[F, ProjectAuthorizationRemoved, MessageId]

object PushToRedis:

  def apply[F[_]: Sync](
      queueClient: Resource[F, QueueClient[F]],
      clientId: ClientId,
      queueConfig: QueuesConfig
  ): PushToRedis[F] =
    new PushToRedis[F] {
      val logger = scribe.cats.effect[F]

      def pushAuthorizationRemoved(
          requestId: RequestId
      )(using
          Show[ProjectAuthorizationRemoved]
      ): Pipe[F, ProjectAuthorizationRemoved, MessageId] =
        _.evalMap { payload =>
          for
            header <- createHeader(requestId)
            _ <- logger.debug(show"Pushing $payload to redis")
            messageId <- queueClient.use(
              _.enqueue(
                queueConfig.projectAuthorizationRemoved,
                header,
                payload
              )
            )
          yield messageId
        }

      // Header for events produced by this service; the creation time comes
      // from the effectful clock (CreationTime.nowF).
      def createHeader(requestId: RequestId): F[MessageHeader] =
        CreationTime.nowF.map { now =>
          MessageHeader(
            MessageSource(clientId.value),
            ProjectAuthorizationRemoved.SCHEMA$,
            DataContentType.Binary,
            SchemaVersion.V1,
            now,
            requestId
          )
        }
    }
package io.renku.search.provision.handler

import cats.effect.Sync
import cats.syntax.all.*
import fs2.{Chunk, Pipe}

import io.renku.queue.client.QueueMessage
import io.renku.search.solr.client.SearchSolrClient
import io.renku.search.solr.documents.EntityDocument

/** Inserts decoded documents into solr and acknowledges the source messages. */
trait PushToSolr[F[_]]:
  def pushChunk: Pipe[F, Chunk[MessageReader.Message[EntityDocument]], Unit]

  /** Pushes chunks as produced by the upstream stream. */
  def push: Pipe[F, MessageReader.Message[EntityDocument], Unit] =
    _.chunks.through(pushChunk)

  /** Pushes each message individually, as a one-element chunk. */
  def push1: Pipe[F, MessageReader.Message[EntityDocument], Unit] =
    _.map(Chunk(_)).through(pushChunk)

object PushToSolr:

  def apply[F[_]: Sync](
      solrClient: SearchSolrClient[F],
      reader: MessageReader[F]
  ): PushToSolr[F] =
    new PushToSolr[F] {
      val logger = scribe.cats.effect[F]

      def pushChunk: Pipe[F, Chunk[MessageReader.Message[EntityDocument]], Unit] =
        _.evalMap { chunk =>
          val docSeq = chunk.toList.flatMap(_.decoded)
          // Only the last message of the chunk is acknowledged; reading
          // resumes after the last-processed id (see MessageReader.read), so
          // this records progress for the whole chunk. Empty chunks are a no-op.
          chunk.last.map(_.raw).fold(Sync[F].unit) { lastMessage =>
            logger.debug(s"Push ${docSeq} to solr") >>
              solrClient
                .insert(docSeq)
                .flatMap(_ => reader.markProcessed(lastMessage.id))
                .onError(reader.markProcessedError(_, lastMessage.id)(using logger))
          }
        }
    }
b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/QueueMessageDecoder.scala new file mode 100644 index 00000000..06ed7cfa --- /dev/null +++ b/modules/search-provision/src/main/scala/io/renku/search/provision/handler/QueueMessageDecoder.scala @@ -0,0 +1,73 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package io.renku.search.provision

import cats.MonadThrow
import cats.syntax.all.*
import io.renku.avro.codec.{AvroDecoder, AvroReader}
import io.renku.avro.codec.all.given
import io.renku.queue.client.{DataContentType, QueueMessage}
import org.apache.avro.Schema
import io.renku.events.v1.*

/** Decodes the avro payload of a [[QueueMessage]] into values of `A`. */
trait QueueMessageDecoder[F[_], A]:
  def decodeMessage(message: QueueMessage): F[Seq[A]]

object QueueMessageDecoder:
  /** Builds a decoder for `A` from its avro schema. The payload encoding
    * (binary vs. json) is selected from the message header's content type.
    */
  def from[F[_]: MonadThrow, A](schema: Schema)(using
      AvroDecoder[A]
  ): QueueMessageDecoder[F, A] =
    val avro = AvroReader(schema)
    new QueueMessageDecoder:
      def decodeMessage(message: QueueMessage): F[Seq[A]] =
        findContentType(message).flatMap(decodePayload(message))

      // Fails in F when the header carries an unknown content type.
      private def findContentType(message: QueueMessage): F[DataContentType] =
        MonadThrow[F].fromEither(DataContentType.from(message.header.dataContentType))

      private def decodePayload(message: QueueMessage): DataContentType => F[Seq[A]] = {
        case DataContentType.Binary => catchNonFatal(avro.read[A](message.payload))
        case DataContentType.Json   => catchNonFatal(avro.readJson[A](message.payload))
      }

      private def catchNonFatal(f: => Seq[A]): F[Seq[A]] =
        MonadThrow[F].catchNonFatal(f)

  def apply[F[_], A](using d: QueueMessageDecoder[F, A]): QueueMessageDecoder[F, A] = d

  // One given per supported event type, all derived from the avro schema.
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, ProjectCreated] =
    from(ProjectCreated.SCHEMA$)
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, ProjectUpdated] =
    from(ProjectUpdated.SCHEMA$)
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, ProjectRemoved] =
    from(ProjectRemoved.SCHEMA$)

  given [F[_]: MonadThrow]: QueueMessageDecoder[F, UserAdded] =
    from(UserAdded.SCHEMA$)
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, UserUpdated] =
    from(UserUpdated.SCHEMA$)
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, UserRemoved] =
    from(UserRemoved.SCHEMA$)

  given [F[_]: MonadThrow]: QueueMessageDecoder[F, ProjectAuthorizationAdded] =
    from(ProjectAuthorizationAdded.SCHEMA$)
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, ProjectAuthorizationUpdated] =
    from(ProjectAuthorizationUpdated.SCHEMA$)
  given [F[_]: MonadThrow]: QueueMessageDecoder[F, ProjectAuthorizationRemoved] =
    from(ProjectAuthorizationRemoved.SCHEMA$)
package io.renku.search.provision.handler

import io.renku.events.v1.*
import cats.Show
import cats.syntax.all.*

/** Show instances used when logging the payloads of incoming events. */
trait ShowInstances:
  given Show[ProjectCreated] =
    Show.show(pc => show"slug '${pc.slug}'")

  given Show[ProjectUpdated] =
    Show.show(pu => show"slug '${pu.slug}'")

  given Show[ProjectRemoved] =
    // NOTE(review): the label says 'slug' but the rendered value is the id —
    // kept as-is to preserve existing log output; confirm intended wording.
    Show.show(pr => show"slug '${pr.id}'")

  given Show[UserUpdated] =
    Show.show(u => s"id '${u.id}'")

  given Show[UserAdded] =
    // Prefer the last name when present, otherwise fall back to the id.
    Show.show(u => u.lastName.fold(s"id '${u.id}'")(v => s"lastName '$v'"))

  given Show[UserRemoved] =
    Show.show(e => show"id '${e.id}'")

  given Show[ProjectAuthorizationAdded] =
    Show.show(v =>
      s"projectId '${v.projectId}', userId '${v.userId}', role '${v.role}'"
    )

  given Show[ProjectAuthorizationUpdated] =
    Show.show(v =>
      s"projectId '${v.projectId}', userId '${v.userId}', role '${v.role}'"
    )

  given Show[ProjectAuthorizationRemoved] =
    Show.show(v => s"projectId '${v.projectId}', userId '${v.userId}'")
package io.renku.search.provision.handler

import fs2.{Pipe, Stream}
import MessageReader.Message
import io.renku.search.model.Id
import cats.effect.Sync
import io.renku.events.v1.ProjectAuthorizationRemoved

/** User-related maintenance spanning solr and redis. */
trait UserUtils[F[_]]:
  /** For every user id carried by the message, emits one
    * ProjectAuthorizationRemoved event to redis per project found for that
    * user in solr.
    */
  def removeFromProjects: Pipe[F, Message[Id], Unit]

object UserUtils:
  def apply[F[_]: Sync](
      fetchFromSolr: FetchFromSolr[F],
      pushToRedis: PushToRedis[F]
  ): UserUtils[F] =
    new UserUtils[F] with ShowInstances {
      val logger = scribe.cats.effect[F]

      def removeFromProjects: Pipe[F, Message[Id], Unit] =
        _.evalMap { msg =>
          // One auth-removed event per (project, user) pair found in solr.
          val authRemovals =
            Stream
              .emits(msg.decoded)
              .flatMap(userId =>
                fetchFromSolr.fetchProjectForUser(userId).map(_ -> userId)
              )
              .map { case (projectId, userId) =>
                ProjectAuthorizationRemoved(projectId.id.value, userId.value)
              }
              .evalTap(data => logger.info(s"Sending $data to redis"))
              .through(pushToRedis.pushAuthorizationRemoved(msg.requestId))

          (Stream.eval(
            logger.debug(s"Send authRemove events for user ids: ${msg.decoded}")
          ) ++ authRemovals).compile.drain
        }
    }
b/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationAddedProvisioning.scala deleted file mode 100644 index a1d70d49..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationAddedProvisioning.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision -package project - -import cats.Show -import cats.effect.{Async, Resource} -import fs2.io.net.Network -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1.ProjectAuthorizationAdded -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.provision.TypeTransformers.given -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object AuthorizationAddedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpdateProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - - UpdateProvisioningProcess.make[F, ProjectAuthorizationAdded, documents.Project]( - queueName, - ProjectAuthorizationAdded.SCHEMA$, - idExtractor, - docUpdate, - redisConfig, - solrConfig - ) - - private given Show[ProjectAuthorizationAdded] = - Show.show[ProjectAuthorizationAdded](v => - s"projectId '${v.projectId}', userId '${v.userId}', role '${v.role}'" - ) - - private lazy val idExtractor: ProjectAuthorizationAdded => Id = - paa => Id(paa.projectId) - - private lazy val docUpdate - : ((ProjectAuthorizationAdded, documents.Project)) => documents.Project = { - case (update, orig) => - orig.addMember( - Id(update.userId), - memberRoleTransformer.transform(update.role) - ) - } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioning.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioning.scala deleted file mode 100644 index 1189bf2c..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioning.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * 
Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision -package project - -import cats.Show -import cats.effect.{Async, Resource} -import fs2.io.net.Network -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1.ProjectAuthorizationRemoved -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object AuthorizationRemovedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpdateProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - - UpdateProvisioningProcess.make[F, ProjectAuthorizationRemoved, documents.Project]( - queueName, - ProjectAuthorizationRemoved.SCHEMA$, - idExtractor, - docUpdate, - redisConfig, - solrConfig - ) - - private given Show[ProjectAuthorizationRemoved] = - Show.show[ProjectAuthorizationRemoved](v => - s"projectId '${v.projectId}', userId '${v.userId}'" - ) - - private lazy val idExtractor: ProjectAuthorizationRemoved => Id = par => - Id(par.projectId) - - private lazy val docUpdate - : ((ProjectAuthorizationRemoved, documents.Project)) => documents.Project = { - case (update, orig) => - orig.removeMember(Id(update.userId)) - } diff --git 
a/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioning.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioning.scala deleted file mode 100644 index d10eaa67..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioning.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision -package project - -import cats.Show -import cats.effect.{Async, Resource} -import fs2.io.net.Network -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1.ProjectAuthorizationUpdated -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.provision.TypeTransformers.given -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object AuthorizationUpdatedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpdateProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - - UpdateProvisioningProcess.make[F, ProjectAuthorizationUpdated, documents.Project]( - queueName, - ProjectAuthorizationUpdated.SCHEMA$, - idExtractor, - docUpdate, - redisConfig, - solrConfig - ) - - private given Show[ProjectAuthorizationUpdated] = - Show.show[ProjectAuthorizationUpdated](v => - s"projectId '${v.projectId}', userId '${v.userId}', role '${v.role}'" - ) - - private lazy val idExtractor: ProjectAuthorizationUpdated => Id = - pau => Id(pau.projectId) - - private lazy val docUpdate - : ((ProjectAuthorizationUpdated, documents.Project)) => documents.Project = { - case (update, orig) => - orig.addMember( - Id(update.userId), - memberRoleTransformer.transform(update.role) - ) - } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectCreatedProvisioning.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectCreatedProvisioning.scala deleted file mode 100644 index 3e89292b..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectCreatedProvisioning.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * 
Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision.project - -import cats.Show -import cats.effect.{Async, Resource} -import cats.syntax.all.* -import fs2.io.net.Network - -import io.github.arainko.ducktape.* -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1 -import io.renku.events.v1.{ProjectCreated, Visibility} -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.* -import io.renku.search.provision.TypeTransformers.given -import io.renku.search.provision.UpsertProvisioningProcess -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object ProjectCreatedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpsertProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - UpsertProvisioningProcess.make[F, ProjectCreated, documents.Project]( - queueName, - ProjectCreated.SCHEMA$, - redisConfig, - solrConfig - ) - - private given Show[ProjectCreated] = - Show.show[ProjectCreated](pc => show"slug '${pc.slug}'") - - private given Transformer[ProjectCreated, documents.Project] = - _.into[documents.Project].transform( - Field.computed(_.owners, pc => List(Id(pc.createdBy))), - Field.default(_.members), - Field.default(_.score) - ) diff --git 
a/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectRemovedProcess.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectRemovedProcess.scala deleted file mode 100644 index c696e367..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectRemovedProcess.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision.project - -import cats.Show -import cats.effect.{Async, Resource} -import cats.syntax.all.* -import fs2.io.net.Network -import io.github.arainko.ducktape.* -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1 -import io.renku.events.v1.ProjectRemoved -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.provision.SolrRemovalProcess -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object ProjectRemovedProcess: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, SolrRemovalProcess[F]] = - given Scribe[F] = scribe.cats[F] - SolrRemovalProcess.make[F, ProjectRemoved]( - queueName, - ProjectRemoved.SCHEMA$, - redisConfig, - solrConfig, - onSolrPersist = None - ) - - private given Show[ProjectRemoved] = - Show.show[ProjectRemoved](pr => show"slug '${pr.id}'") - - private given Transformer[ProjectRemoved, Id] = - r => Id(r.id) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectUpdatedProvisioning.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectUpdatedProvisioning.scala deleted file mode 100644 index 4971d84f..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/project/ProjectUpdatedProvisioning.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision -package project - -import cats.Show -import cats.effect.{Async, Resource} -import fs2.io.net.Network -import io.github.arainko.ducktape.* -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1.ProjectUpdated -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.provision.TypeTransformers.given -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object ProjectUpdatedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpdateProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - UpdateProvisioningProcess.make[F, ProjectUpdated, documents.Project]( - queueName, - ProjectUpdated.SCHEMA$, - idExtractor, - docUpdate, - redisConfig, - solrConfig - ) - - private given Show[ProjectUpdated] = - Show.show[ProjectUpdated](v => s"slug '${v.slug}'") - - private lazy val idExtractor: ProjectUpdated => Id = pu => Id(pu.id) - - private lazy val docUpdate - : ((ProjectUpdated, documents.Project)) => documents.Project = { - case (update, orig) => - update - .into[documents.Project] - .transform( - Field.const(_.createdBy, orig.createdBy), - Field.const(_.creationDate, orig.creationDate), - Field.const(_.owners, orig.owners), - Field.const(_.members, orig.members), - Field.default(_.score) - ) - } diff --git 
a/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserAddedProvisioning.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserAddedProvisioning.scala deleted file mode 100644 index f2ac173e..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserAddedProvisioning.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package io.renku.search.provision.user - -import cats.Show -import cats.effect.Async -import cats.effect.Resource -import cats.syntax.all.* -import fs2.io.net.Network -import io.github.arainko.ducktape.* -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1 -import io.renku.events.v1.UserAdded -import io.renku.redis.client.QueueName -import io.renku.redis.client.RedisConfig -import io.renku.search.provision.UpsertProvisioningProcess -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -trait UserAddedProvisioning[F[_]] extends UpsertProvisioningProcess[F] - -object UserAddedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpsertProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - UpsertProvisioningProcess.make[F, UserAdded, documents.User]( - queueName, - UserAdded.SCHEMA$, - redisConfig, - solrConfig - ) - - private given Show[UserAdded] = - Show.show[UserAdded](u => - u.lastName.map(v => s"lastName '$v'").getOrElse(s"id '${u.id}'") - ) - - private given Transformer[UserAdded, documents.User] = - _.into[documents.User].transform( - Field.default(_.score), - Field.computed(_.name, u => documents.User.nameFrom(u.firstName, u.lastName)) - ) diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserRemovedProcess.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserRemovedProcess.scala deleted file mode 100644 index 79fedf51..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserRemovedProcess.scala +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision.user - -import cats.Show -import cats.effect.{Async, Resource} -import cats.syntax.all.* -import fs2.Stream -import fs2.io.net.Network -import io.bullet.borer.Decoder -import io.bullet.borer.derivation.MapBasedCodecs.deriveDecoder -import io.github.arainko.ducktape.* -import io.renku.avro.codec.decoders.all.given -import io.renku.avro.codec.encoders.all.given -import io.renku.events.v1 -import io.renku.events.v1.{ProjectAuthorizationRemoved, UserRemoved} -import io.renku.queue.client.* -import io.renku.queue.client.DataContentType.Binary -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.Id -import io.renku.search.provision.ProvisioningProcess.clientId -import io.renku.search.provision.{OnSolrPersist, SolrRemovalProcess} -import io.renku.search.solr.client.SearchSolrClient -import io.renku.search.solr.documents.Project -import io.renku.search.solr.schema.EntityDocumentSchema.Fields -import io.renku.solr.client.{QueryData, SolrConfig} -import scribe.Scribe - -object UserRemovedProcess: - - def make[F[_]: Async: Network]( - userRemovedQueue: QueueName, - authRemovedQueue: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, SolrRemovalProcess[F]] = - given Scribe[F] = scribe.cats[F] - - SolrRemovalProcess.make[F, UserRemoved]( - userRemovedQueue, - UserRemoved.SCHEMA$, - redisConfig, - solrConfig, - 
onSolrPersist = Some(onSolrPersist[F](authRemovedQueue)) - ) - - private given Show[UserRemoved] = - Show.show[UserRemoved](e => show"id '${e.id}'") - - private given Transformer[UserRemoved, Id] = - r => Id(r.id) - - private def onSolrPersist[F[_]: Async](authRemovedQueue: QueueName) = - - case class ProjectId(id: String) - given Decoder[ProjectId] = deriveDecoder[ProjectId] - - new OnSolrPersist[F, UserRemoved] { - override def execute(in: UserRemoved, requestId: RequestId)( - queueClient: QueueClient[F], - solrClient: SearchSolrClient[F] - ): F[Unit] = - findAffectedProjects(solrClient, in.id) - .evalMap(enqueueAuthRemoved(queueClient, requestId, _, in.id)) - .compile - .drain - - private def findAffectedProjects( - sc: SearchSolrClient[F], - userId: String - ): Stream[F, ProjectId] = - Stream - .iterate(1)(_ + 1) - .evalMap(p => sc.query[ProjectId](prepareQuery(userId, p))) - .map(_.responseBody.docs) - .takeWhile(_.nonEmpty) - .flatMap(Stream.emits) - - private val pageSize = 20 - - private def prepareQuery(userId: String, page: Int) = - QueryData( - s"${Fields.entityType}:${Project.entityType} ${Fields.owners}:$userId ${Fields.members}:$userId", - filter = Seq.empty, - limit = pageSize * page, - offset = pageSize * (page - 1) - ).withFields(Fields.id) - - private def enqueueAuthRemoved( - qc: QueueClient[F], - requestId: RequestId, - projectId: ProjectId, - userId: String - ): F[Unit] = - qc.enqueue( - authRemovedQueue, - createHeader(requestId), - ProjectAuthorizationRemoved(projectId.id, userId) - ).void - - private def createHeader(requestId: RequestId) = - MessageHeader( - MessageSource(clientId.value), - ProjectAuthorizationRemoved.SCHEMA$, - Binary, - SchemaVersion("V1"), - CreationTime.now, - requestId - ) - } diff --git a/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserUpdatedProvisioning.scala b/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserUpdatedProvisioning.scala deleted file mode 100644 
index b006fb9f..00000000 --- a/modules/search-provision/src/main/scala/io/renku/search/provision/user/UserUpdatedProvisioning.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2024 Swiss Data Science Center (SDSC) - * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and - * Eidgenössische Technische Hochschule Zürich (ETHZ). - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.renku.search.provision -package user - -import cats.Show -import cats.effect.{Async, Resource} -import fs2.io.net.Network -import io.bullet.borer.Codec.* -import io.github.arainko.ducktape.* -import io.renku.avro.codec.decoders.all.given -import io.renku.events.v1.UserUpdated -import io.renku.redis.client.{QueueName, RedisConfig} -import io.renku.search.model.{Id, Name, users} -import io.renku.search.solr.documents -import io.renku.solr.client.SolrConfig -import scribe.Scribe - -object UserUpdatedProvisioning: - - def make[F[_]: Async: Network]( - queueName: QueueName, - redisConfig: RedisConfig, - solrConfig: SolrConfig - ): Resource[F, UpdateProvisioningProcess[F]] = - given Scribe[F] = scribe.cats[F] - UpdateProvisioningProcess.make[F, UserUpdated, documents.User]( - queueName, - UserUpdated.SCHEMA$, - idExtractor, - docUpdate, - redisConfig, - solrConfig - ) - - private given Show[UserUpdated] = - Show.show[UserUpdated](u => s"id '${u.id}'") - - private lazy val idExtractor: UserUpdated => Id = uu => Id(uu.id) - - private lazy val docUpdate: 
((UserUpdated, documents.User)) => documents.User = { - case (update, _) => - update - .into[documents.User] - .transform( - Field.default(_.score), - Field.computed(_.name, u => documents.User.nameFrom(u.firstName, u.lastName)) - ) - } diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala new file mode 100644 index 00000000..1c07111b --- /dev/null +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/ProvisioningSuite.scala @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Swiss Data Science Center (SDSC) + * A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and + * Eidgenössische Technische Hochschule Zürich (ETHZ). + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.renku.search.provision + +import cats.effect.{IO, Resource} +import cats.syntax.all.* + +import io.renku.queue.client.QueueSpec +import io.renku.redis.client.ClientId +import io.renku.redis.client.QueueName +import io.renku.search.provision.project.ProjectSyntax +import io.renku.search.provision.handler.PipelineSteps +import io.renku.search.solr.client.SearchSolrSpec +import munit.CatsEffectSuite +import io.renku.queue.client.QueueClient +import io.renku.search.solr.client.SearchSolrClient +import io.renku.search.provision.user.UserSyntax +import io.renku.search.LoggingConfigure + +trait ProvisioningSuite + extends CatsEffectSuite + with LoggingConfigure + with QueueSpec + with SearchSolrSpec + with ProjectSyntax + with UserSyntax: + + val queueConfig: QueuesConfig = QueuesConfig( + projectCreated = QueueName("projectCreated"), + projectUpdated = QueueName("projectUpdated"), + projectRemoved = QueueName("projectRemoved"), + projectAuthorizationAdded = QueueName("projectAuthorizationAdded"), + projectAuthorizationUpdated = QueueName("projectAuthorizationUpdated"), + projectAuthorizationRemoved = QueueName("projectAuthorizationRemoved"), + userAdded = QueueName("userAdded"), + userUpdated = QueueName("userUpdated"), + userRemoved = QueueName("userRemoved") + ) + + def withMessageHandlers( + cfg: QueuesConfig = queueConfig + ): Resource[IO, (MessageHandlers[IO], QueueClient[IO], SearchSolrClient[IO])] = + val clientId = ClientId("provision-test-client") + (withSearchSolrClient(), withQueueClient()).mapN { (solrClient, queueClient) => + val steps = + PipelineSteps[IO]( + solrClient, + Resource.pure(queueClient), + queueConfig, + 1, + clientId + ) + val handlers = MessageHandlers[IO](steps, queueConfig) + (handlers, queueClient, solrClient) + } diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala 
b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala index 661a2b8f..131f4458 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationAddedProvisioningSpec.scala @@ -29,38 +29,30 @@ import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.* import io.renku.events.v1.{ProjectAuthorizationAdded, ProjectMemberRole} import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.{Id, projects} -import io.renku.search.solr.client.SearchSolrSpec -import io.renku.search.solr.documents.{Entity, Project} +import io.renku.search.provision.ProvisioningSuite +import io.renku.search.solr.documents.{EntityDocument, Project} import munit.CatsEffectSuite -class AuthorizationAddedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: +class AuthorizationAddedProvisioningSpec extends ProvisioningSuite: (memberAdded :: ownerAdded :: noUpdate :: Nil) .foreach { case TestCase(name, updateF) => test(s"can fetch events, decode them, and update docs in Solr in case of $name"): - val queue = RedisClientGenerators.queueNameGen.generateOne - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for solrDocs <- SignallingRef.of[IO, Set[Project]](Set.empty) - provisioningFiber <- provisioner.provisioningProcess.start + provisioningFiber <- handlers.projectAuthAdded.compile.drain.start projectDoc = projectCreatedGen("member-add").generateOne.toSolrDocument _ <- 
solrClient.insert(Seq(projectDoc.widen)) authAdded = updateF(projectDoc) _ <- queueClient.enqueue( - queue, + queueConfig.projectAuthorizationAdded, messageHeaderGen(ProjectAuthorizationAdded.SCHEMA$).generateOne, authAdded ) @@ -68,8 +60,15 @@ class AuthorizationAddedProvisioningSpec docsCollectorFiber <- Stream .awakeEvery[IO](500 millis) - .evalMap(_ => solrClient.findById[Project](projectDoc.id)) - .evalMap(_.fold(().pure[IO])(e => solrDocs.update(_ => Set(e)))) + .evalTap(_ => + scribe.cats.io.info(s"Looking for project with id ${projectDoc.id}...") + ) + .evalMap(_ => solrClient.findById[EntityDocument](projectDoc.id)) + .evalMap( + _.fold(().pure[IO])(e => + solrDocs.update(_ => Set(e.asInstanceOf[Project])) + ) + ) .compile .drain .start @@ -87,18 +86,6 @@ class AuthorizationAddedProvisioningSpec } } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - AuthorizationAddedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - private case class TestCase(name: String, f: Project => ProjectAuthorizationAdded) private lazy val memberAdded = TestCase( "member added", diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala index d42689df..aeae51da 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationRemovedProvisioningSpec.scala @@ -30,28 +30,19 @@ import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.* import io.renku.events.v1.* import io.renku.queue.client.Generators.messageHeaderGen -import 
io.renku.queue.client.QueueSpec -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrSpec import io.renku.search.solr.documents.Project import munit.CatsEffectSuite -class AuthorizationRemovedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: +class AuthorizationRemovedProvisioningSpec extends ProvisioningSuite: test("can fetch events, decode them, and update docs in Solr"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for solrDocs <- SignallingRef.of[IO, Set[Project]](Set.empty) - provisioningFiber <- provisioner.provisioningProcess.start + provisioningFiber <- handlers.projectAuthRemoved.compile.drain.start projectDoc = projectCreatedGen("member-remove").generateOne.toSolrDocument _ <- solrClient.insert(Seq(projectDoc.widen)) @@ -61,7 +52,7 @@ class AuthorizationRemovedProvisioningSpec projectDoc.createdBy.value ) _ <- queueClient.enqueue( - queue, + queueConfig.projectAuthorizationRemoved, messageHeaderGen(ProjectAuthorizationRemoved.SCHEMA$).generateOne, authRemoved ) @@ -84,17 +75,5 @@ class AuthorizationRemovedProvisioningSpec yield () } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - AuthorizationRemovedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - override def munitFixtures: Seq[Fixture[_]] = List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala 
b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala index f5b3f0dc..7c4ec83c 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/AuthorizationUpdatedProvisioningSpec.scala @@ -30,38 +30,28 @@ import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.* import io.renku.events.v1.{ProjectAuthorizationUpdated, ProjectMemberRole} import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.{Id, projects} -import io.renku.search.solr.client.SearchSolrSpec -import io.renku.search.solr.documents.{Entity, Project} +import io.renku.search.solr.documents.{EntityDocument, Project} import munit.CatsEffectSuite -class AuthorizationUpdatedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: +class AuthorizationUpdatedProvisioningSpec extends ProvisioningSuite: (memberAdded :: ownerAdded :: noUpdate :: Nil) .foreach { case TestCase(name, updateF) => test(s"can fetch events, decode them, and update docs in Solr in case of $name"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for solrDocs <- SignallingRef.of[IO, Set[Project]](Set.empty) - provisioningFiber <- provisioner.provisioningProcess.start + provisioningFiber <- handlers.projectAuthUpdated.compile.drain.start projectDoc = projectCreatedGen("member-add").generateOne.toSolrDocument _ <- 
solrClient.insert(Seq(projectDoc.widen)) authAdded = updateF(projectDoc) _ <- queueClient.enqueue( - queue, + queueConfig.projectAuthorizationUpdated, messageHeaderGen(ProjectAuthorizationUpdated.SCHEMA$).generateOne, authAdded ) @@ -88,18 +78,6 @@ class AuthorizationUpdatedProvisioningSpec } } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - AuthorizationUpdatedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - private case class TestCase(name: String, f: Project => ProjectAuthorizationUpdated) private lazy val memberAdded = TestCase( diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala index be34474f..57779de8 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectCreatedProvisioningSpec.scala @@ -23,40 +23,30 @@ import cats.effect.{IO, Resource} import cats.syntax.all.* import fs2.Stream import fs2.concurrent.SignallingRef -import io.renku.avro.codec.AvroIO import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.projectCreatedGen import io.renku.events.v1.{ProjectCreated, Visibility} import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.{DataContentType, QueueSpec} -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} +import io.renku.queue.client.DataContentType import io.renku.search.GeneratorSyntax.* import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrSpec -import 
io.renku.search.solr.documents.{Entity, Project} +import io.renku.search.provision.ProvisioningSuite +import io.renku.search.solr.documents.{EntityDocument, Project} import munit.CatsEffectSuite -class ProjectCreatedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: - - private val avro = AvroIO(ProjectCreated.SCHEMA$) +class ProjectCreatedProvisioningSpec extends ProvisioningSuite: test("can fetch events binary encoded, decode them, and send them to Solr"): - val queue = RedisClientGenerators.queueNameGen.generateOne - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for - solrDocs <- SignallingRef.of[IO, Set[Entity]](Set.empty) + solrDocs <- SignallingRef.of[IO, Set[EntityDocument]](Set.empty) - provisioningFiber <- provisioner.provisioningProcess.start + provisioningFiber <- handlers.projectCreated.compile.drain.start created = projectCreatedGen(prefix = "binary").generateOne _ <- queueClient.enqueue( - queue, + queueConfig.projectCreated, messageHeaderGen(ProjectCreated.SCHEMA$, DataContentType.Binary).generateOne, created ) @@ -78,17 +68,15 @@ class ProjectCreatedProvisioningSpec } test("can fetch events JSON encoded, decode them, and send them to Solr"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for - solrDocs <- SignallingRef.of[IO, Set[Entity]](Set.empty) + solrDocs <- SignallingRef.of[IO, Set[EntityDocument]](Set.empty) - provisioningFiber <- provisioner.provisioningProcess.start + provisioningFiber <- handlers.projectCreated.compile.drain.start created = projectCreatedGen(prefix = "json").generateOne _ <- queueClient.enqueue( - queue, + queueConfig.projectCreated, 
messageHeaderGen(ProjectCreated.SCHEMA$, DataContentType.Json).generateOne, created ) @@ -109,17 +97,5 @@ class ProjectCreatedProvisioningSpec yield () } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - ProjectCreatedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - override def munitFixtures: Seq[Fixture[_]] = List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala index 88ca1dd7..8d65b7aa 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectRemovedProcessSpec.scala @@ -18,45 +18,33 @@ package io.renku.search.provision.project +import scala.concurrent.duration.* + import cats.effect.{IO, Resource} -import cats.syntax.all.* import fs2.Stream import fs2.concurrent.SignallingRef -import io.renku.avro.codec.AvroIO + import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.* import io.renku.events.v1.ProjectRemoved import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.{EntityType, Id} +import io.renku.search.provision.ProvisioningSuite import io.renku.search.query.Query import io.renku.search.query.Query.Segment import io.renku.search.query.Query.Segment.typeIs -import io.renku.search.solr.client.SearchSolrSpec -import 
io.renku.search.solr.documents.{Entity, Project} +import io.renku.search.solr.documents.{EntityDocument, Project} import munit.CatsEffectSuite -import scala.concurrent.duration.* - -class ProjectRemovedProcessSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: - - private val avro = AvroIO(ProjectRemoved.SCHEMA$) +class ProjectRemovedProcessSpec extends ProvisioningSuite: test(s"can fetch events, decode them, and remove Solr"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for solrDoc <- SignallingRef.of[IO, Option[Project]](None) - provisioningFiber <- provisioner.removalProcess.start + provisioningFiber <- handlers.projectRemoved.compile.drain.start created = projectCreatedGen(prefix = "remove").generateOne _ <- solrClient.insert(Seq(created.toSolrDocument.widen)) @@ -76,7 +64,7 @@ class ProjectRemovedProcessSpec removed = ProjectRemoved(created.id) _ <- queueClient.enqueue( - queue, + queueConfig.projectRemoved, messageHeaderGen(ProjectRemoved.SCHEMA$).generateOne, removed ) @@ -92,17 +80,5 @@ class ProjectRemovedProcessSpec private lazy val queryProjects = Query(typeIs(EntityType.Project)) - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - ProjectRemovedProcess - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - override def munitFixtures: Seq[Fixture[_]] = List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala index 
e8b47ca5..049987f3 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/project/ProjectUpdatedProvisioningSpec.scala @@ -24,45 +24,33 @@ import cats.syntax.all.* import fs2.Stream import fs2.concurrent.SignallingRef import io.github.arainko.ducktape.* -import io.renku.avro.codec.AvroIO import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.* import io.renku.events.v1.{ProjectCreated, ProjectUpdated} import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrSpec -import io.renku.search.solr.documents.{Entity, Project} +import io.renku.search.provision.ProvisioningSuite +import io.renku.search.solr.documents.{EntityDocument, Project} import munit.CatsEffectSuite -class ProjectUpdatedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: - - private val avro = AvroIO(ProjectUpdated.SCHEMA$) +class ProjectUpdatedProvisioningSpec extends ProvisioningSuite: (nameUpdate :: slugUpdate :: repositoriesUpdate :: visibilityUpdate :: descUpdate :: noUpdate :: Nil) .foreach { case TestCase(name, updateF) => test(s"can fetch events, decode them, and update in Solr in case of $name"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for - solrDocs <- SignallingRef.of[IO, Set[Entity]](Set.empty) + solrDocs <- SignallingRef.of[IO, Set[EntityDocument]](Set.empty) - provisioningFiber <- 
provisioner.provisioningProcess.start + provisioningFiber <- handlers.projectUpdated.compile.drain.start created = projectCreatedGen(prefix = "update").generateOne _ <- solrClient.insert(Seq(created.toSolrDocument.widen)) updated = updateF(created) _ <- queueClient.enqueue( - queue, + queueConfig.projectUpdated, messageHeaderGen(ProjectUpdated.SCHEMA$).generateOne, updated ) @@ -86,18 +74,6 @@ class ProjectUpdatedProvisioningSpec } } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - ProjectUpdatedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - private case class TestCase(name: String, f: ProjectCreated => ProjectUpdated) private lazy val nameUpdate = TestCase( "name update", diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala index 528fecd9..0ae4dbd7 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserAddedProvisioningSpec.scala @@ -19,42 +19,32 @@ package io.renku.search.provision.user import cats.effect.{IO, Resource} -import cats.syntax.all.* import fs2.Stream import fs2.concurrent.SignallingRef import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.userAddedGen import io.renku.events.v1.UserAdded import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrSpec -import 
io.renku.search.solr.documents.{Entity, User} +import io.renku.search.solr.documents.{EntityDocument, User} import munit.CatsEffectSuite import scala.concurrent.duration.* +import io.renku.search.provision.ProvisioningSuite -class UserAddedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with UserSyntax: +class UserAddedProvisioningSpec extends ProvisioningSuite: test("can fetch events, decode them, and send them to Solr"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => for - solrDoc <- SignallingRef.of[IO, Option[Entity]](None) + solrDoc <- SignallingRef.of[IO, Option[EntityDocument]](None) - provisioningFiber <- provisioner.provisioningProcess.start + provisioningFiber <- handlers.userAdded.compile.drain.start userAdded = userAddedGen(prefix = "user-added").generateOne _ <- queueClient.enqueue( - queue, + queueConfig.userAdded, messageHeaderGen(UserAdded.SCHEMA$).generateOne, userAdded ) @@ -75,17 +65,5 @@ class UserAddedProvisioningSpec yield () } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - UserAddedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - override def munitFixtures: Seq[Fixture[_]] = List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala index 5c755edb..fdd37bac 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala +++ 
b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserRemovedProcessSpec.scala @@ -19,127 +19,90 @@ package io.renku.search.provision.user import cats.effect.{IO, Resource} -import cats.syntax.all.* import fs2.Stream import fs2.concurrent.SignallingRef -import io.renku.avro.codec.AvroIO -import io.renku.avro.codec.decoders.all.given import io.renku.avro.codec.encoders.all.given import io.renku.events.EventsGenerators.* import io.renku.events.v1.* import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* -import io.renku.search.model.EntityType import io.renku.search.model.ModelGenerators.projectMemberRoleGen import io.renku.search.provision.QueueMessageDecoder -import io.renku.search.provision.project.ProjectSyntax -import io.renku.search.query.Query -import io.renku.search.query.Query.Segment -import io.renku.search.query.Query.Segment.typeIs -import io.renku.search.solr.client.SearchSolrSpec import io.renku.search.solr.client.SolrDocumentGenerators.* -import io.renku.search.solr.documents.{Entity, User} -import munit.CatsEffectSuite +import io.renku.search.solr.documents.{EntityDocument, User} import scala.concurrent.duration.* +import io.renku.search.provision.ProvisioningSuite -class UserRemovedProcessSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with ProjectSyntax: - - private val avro = AvroIO(UserRemoved.SCHEMA$) - +class UserRemovedProcessSpec extends ProvisioningSuite: test( "can fetch events, decode them, and remove from Solr relevant User document " + "and issue ProjectAuthorizationRemoved events for all affected projects" ): - val userRemovedQueue = RedisClientGenerators.queueNameGen.generateOne - val authRemovedQueue = RedisClientGenerators.queueNameGen.generateOne - val messageDecoder = 
QueueMessageDecoder[IO, ProjectAuthorizationRemoved]( - ProjectAuthorizationRemoved.SCHEMA$ - ) - - clientsAndProvisioning(userRemovedQueue, authRemovedQueue).use { - case (queueClient, solrClient, provisioner) => - for - solrDoc <- SignallingRef.of[IO, Option[Entity]](None) - authRemovalEvents <- SignallingRef.of[IO, Set[ProjectAuthorizationRemoved]]( - Set.empty - ) - - provisioningFiber <- provisioner.removalProcess.start - - user = userDocumentGen.generateOne - affectedProjects = projectCreatedGen("affected") - .map(_.toSolrDocument.addMember(user.id, projectMemberRoleGen.generateOne)) - .generateList(min = 20, max = 25) - notAffectedProject = projectCreatedGen( - "not-affected" - ).generateOne.toSolrDocument - _ <- solrClient.insert(user :: notAffectedProject :: affectedProjects) - - docsCollectorFiber <- - Stream - .awakeEvery[IO](500 millis) - .evalMap(_ => solrClient.findById[User](user.id)) - .evalMap(e => solrDoc.update(_ => e)) - .compile - .drain - .start - - _ <- solrDoc.waitUntil(_.nonEmpty) - - eventsCollectorFiber <- - queueClient - .acquireEventsStream(authRemovedQueue, 1, None) - .evalMap(messageDecoder.decodeMessage) - .evalMap(e => authRemovalEvents.update(_ ++ e)) - .compile - .drain - .start - - _ <- queueClient.enqueue( - userRemovedQueue, - messageHeaderGen(UserRemoved.SCHEMA$).generateOne, - UserRemoved(user.id.value) - ) - - _ <- solrDoc.waitUntil(_.isEmpty) - - _ <- authRemovalEvents.waitUntil( - _ == affectedProjects - .map(ap => ProjectAuthorizationRemoved(ap.id.value, user.id.value)) - .toSet - ) - - _ <- provisioningFiber.cancel - _ <- docsCollectorFiber.cancel - _ <- eventsCollectorFiber.cancel - yield () + val messageDecoder = QueueMessageDecoder[IO, ProjectAuthorizationRemoved] + + withMessageHandlers(queueConfig).use { case (handlers, queueClient, solrClient) => + for + solrDoc <- SignallingRef.of[IO, Option[EntityDocument]](None) + authRemovalEvents <- SignallingRef.of[IO, Set[ProjectAuthorizationRemoved]]( + Set.empty + ) + + 
provisioningFiber <- handlers.userRemoved.compile.drain.start + + user = userDocumentGen.generateOne + affectedProjects = projectCreatedGen("affected") + .map(_.toSolrDocument.addMember(user.id, projectMemberRoleGen.generateOne)) + .generateList(min = 20, max = 25) + notAffectedProject = projectCreatedGen( + "not-affected" + ).generateOne.toSolrDocument + _ <- solrClient.insert(user :: notAffectedProject :: affectedProjects) + + docsCollectorFiber <- + Stream + .awakeEvery[IO](500 millis) + .evalMap(_ => solrClient.findById[User](user.id)) + .evalMap(e => solrDoc.update(_ => e)) + .compile + .drain + .start + eventsCollectorFiber <- + queueClient + .acquireEventsStream(queueConfig.projectAuthorizationRemoved, 1, None) + .evalMap(messageDecoder.decodeMessage) + .evalMap(e => authRemovalEvents.update(_ ++ e)) + .compile + .drain + .start + + _ <- scribe.cats.io.info("Waiting for test documents to be inserted") + _ <- solrDoc.waitUntil(_.nonEmpty) + _ <- scribe.cats.io.info("Test documents inserted") + + _ <- queueClient.enqueue( + queueConfig.userRemoved, + messageHeaderGen(UserRemoved.SCHEMA$).generateOne, + UserRemoved(user.id.value) + ) + + _ <- solrDoc.waitUntil(_.isEmpty) + _ <- scribe.cats.io.info( + "User has been removed. 
Waiting for project auth removals" + ) + + expectedAuthRemovals = + affectedProjects + .map(ap => ProjectAuthorizationRemoved(ap.id.value, user.id.value)) + .toSet + + _ <- authRemovalEvents.waitUntil(x => expectedAuthRemovals.diff(x).isEmpty) + + _ <- provisioningFiber.cancel + _ <- docsCollectorFiber.cancel + _ <- eventsCollectorFiber.cancel + yield () } - private lazy val queryProjects = Query(typeIs(EntityType.Project)) - - private def clientsAndProvisioning( - userRemovedQueue: QueueName, - authRemovedQueue: QueueName - ) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - UserRemovedProcess - .make[IO]( - userRemovedQueue, - authRemovedQueue, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - override def munitFixtures: Seq[Fixture[_]] = List(withRedisClient, withQueueClient, withSearchSolrClient) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserSyntax.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserSyntax.scala index 9c0f17bc..fcc388b2 100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserSyntax.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserSyntax.scala @@ -29,11 +29,11 @@ trait UserSyntax: .into[User] .transform( Field.default(_.score), - Field.computed(_.name, u => User.nameFrom(u.firstName, u.lastName)) + Field.computed(_.name, u => User.nameFrom(u.firstName, u.lastName)), + Field.default(_.visibility) ) def update(updated: UserUpdated): UserAdded = added.copy( firstName = updated.firstName, - lastName = updated.lastName, - email = updated.email + lastName = updated.lastName ) diff --git a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala index 998e0afa..b549b7f2 
100644 --- a/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala +++ b/modules/search-provision/src/test/scala/io/renku/search/provision/user/UserUpdatedProvisioningSpec.scala @@ -22,47 +22,34 @@ import cats.effect.{IO, Resource} import cats.syntax.all.* import fs2.Stream import fs2.concurrent.SignallingRef -import io.renku.avro.codec.AvroIO -import io.renku.avro.codec.encoders.all.given +import io.renku.avro.codec.all.given import io.renku.events.EventsGenerators.{stringGen, userAddedGen} import io.renku.events.v1.{UserAdded, UserUpdated} import io.renku.queue.client.Generators.messageHeaderGen -import io.renku.queue.client.QueueSpec -import io.renku.redis.client.RedisClientGenerators.* -import io.renku.redis.client.{QueueName, RedisClientGenerators} import io.renku.search.GeneratorSyntax.* import io.renku.search.model.Id -import io.renku.search.solr.client.SearchSolrSpec -import io.renku.search.solr.documents.{Entity, User} +import io.renku.search.solr.documents.{EntityDocument, User} import munit.CatsEffectSuite import scala.concurrent.duration.* +import io.renku.search.provision.ProvisioningSuite -class UserUpdatedProvisioningSpec - extends CatsEffectSuite - with QueueSpec - with SearchSolrSpec - with UserSyntax: - - private val avro = AvroIO(UserUpdated.SCHEMA$) - +class UserUpdatedProvisioningSpec extends ProvisioningSuite: (firstNameUpdate :: lastNameUpdate :: emailUpdate :: noUpdate :: Nil).foreach { case TestCase(name, updateF) => test(s"can fetch events, decode them, and update in Solr in case of $name"): - val queue = RedisClientGenerators.queueNameGen.generateOne - - clientsAndProvisioning(queue).use { case (queueClient, solrClient, provisioner) => + withMessageHandlers(queueConfig).use { case (handler, queueClient, solrClient) => for - solrDoc <- SignallingRef.of[IO, Option[Entity]](None) + solrDoc <- SignallingRef.of[IO, Option[EntityDocument]](None) - provisioningFiber <- 
provisioner.provisioningProcess.start + provisioningFiber <- handler.userUpdated.compile.drain.start userAdded = userAddedGen(prefix = "user-update").generateOne _ <- solrClient.insert(Seq(userAdded.toSolrDocument.widen)) userUpdated = updateF(userAdded) _ <- queueClient.enqueue( - queue, + queueConfig.userUpdated, messageHeaderGen(UserUpdated.SCHEMA$).generateOne, userUpdated ) @@ -86,18 +73,6 @@ class UserUpdatedProvisioningSpec } } - private def clientsAndProvisioning(queueName: QueueName) = - (withQueueClient() >>= withSearchSolrClient().tupleLeft) - .flatMap { case (rc, sc) => - UserUpdatedProvisioning - .make[IO]( - queueName, - withRedisClient.redisConfig, - withSearchSolrClient.solrConfig - ) - .map((rc, sc, _)) - } - private case class TestCase(name: String, f: UserAdded => UserUpdated) private lazy val firstNameUpdate = TestCase( "firstName update", diff --git a/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala b/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala index f01f428a..75ddc2f2 100644 --- a/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala +++ b/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClient.scala @@ -21,20 +21,22 @@ package io.renku.search.solr.client import cats.data.NonEmptyList import cats.effect.{Async, Resource} import fs2.io.net.Network +import fs2.Stream import io.bullet.borer.{Decoder, Encoder} import io.renku.search.model.Id import io.renku.search.query.Query -import io.renku.search.solr.documents.Entity +import io.renku.search.solr.documents.EntityDocument import io.renku.solr.client.{QueryData, QueryResponse, SolrClient, SolrConfig} import scala.reflect.ClassTag trait SearchSolrClient[F[_]]: - def findById[D <: Entity](id: Id)(using ct: ClassTag[D]): F[Option[D]] + def findById[D <: EntityDocument](id: Id)(using ct: ClassTag[D]): F[Option[D]] def insert[D: 
Encoder](documents: Seq[D]): F[Unit] def deleteIds(ids: NonEmptyList[Id]): F[Unit] - def queryEntity(query: Query, limit: Int, offset: Int): F[QueryResponse[Entity]] + def queryEntity(query: Query, limit: Int, offset: Int): F[QueryResponse[EntityDocument]] def query[D: Decoder](query: QueryData): F[QueryResponse[D]] + def queryAll[D: Decoder](query: QueryData): Stream[F, D] object SearchSolrClient: def make[F[_]: Async: Network]( diff --git a/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClientImpl.scala b/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClientImpl.scala index 1dd269b7..74b789b3 100644 --- a/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClientImpl.scala +++ b/modules/search-solr-client/src/main/scala/io/renku/search/solr/client/SearchSolrClientImpl.scala @@ -21,10 +21,11 @@ package io.renku.search.solr.client import cats.data.NonEmptyList import cats.effect.Async import cats.syntax.all.* +import fs2.Stream import io.bullet.borer.{Decoder, Encoder} import io.renku.search.model.Id import io.renku.search.query.Query -import io.renku.search.solr.documents.Entity +import io.renku.search.solr.documents.EntityDocument import io.renku.search.solr.query.LuceneQueryInterpreter import io.renku.search.solr.schema.EntityDocumentSchema import io.renku.solr.client.facet.{Facet, Facets} @@ -54,12 +55,12 @@ private class SearchSolrClientImpl[F[_]: Async](solrClient: SolrClient[F]) query: Query, limit: Int, offset: Int - ): F[QueryResponse[Entity]] = + ): F[QueryResponse[EntityDocument]] = for { solrQuery <- interpreter.run(query) _ <- logger.debug(s"Query: ${query.render} ->Solr: $solrQuery") res <- solrClient - .query[Entity]( + .query[EntityDocument]( QueryData(QueryString(solrQuery.query.value, limit, offset)) .withSort(solrQuery.sort) .withFacet(Facets(typeTerms)) @@ -70,8 +71,19 @@ private class SearchSolrClientImpl[F[_]: Async](solrClient: SolrClient[F]) 
override def query[D: Decoder](query: QueryData): F[QueryResponse[D]] = solrClient.query[D](query) - override def findById[D <: Entity](id: Id)(using ct: ClassTag[D]): F[Option[D]] = - solrClient.findById[Entity](id.value).map(_.responseBody.docs.headOption) >>= { + override def queryAll[D: Decoder](query: QueryData): Stream[F, D] = + Stream + .iterate(query)(_.nextPage) + .evalMap(this.query) + .takeWhile(_.responseBody.docs.nonEmpty) + .flatMap(r => Stream.emits(r.responseBody.docs)) + + override def findById[D <: EntityDocument]( + id: Id + )(using ct: ClassTag[D]): F[Option[D]] = + solrClient + .findById[EntityDocument](id.value) + .map(_.responseBody.docs.headOption) >>= { case Some(e: D) => Some(e).pure[F] case Some(e) => new Exception(s"Entity '$id' is of type ${e.getClass} not ${ct.runtimeClass}") diff --git a/modules/search-solr-client/src/main/scala/io/renku/search/solr/documents/Entity.scala b/modules/search-solr-client/src/main/scala/io/renku/search/solr/documents/EntityDocument.scala similarity index 77% rename from modules/search-solr-client/src/main/scala/io/renku/search/solr/documents/Entity.scala rename to modules/search-solr-client/src/main/scala/io/renku/search/solr/documents/EntityDocument.scala index c354e28b..79f48eb8 100644 --- a/modules/search-solr-client/src/main/scala/io/renku/search/solr/documents/Entity.scala +++ b/modules/search-solr-client/src/main/scala/io/renku/search/solr/documents/EntityDocument.scala @@ -24,22 +24,23 @@ import io.renku.search.model.* import io.renku.search.model.projects.MemberRole import io.renku.search.model.projects.MemberRole.{Member, Owner} import io.renku.solr.client.EncoderSupport.* +import io.renku.search.model.projects.Visibility -sealed trait Entity: +sealed trait EntityDocument: val score: Option[Double] val id: Id - def widen: Entity = this + def widen: EntityDocument = this -object Entity: +object EntityDocument: val allTypes: Set[String] = Set(Project.entityType, User.entityType) given 
AdtEncodingStrategy = AdtEncodingStrategy.flat(typeMemberName = discriminatorField) - given Encoder[Entity] = deriveAllEncoders[Entity] - given Decoder[Entity] = deriveAllDecoders[Entity] - given Codec[Entity] = Codec.of[Entity] + given Encoder[EntityDocument] = deriveAllEncoders[EntityDocument] + given Decoder[EntityDocument] = deriveAllDecoders[EntityDocument] + given Codec[EntityDocument] = Codec.of[EntityDocument] final case class Project( id: Id, @@ -53,31 +54,35 @@ final case class Project( owners: List[Id] = List.empty, members: List[Id] = List.empty, score: Option[Double] = None -) extends Entity: +) extends EntityDocument: def addMember(userId: Id, role: MemberRole): Project = role match { - case Owner => copy(owners = (userId :: owners).distinct) - case Member => copy(members = (userId :: members).distinct) + case Owner => copy(owners = (userId :: owners).distinct, score = None) + case Member => copy(members = (userId :: members).distinct, score = None) } def removeMember(userId: Id): Project = - copy(owners = owners.filterNot(_ == userId), members = members.filterNot(_ == userId)) + copy( + owners = owners.filterNot(_ == userId), + members = members.filterNot(_ == userId), + score = None + ) object Project: - val entityType: String = "project" + val entityType: String = "Project" final case class User( id: Id, firstName: Option[users.FirstName] = None, lastName: Option[users.LastName] = None, name: Option[Name] = None, - email: Option[users.Email] = None, - score: Option[Double] = None -) extends Entity + score: Option[Double] = None, + visibility: Visibility = Visibility.Public +) extends EntityDocument object User: - val entityType: String = "user" + val entityType: String = "User" def nameFrom(firstName: Option[String], lastName: Option[String]): Option[Name] = Option(List(firstName, lastName).flatten.mkString(" ")) @@ -88,7 +93,6 @@ object User: id: Id, firstName: Option[users.FirstName] = None, lastName: Option[users.LastName] = None, - email: 
Option[users.Email] = None, score: Option[Double] = None ): User = User( @@ -96,6 +100,5 @@ object User: firstName, lastName, nameFrom(firstName.map(_.value), lastName.map(_.value)), - email, score ) diff --git a/modules/search-solr-client/src/main/scala/io/renku/search/solr/query/SolrToken.scala b/modules/search-solr-client/src/main/scala/io/renku/search/solr/query/SolrToken.scala index e5d5b5b1..f900370a 100644 --- a/modules/search-solr-client/src/main/scala/io/renku/search/solr/query/SolrToken.scala +++ b/modules/search-solr-client/src/main/scala/io/renku/search/solr/query/SolrToken.scala @@ -21,7 +21,7 @@ package io.renku.search.solr.query import cats.Monoid import cats.data.NonEmptyList import cats.syntax.all.* -import io.renku.search.model.EntityType +import io.renku.search.model.{EntityType, Id} import io.renku.search.model.projects.Visibility import io.renku.search.query.{Comparison, Field} import io.renku.search.solr.documents.{Project as SolrProject, User as SolrUser} @@ -35,6 +35,7 @@ opaque type SolrToken = String object SolrToken: val empty: SolrToken = "" def fromString(str: String): SolrToken = StringEscape.queryChars(str) + def fromId(id: Id): SolrToken = fromString(id.value) def fromVisibility(v: Visibility): SolrToken = v.name def fromEntityType(et: EntityType): SolrToken = et match @@ -83,6 +84,9 @@ object SolrToken: def fieldIs(field: Field, value: SolrToken): SolrToken = fieldOp(field, Comparison.Is, value) + def fieldIs(field: FieldName, value: SolrToken): SolrToken = + s"${field.name}:$value" + def unsafeFromString(s: String): SolrToken = s private def monoidWith(sep: String): Monoid[SolrToken] = diff --git a/modules/search-solr-client/src/main/scala/io/renku/search/solr/schema/EntityDocumentSchema.scala b/modules/search-solr-client/src/main/scala/io/renku/search/solr/schema/EntityDocumentSchema.scala index 6352bf69..7312672f 100644 --- a/modules/search-solr-client/src/main/scala/io/renku/search/solr/schema/EntityDocumentSchema.scala +++ 
b/modules/search-solr-client/src/main/scala/io/renku/search/solr/schema/EntityDocumentSchema.scala @@ -26,7 +26,6 @@ object EntityDocumentSchema: val createdBy: FieldName = FieldName("createdBy") val creationDate: FieldName = FieldName("creationDate") val description: FieldName = FieldName("description") - val email: FieldName = FieldName("email") val entityType: FieldName = FieldName("_type") val firstName: FieldName = FieldName("firstName") val id: FieldName = FieldName("id") @@ -82,7 +81,6 @@ object EntityDocumentSchema: val userFields: Seq[SchemaCommand] = Seq( SchemaCommand.Add(Field(Fields.firstName, FieldTypes.string)), SchemaCommand.Add(Field(Fields.lastName, FieldTypes.string)), - SchemaCommand.Add(Field(Fields.email, FieldTypes.string)), SchemaCommand.Add(CopyFieldRule(Fields.firstName, Fields.contentAll)), SchemaCommand.Add(CopyFieldRule(Fields.lastName, Fields.contentAll)) ) diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala index 22221a3d..401077fa 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala +++ b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SearchSolrClientSpec.scala @@ -27,7 +27,7 @@ import io.renku.search.model.users import io.renku.search.query.Query import io.renku.search.solr.client.SolrDocumentGenerators.* import io.renku.search.solr.documents.EntityOps.* -import io.renku.search.solr.documents.{Entity, Project, User} +import io.renku.search.solr.documents.{EntityDocument, Project, User} import io.renku.search.solr.schema.EntityDocumentSchema.Fields import io.renku.solr.client.QueryData import munit.CatsEffectSuite diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SolrDocumentGenerators.scala 
b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SolrDocumentGenerators.scala index d0f20bac..a3b4e964 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SolrDocumentGenerators.scala +++ b/modules/search-solr-client/src/test/scala/io/renku/search/solr/client/SolrDocumentGenerators.scala @@ -58,6 +58,5 @@ trait SolrDocumentGenerators: def userDocumentGen: Gen[User] = (idGen, Gen.option(userFirstNameGen), Gen.option(userLastNameGen)) .flatMapN { case (id, f, l) => - val e = (f, l).flatMapN(userEmailGen(_, _).generateSome) - User.of(id, f, l, e) + User.of(id, f, l) } diff --git a/modules/search-solr-client/src/test/scala/io/renku/search/solr/documents/EntityOps.scala b/modules/search-solr-client/src/test/scala/io/renku/search/solr/documents/EntityOps.scala index d392d007..1b4c4b21 100644 --- a/modules/search-solr-client/src/test/scala/io/renku/search/solr/documents/EntityOps.scala +++ b/modules/search-solr-client/src/test/scala/io/renku/search/solr/documents/EntityOps.scala @@ -21,8 +21,8 @@ package io.renku.search.solr.documents object EntityOps extends EntityOps trait EntityOps: - extension (entity: Entity) - def noneScore: Entity = entity match { + extension (entity: EntityDocument) + def noneScore: EntityDocument = entity match { case e: Project => e.copy(score = None) case e: User => e.copy(score = None) } diff --git a/nix/dev-scripts.nix b/nix/dev-scripts.nix index ab6f4529..ea6d37df 100644 --- a/nix/dev-scripts.nix +++ b/nix/dev-scripts.nix @@ -3,8 +3,38 @@ writeShellScriptBin, devshell-tools, }: rec { - redis-push = devshell-tools.lib.installScript { - script = ./scripts/redis-push; + redis-project-create = devshell-tools.lib.installScript { + script = ./scripts/redis-project-create; + inherit system; + }; + + redis-project-update = devshell-tools.lib.installScript { + script = ./scripts/redis-project-update; + inherit system; + }; + + redis-project-remove = devshell-tools.lib.installScript { + script = 
./scripts/redis-project-remove; + inherit system; + }; + + redis-user-add = devshell-tools.lib.installScript { + script = ./scripts/redis-user-add; + inherit system; + }; + + redis-user-remove = devshell-tools.lib.installScript { + script = ./scripts/redis-user-remove; + inherit system; + }; + + redis-auth-add = devshell-tools.lib.installScript { + script = ./scripts/redis-auth-add; + inherit system; + }; + + redis-auth-remove = devshell-tools.lib.installScript { + script = ./scripts/redis-auth-remove; inherit system; }; diff --git a/nix/scripts/redis-auth-add b/nix/scripts/redis-auth-add new file mode 100644 index 00000000..640965c4 --- /dev/null +++ b/nix/scripts/redis-auth-add @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_projectAuthAdded + +role="MEMBER" +while true; do + case "$1" in + --project) + shift; project="$1"; shift + ;; + --user) + shift; user="$1"; shift + ;; + --role) + shift; role="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$project" ] || [ -z "$user" ]; then + echo "Provide a user and project id, please." 
+ exit 1 +fi + + +header='{ + "source":"dev", + "type":"authorization.added", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "projectId":$project, + "userId":$user, + "role":$role +}' +payload=$(jq --null-input --arg project "$project" --arg user "$user" --arg role "$role" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-auth-remove b/nix/scripts/redis-auth-remove new file mode 100644 index 00000000..d6485fff --- /dev/null +++ b/nix/scripts/redis-auth-remove @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_projectAuthRemoved + +role="MEMBER" +while true; do + case "$1" in + --project) + shift; project="$1"; shift + ;; + --user) + shift; user="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$project" ] || [ -z "$user" ]; then + echo "Provide a user and project id, please." 
+ exit 1 +fi + + +header='{ + "source":"dev", + "type":"authorization.removed", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "projectId":$project, + "userId":$user +}' +payload=$(jq --null-input --arg project "$project" --arg user "$user" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-project-create b/nix/scripts/redis-project-create new file mode 100644 index 00000000..6a87b9f5 --- /dev/null +++ b/nix/scripts/redis-project-create @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_projectCreated + +id=$(head -c 16 < /dev/urandom | base64) +vis="PUBLIC" +while true; do + case "$1" in + --id) + shift; id="$1"; shift + ;; + --name) + shift; name="$1"; shift + ;; + --vis|--visibility) + shift; vis="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$name" ]; then + echo "Provide a project name, please." 
+ exit 1 +fi + + +header='{ + "source":"dev", + "type":"project.created", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "id":$id, + "name":$name, + "slug":$slug, + "repositories":[], + "visibility":$vis, + "description": {"string":"my project description"}, + "createdBy":"dev", + "creationDate":0 +}' +payload=$(jq --null-input --arg id "$id" --arg name "$name" --arg slug "$name/$id" --arg vis "$vis" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-project-remove b/nix/scripts/redis-project-remove new file mode 100644 index 00000000..0d25bc32 --- /dev/null +++ b/nix/scripts/redis-project-remove @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_projectRemoved + +while true; do + case "$1" in + --id) + shift; id="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$id" ]; then + echo "Provide a project id, please." 
+ exit 1 +fi + + +header='{ + "source":"dev", + "type":"project.removed", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "id":$id, +}' +payload=$(jq --null-input --arg id "$id" --arg name "$name" --arg slug "$name/$id" --arg vis "$vis" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-project-update b/nix/scripts/redis-project-update new file mode 100644 index 00000000..466cabb2 --- /dev/null +++ b/nix/scripts/redis-project-update @@ -0,0 +1,49 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_projectUpdated + +id=$(head -c 16 < /dev/urandom | base64) +vis="PUBLIC" +while true; do + case "$1" in + --id) + shift; id="$1"; shift + ;; + --name) + shift; name="$1"; shift + ;; + --vis|--visibility) + shift; vis="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$name" ]; then + echo "Provide a project name, please." 
+ exit 1 +fi + +header='{ + "source":"dev", + "type":"project.updated", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "id":$id, + "name":$name, + "slug":$slug, + "visibility":$vis, + "repositories": [], + "description": {"string":"my project description"}, +}' +payload=$(jq --null-input --arg id "$id" --arg name "$name" --arg slug "$id/$name" --arg vis "$vis" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-push b/nix/scripts/redis-push deleted file mode 100644 index 79860760..00000000 --- a/nix/scripts/redis-push +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash - -if [ -z "$1" ] || [ -z "$2" ]; then - echo "Provide a project id and name, please." - exit 1 -fi - -redis_host=${RS_REDIS_HOST:-rsdev} -redis_port=${RS_REDIS_PORT:-6379} -queue_name=$RS_REDIS_QUEUE_projectCreated - -header='{"source":"dev","type":"project.created","dataContentType":"application/avro+json","schemaVersion":"1","time":0,"requestId":"r1"}' -payload=$(jq --null-input --arg id "$1" --arg name "$2" --arg slug "$1/$2" '{"id":$id,"name":$name,"slug":$slug, "repositories":[],"visibility":"PUBLIC","description":{"string":"my project description"},"createdBy":"dev","creationDate":0}') - -redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-user-add b/nix/scripts/redis-user-add new file mode 100644 index 00000000..126702f1 --- /dev/null +++ b/nix/scripts/redis-user-add @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_userAdded + +id=$(head -c 16 < /dev/urandom | base64) +vis="PUBLIC" +while true; do + case "$1" in + --id) + shift; id="$1"; 
shift + ;; + --fname) + shift; fname="$1"; shift + ;; + --lname) + shift; lname="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$lname" ] || [ -z "$fname" ]; then + echo "Provide a user name, please." + exit 1 +fi + + +header='{ + "source":"dev", + "type":"user.add", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "id":$id, + "firstName":{"string":$fname}, + "lastName":{"string":$lname}, + "email":{"null": null} +}' +payload=$(jq --null-input --arg id "$id" --arg fname "$fname" --arg lname "$lname" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/scripts/redis-user-remove b/nix/scripts/redis-user-remove new file mode 100644 index 00000000..603ab216 --- /dev/null +++ b/nix/scripts/redis-user-remove @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +set -oe pipefail + +redis_host=${RS_REDIS_HOST:-rsdev} +redis_port=${RS_REDIS_PORT:-6379} +queue_name=$RS_REDIS_QUEUE_userRemoved + +while true; do + case "$1" in + --id) + shift; id="$1"; shift + ;; + *) break + ;; + esac +done +if [ -z "$id" ]; then + echo "Provide a user id, please." 
+ exit 1 +fi + + +header='{ + "source":"dev", + "type":"user.removed", + "dataContentType":"application/avro+json", + "schemaVersion":"V1", + "time":0, + "requestId":"r1" +}' +payload_tpl='{ + "id":$id +}' +payload=$(jq --null-input --arg id "$id" "$payload_tpl") + +echo redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" +redis-cli -h $redis_host -p $redis_port XADD $queue_name '*' headers "$header" payload "$payload" diff --git a/nix/services.nix b/nix/services.nix index 1db48b67..5328e459 100644 --- a/nix/services.nix +++ b/nix/services.nix @@ -10,6 +10,8 @@ heap = 512; }; + networking.hostName = "rsdev"; + services.dev-redis = { enable = true; instance = "search"; diff --git a/project/RedisServer.scala b/project/RedisServer.scala index 9d4daa69..898663be 100644 --- a/project/RedisServer.scala +++ b/project/RedisServer.scala @@ -21,11 +21,12 @@ import java.util.concurrent.atomic.AtomicBoolean import scala.sys.process.* import scala.util.Try -object RedisServer extends RedisServer("graph", port = 6379) +object RedisServer extends RedisServer("graph", None) @annotation.nowarn() -class RedisServer(module: String, val port: Int) { +class RedisServer(module: String, val redisPort: Option[Int]) { + val port = redisPort.orElse(sys.env.get("RS_REDIS_PORT").map(_.toInt)).getOrElse(6379) val host: String = sys.env.get("RS_REDIS_HOST").getOrElse("localhost") val url: String = s"redis://$host:$port" diff --git a/project/SolrServer.scala b/project/SolrServer.scala index 2335fea8..9943649f 100644 --- a/project/SolrServer.scala +++ b/project/SolrServer.scala @@ -23,11 +23,13 @@ import scala.annotation.tailrec import scala.sys.process.* import scala.util.Try -object SolrServer extends SolrServer("graph", port = 8983) +object SolrServer extends SolrServer("graph", None) @annotation.nowarn() -class SolrServer(module: String, port: Int) { +class SolrServer(module: String, solrPort: Option[Int]) { + private val port = + 
solrPort.orElse(sys.env.get("RS_SOLR_PORT").map(_.toInt)).getOrElse(8983) private val host: String = sys.env.get("RS_SOLR_HOST").getOrElse("localhost") val url: String = s"http://$host:$port"