record failure reason onJobEnd
barend-xebia committed Dec 4, 2024
1 parent 2dd19c3 commit c1ff34e
Showing 1 changed file with 5 additions and 6 deletions.
@@ -1,11 +1,10 @@
 package com.xebia.data.spot

-import io.opentelemetry.api.common.{AttributeKey, Attributes}
+import io.opentelemetry.api.common.Attributes
 import io.opentelemetry.api.trace.{Span, StatusCode}
 import io.opentelemetry.context.{Context, Scope}
 import org.apache.spark.SparkConf
 import org.apache.spark.scheduler.{
-  JobFailed,
   JobSucceeded,
   SparkListener,
   SparkListenerApplicationEnd,
@@ -96,10 +95,10 @@ class TelemetrySparkListener(val sparkConf: SparkConf) extends SparkListener wit
   override def onJobEnd(event: SparkListenerJobEnd): Unit = {
     jobSpans.get(event.jobId).foreach { case (span, _, scope) =>
       event.jobResult match {
-        case JobSucceeded => span.setStatus(StatusCode.OK)
-        case _ =>
-          // For some reason, the JobFailed case class is private[spark], and we can't record the exception.
-          span.setStatus(StatusCode.ERROR)
+        case JobSucceeded => span.setStatus(StatusCode.OK)
+        case jobFailed: Any =>
+          // The JobFailed(e) case class is private[spark], therefore we can't use span.recordException(e).
+          span.setStatus(StatusCode.ERROR, jobFailed.toString)
       }
       span.setAttribute(atts.jobTime, Long.box(event.time))
       span.end()
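
For context, here is a minimal, self-contained sketch of how the onJobEnd handler reads after this change. Only the lines inside the hunk above come from the commit; everything else (the jobSpans bookkeeping, the tuple layout, the jobTime key standing in for atts.jobTime, and the trailing scope.close()) is an assumption inferred from the visible diff context, not taken from the actual file.

import io.opentelemetry.api.common.AttributeKey
import io.opentelemetry.api.trace.{Span, StatusCode}
import io.opentelemetry.context.{Context, Scope}
import org.apache.spark.scheduler.{JobSucceeded, SparkListener, SparkListenerJobEnd}

import scala.collection.mutable

class OnJobEndSketch extends SparkListener {
  // Assumed bookkeeping: spans opened in onJobStart, keyed by job id (hypothetical).
  private val jobSpans = mutable.Map.empty[Int, (Span, Context, Scope)]
  // Hypothetical stand-in for the project's atts.jobTime attribute key.
  private val jobTime: AttributeKey[java.lang.Long] = AttributeKey.longKey("spark.job.time")

  override def onJobEnd(event: SparkListenerJobEnd): Unit = {
    jobSpans.get(event.jobId).foreach { case (span, _, scope) =>
      event.jobResult match {
        case JobSucceeded => span.setStatus(StatusCode.OK)
        case jobFailed: Any =>
          // JobFailed(e) is private[spark], so the wrapped exception cannot be
          // passed to span.recordException(e); record its toString as the status description.
          span.setStatus(StatusCode.ERROR, jobFailed.toString)
      }
      span.setAttribute(jobTime, Long.box(event.time))
      span.end()
      scope.close() // assumed cleanup; the real method continues past the visible hunk
    }
  }
}

Putting the failure reason into the status description keeps it visible on the span even though the wrapped exception cannot be attached via span.recordException, since JobFailed is private[spark].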