Stage Id | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
---|---|---|---|---|---|---|---|---|---|
512502 | default | toStream at SparkDataStreamBuilder.scala:39 (details below) | 2025/08/02 15:11:28 | 20 ms | 1/1 | 105.3 KiB | | | |
512501 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 (details below) | 2025/08/02 15:11:28 | 0.7 s | 200/200 | 105.3 KiB | 105.3 KiB | | |
512500 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 (details below) | 2025/08/02 15:11:27 | 37 ms | 1/1 | 460.4 KiB | 105.3 KiB | | |

Stage 512502 details (toStream at SparkDataStreamBuilder.scala:39): call-site stack

    scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
    plusamp.scala.util.Profile$.time(Profile.scala:22)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
    plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
    scala.util.Success.$anonfun$map$1(Try.scala:255)
    scala.util.Success.map(Try.scala:213)
    scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
    scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
    scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
    scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
    java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    java.base/java.lang.Thread.run(Thread.java:829)

Stage 512501 details (toLocalIterator at SparkDataStreamBuilder.scala:39): RDD physical plan

    *(1) Sort [date#94278275 ASC NULLS FIRST], true, 0
    +- Exchange rangepartitioning(date#94278275 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7527360]
       +- InMemoryTableScan [date#94278275, drawdown_gross#94278300, drawdown_net#94278326]
          +- InMemoryRelation [date#94278275, allret#94278276, allcum#94278277, drawdown_gross#94278300, drawdown_length_gross#94278323, allretnet#94278324, allcumnet#94278325, drawdown_net#94278326, drawdown_length_net#94278327, numcos#94278328, largecum#94278329, largecumnet#94278330, largeret#94278331, largeretnet#94278332, midcum#94278333, midcumnet#94278334, midret#94278335, midretnet#94278336, smallcum#94278337, smallcumnet#94278338, smallret#94278339, smallretnet#94278340], StorageLevel(disk, memory, deserialized, 1 replicas)
             +- *(1) Project [CASE WHEN (date#94277187 = null) THEN null ELSE cast(date#94277187 as date) END AS date#94277363, CASE WHEN ((allret#94277188 = NA) OR (allret#94277188 = null)) THEN null ELSE cast(allret#94277188 as float) END...

Call-site stack:

    org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
    plusamp.scala.util.Profile$.time(Profile.scala:22)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
    plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
    scala.util.Success.$anonfun$map$1(Try.scala:255)
    scala.util.Success.map(Try.scala:213)
    scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
    scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
    scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
    scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
    java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    java.base/java.lang.Thread.run(Thread.java:829)

Stage 512500 details (toLocalIterator at SparkDataStreamBuilder.scala:39): RDD physical plan

    *(1) Project [CASE WHEN (date#94277187 = null) THEN null ELSE cast(date#94277187 as date) END AS date#94277363, CASE WHEN ((allret#94277188 = NA) OR (allret#94277188 = null)) THEN null ELSE cast(allret#94277188 as float) END AS allret#94277368, CASE WHEN ((allcum#94277189 = NA) OR (allcum#94277189 = null)) THEN null ELSE cast(allcum#94277189 as float) END AS allcum#94277369, CASE WHEN ((drawdown_gross#94277190 = NA) OR (drawdown_gross#94277190 = null)) THEN null ELSE cast(drawdown_gross#94277190 as float) END AS drawdown_gross#94277393, CASE WHEN ((drawdown_length_gross#94277191 = NA) OR (drawdown_length_gross#94277191 = null)) THEN null ELSE cast(drawdown_length_gross#94277191 as int) END AS drawdown_length_gross#94277394, CASE WHEN ((allretnet#94277192 = NA) OR (allretnet#94277192 = null)) THEN null ELSE cast(allretnet#94277192 as float) END AS allretnet#94277398, CASE WHEN ((allcumnet#94277193 = NA) OR (allcumnet#94277193 = null)) THEN null ELSE cast(allcumnet#94277193 as float) END AS allcumnet#9427740...

Call-site stack: identical to stage 512501.
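
For orientation, here is a minimal Scala sketch of the kind of pipeline these three stages imply, inferred only from the call sites and plan fragments above: a raw string DataFrame is cast with "NA" handling and cached (the *(1) Project feeding the InMemoryRelation), sorted by date through a rangepartitioning exchange, and consumed on the driver via toLocalIterator wrapped in a Stream. The object name SparkDataStreamSketch, the castNA helper, and the column selection are hypothetical; the actual plusamp SparkDataStreamBuilder code is not part of this capture.

```scala
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.functions._

import scala.collection.JavaConverters._

// Hypothetical reconstruction, not the actual plusamp code: only the shape of the
// pipeline suggested by the captured plans (NA-aware casts, cache, sort, toLocalIterator).
object SparkDataStreamSketch {

  // Cast a raw string column, mapping the literal "NA" and nulls to null.
  // Mirrors the CASE WHEN ((col = NA) OR (col = null)) THEN null ELSE cast(col ...) pattern
  // in the Project node above.
  private def castNA(df: DataFrame, col: String, to: String): DataFrame =
    df.withColumn(col, when(df(col) === "NA" || df(col).isNull, lit(null)).otherwise(df(col).cast(to)))

  def stream(raw: DataFrame): Stream[Row] = {
    // Roughly stage 512500: project/cast the raw string columns and cache the result
    // (the Project that feeds the InMemoryRelation; the 1-task stage that reads 460.4 KiB).
    val typed = Seq("drawdown_gross" -> "float", "drawdown_net" -> "float")
      .foldLeft(castNA(raw, "date", "date")) { case (df, (c, t)) => castNA(df, c, t) }
      .cache()

    // Roughly stage 512501: range-partitioned sort by date; the 200 in
    // "Exchange rangepartitioning(date ASC NULLS FIRST, 200)" matches the default
    // spark.sql.shuffle.partitions, hence the 200/200 tasks.
    val sorted = typed
      .select("date", "drawdown_gross", "drawdown_net")
      .orderBy(asc_nulls_first("date"))

    // Roughly stage 512502: toLocalIterator pulls partitions back to the driver lazily,
    // and wrapping the iterator in a Stream forces the first element when the head is
    // evaluated, which is where the small 1-task "toStream" stage comes from.
    sorted.toLocalIterator().asScala.toStream
  }
}
```

Under this reading, the 20 ms, 1-task toStream stage is just the lazy fetch of the first sorted partition on the driver; whether that matches the real SparkDataStreamBuilder depends on code this capture does not show.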