Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
---|---|---|---|---|---|---|---|---|---|
512420 | default | toStream at SparkDataStreamBuilder.scala:39 | 2025/08/02 15:09:34 | 18 ms | 1/1 | 61.0 KiB | | | |
512419 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/02 15:09:33 | 0.8 s | 200/200 | 61.0 KiB | 61.0 KiB | | |
512418 | default | toLocalIterator at SparkDataStreamBuilder.scala:39 | 2025/08/02 15:09:33 | 48 ms | 1/1 | 250.5 KiB | 61.0 KiB | | |

Stage details (the RDD plan and submitting call site shown under each row's "+details" link):

Stage 512420 (toStream at SparkDataStreamBuilder.scala:39), call site:

    scala.collection.AbstractIterator.toStream(Iterator.scala:1431)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
    plusamp.scala.util.Profile$.time(Profile.scala:22)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
    plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
    scala.util.Success.$anonfun$map$1(Try.scala:255)
    scala.util.Success.map(Try.scala:213)
    scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
    scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
    scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
    scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
    java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    java.base/java.lang.Thread.run(Thread.java:829)

Stage 512419 (toLocalIterator at SparkDataStreamBuilder.scala:39), RDD plan:

    *(2) Sort [date#94263534 ASC NULLS FIRST], true, 0
    +- Exchange rangepartitioning(date#94263534 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7526151]
       +- *(1) Project [date#94263534, allcum#94263582 AS cum_gross#94263766, allcumnet#94263609 AS cum_net#94263767]
          +- InMemoryTableScan [allcum#94263582, allcumnet#94263609, date#94263534]
                +- InMemoryRelation [date#94263534, allret#94263558, allcum#94263582, drawdown_gross#94263583, drawdown_length_gross#94263584, allretnet#94263608, allcumnet#94263609, drawdown_net#94263610, drawdown_length_net#94263634, numcos#94263635, largecum#94263662, largecumnet#94263685, largeret#94263689, largeretnet#94263694, midcum#94263696, midcumnet#94263698, midret#94263699, midretnet#94263700, smallcum#94263702, smallcumnet#94263703, smallret#94263705, smallretnet#94263707], StorageLevel(disk, memory, deserialized, 1 replicas)
                      +- *(1) Project [CASE WHEN (date#94262636 = null) THEN null ELSE cast(date#94262636 as date) END AS date#94262742, ...

Stage 512419, call site:

    org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39)
    plusamp.scala.util.Profile$.time(Profile.scala:22)
    plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39)
    plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77)
    scala.util.Success.$anonfun$map$1(Try.scala:255)
    scala.util.Success.map(Try.scala:213)
    scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
    scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
    scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
    scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
    java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
    java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
    java.base/java.lang.Thread.run(Thread.java:829)

Stage 512418 (toLocalIterator at SparkDataStreamBuilder.scala:39), RDD plan (truncated in the UI):

    *(1) Project [CASE WHEN (date#94262636 = null) THEN null ELSE cast(date#94262636 as date) END AS date#94262742, CASE WHEN ((allret#94262637 = NA) OR (allret#94262637 = null)) THEN null ELSE cast(allret#94262637 as float) END AS allret#94262748, CASE WHEN ((allcum#94262638 = NA) OR (allcum#94262638 = null)) THEN null ELSE cast(allcum#94262638 as float) END AS allcum#94262749, CASE WHEN ((drawdown_gross#94262639 = NA) OR (drawdown_gross#94262639 = null)) THEN null ELSE cast(drawdown_gross#94262639 as float) END AS drawdown_gross#94262750, CASE WHEN ((drawdown_length_gross#94262640 = NA) OR (drawdown_length_gross#94262640 = null)) THEN null ELSE cast(drawdown_length_gross#94262640 as int) END AS drawdown_length_gross#94262759, CASE WHEN ((allretnet#94262641 = NA) OR (allretnet#94262641 = null)) THEN null ELSE cast(allretnet#94262641 as float) END AS allretnet#94262760, CASE WHEN ((allcumnet#94262642 = NA) OR (allcumnet#94262642 = null)) THEN null ELSE cast(allcumnet#94262642 as float) END AS allcumnet#9426276...

The call site of stage 512418 is identical to that of stage 512419 (Dataset.toLocalIterator invoked from SparkDataStreamBuilder.scala:39).
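The call sites above suggest that SparkDataStreamBuilder wraps Dataset.toLocalIterator in a timed block and exposes the rows as a lazily evaluated Stream. Below is a minimal sketch of that pattern; the class shape, method names, and the Profile.time helper are assumptions inferred from the stack frames, not the project's actual source.

```scala
import scala.collection.JavaConverters._
import org.apache.spark.sql.{Dataset, Row}

// Stand-in for plusamp.scala.util.Profile.time: run a block and report its duration.
object Profile {
  def time[T](label: String)(body: => T): T = {
    val start = System.nanoTime()
    try body
    finally println(s"$label took ${(System.nanoTime() - start) / 1e6} ms")
  }
}

// Hypothetical reconstruction of the pattern visible in the stack traces:
// Profile.time(...) wrapping Dataset.toLocalIterator, then Iterator#toStream.
class SparkDataStreamBuilder(ds: Dataset[Row]) {
  // toLocalIterator pulls one partition at a time to the driver; toStream makes
  // the result a memoizing, lazy sequence for downstream consumers.
  val stream: Stream[Row] =
    Profile.time("stream") {
      ds.toLocalIterator().asScala.toStream
    }
}
```

In this shape, Iterator#toStream eagerly pulls the first element, which would account for the separate single-task "toStream" stage (512420) submitted a second after the toLocalIterator stages.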
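Stage 512419's RDD plan corresponds to a projection and a global sort over a cached intermediate table. A query along the following lines would produce that plan shape; the column names are taken from the plan, while the surrounding function is an assumption for illustration.

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.col

// Sketch of a query whose physical plan matches stage 512419:
// Project [date, allcum AS cum_gross, allcumnet AS cum_net] under a range-partitioned Sort,
// reading from a cached (InMemoryRelation) DataFrame.
def cumulativeReturns(cached: DataFrame): DataFrame =
  cached
    .select(
      col("date"),
      col("allcum").as("cum_gross"),
      col("allcumnet").as("cum_net"))
    .orderBy(col("date")) // global sort => Exchange rangepartitioning(date, 200)
```

The Exchange with 200 range partitions matches the default spark.sql.shuffle.partitions, which lines up with the 200/200 tasks of stage 512419.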
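The projection in stage 512418's plan converts string columns that use "NA"/"null" sentinels into typed nullable columns via CASE WHEN expressions. One way to generate such a projection is sketched below; the helper and function names are illustrative, not the project's actual code.

```scala
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions.{col, lit, when}

// Replace "NA"/"null" sentinels with real nulls and cast to the target type;
// this compiles to the CASE WHEN ... ELSE cast(...) END expressions seen in the plan.
def cleanCast(name: String, dataType: String): Column =
  when(col(name) === "NA" || col(name) === "null", lit(null))
    .otherwise(col(name).cast(dataType))
    .as(name)

def typed(raw: DataFrame): DataFrame =
  raw.select(
    // The plan only checks "null" for the date column, so it is handled separately here.
    when(col("date") === "null", lit(null)).otherwise(col("date").cast("date")).as("date"),
    cleanCast("allret", "float"),
    cleanCast("allcum", "float"),
    cleanCast("drawdown_gross", "float"),
    cleanCast("drawdown_length_gross", "int"),
    cleanCast("allretnet", "float"),
    cleanCast("allcumnet", "float"))
```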