Stage Id ▾ | Pool Name | Description | Submitted | Duration | Tasks: Succeeded/Total | Input | Output | Shuffle Read | Shuffle Write |
---|---|---|---|---|---|---|---|---|---|
513086 | default | toStream at SparkDataStreamBuilder.scala:39 scala.collection.AbstractIterator.toStream(Iterator.scala:1431) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/02 15:25:04 | 20 ms |
1/1 | 110.2 KiB | | | |
513085 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(2) Sort [date#94391851 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(date#94391851 ASC NULLS FIRST, 200), ENSURE_REQUIREMENTS, [id=#7536452]
+- *(1) Project [date#94391851, allcum#94391985 AS cum_gross#94392207, allcumnet#94392012 AS cum_net#94392208]
+- InMemoryTableScan [allcum#94391985, allcumnet#94392012, date#94391851]
+- InMemoryRelation [date#94391851, allret#94391852, allcum#94391985, drawdown_gross#94391986, drawdown_length_gross#94391988, allretnet#94392011, allcumnet#94392012, drawdown_net#94392014, drawdown_length_net#94392016, numcos#94392018, largecum#94392020, largecumnet#94392023, largeret#94392025, largeretnet#94392027, midcum#94392030, midcumnet#94392032, midret#94392034, midretnet#94392082, smallcum#94392085, smallcumnet#94392087, smallret#94392112, smallretnet#94392137], StorageLevel(disk, memory, deserialized, 1 replicas)
+- *(1) Project [CASE WHEN (date#94390885 = null) THEN null ELSE cast(date#94390885 as date) END AS date#94390995, ...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/02 15:25:03 | 1.0 s |
200/200 | 110.2 KiB | 110.2 KiB | | |
513084 | default | toLocalIterator at SparkDataStreamBuilder.scala:39
RDD: *(1) Project [CASE WHEN (date#94390885 = null) THEN null ELSE cast(date#94390885 as date) END AS date#94390995, CASE WHEN ((allret#94390886 = NA) OR (allret#94390886 = null)) THEN null ELSE cast(allret#94390886 as float) END AS allret#94390996, CASE WHEN ((allcum#94390887 = NA) OR (allcum#94390887 = null)) THEN null ELSE cast(allcum#94390887 as float) END AS allcum#94390997, CASE WHEN ((drawdown_gross#94390888 = NA) OR (drawdown_gross#94390888 = null)) THEN null ELSE cast(drawdown_gross#94390888 as float) END AS drawdown_gross#94391008, CASE WHEN ((drawdown_length_gross#94390889 = NA) OR (drawdown_length_gross#94390889 = null)) THEN null ELSE cast(drawdown_length_gross#94390889 as int) END AS drawdown_length_gross#94391010, CASE WHEN ((allretnet#94390890 = NA) OR (allretnet#94390890 = null)) THEN null ELSE cast(allretnet#94390890 as float) END AS allretnet#94391011, CASE WHEN ((allcumnet#94390891 = NA) OR (allcumnet#94390891 = null)) THEN null ELSE cast(allcumnet#94390891 as float) END AS allcumnet#9439110...
org.apache.spark.sql.Dataset.toLocalIterator(Dataset.scala:3000) plusamp.middleware.model.core.data.SparkDataStreamBuilder.$anonfun$stream$1(SparkDataStreamBuilder.scala:39) plusamp.scala.util.Profile$.time(Profile.scala:22) plusamp.middleware.model.core.data.SparkDataStreamBuilder.<init>(SparkDataStreamBuilder.scala:39) plusamp.middleware.graphql.datafile.SparkAccessor.$anonfun$retrieveData$3(SparkAccessor.scala:77) scala.util.Success.$anonfun$map$1(Try.scala:255) scala.util.Success.map(Try.scala:213) scala.concurrent.Future.$anonfun$map$1(Future.scala:292) scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33) scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33) scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64) java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) java.base/java.lang.Thread.run(Thread.java:829) | 2025/08/02 15:25:03 | 39 ms |
1/1 | 498.9 KiB | 110.2 KiB | |